From 76d99a3d85349a091b4fef7dee7b39156c358e76 Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Sun, 7 Jun 2020 19:23:39 -0700 Subject: [PATCH 01/70] copying over changes to new fork Signed-off-by: Shivesh Ranjan --- build.gradle | 66 +- .../client/SchemaRegistryClient.java | 358 ++++++++ .../client/SchemaRegistryClientConfig.java | 31 + .../client/SchemaRegistryClientFactory.java | 25 + .../client/SchemaRegistryClientImpl.java | 461 ++++++++++ .../client/exceptions/RegistryExceptions.java | 187 ++++ .../client/TestSchemaRegistryClient.java | 612 +++++++++++++ .../common/ContinuationTokenIterator.java | 93 ++ .../pravega/schemaregistry/common/Either.java | 51 ++ .../schemaregistry/common/HashUtil.java | 21 + .../common/ContinuationTokenIteratorTest.java | 68 ++ .../contract/data/Compatibility.java | 203 ++++ .../contract/data/EncodingId.java | 35 + .../contract/data/EncodingInfo.java | 33 + .../contract/data/GroupHistoryRecord.java | 47 + .../contract/data/GroupProperties.java | 74 ++ .../contract/data/SchemaInfo.java | 62 ++ .../contract/data/SchemaValidationRule.java | 23 + .../contract/data/SchemaValidationRules.java | 66 ++ .../contract/data/SchemaWithVersion.java | 31 + .../contract/data/SerializationFormat.java | 45 + .../contract/data/VersionInfo.java | 47 + .../generated/rest/model/AddedTo.java | 101 ++ .../generated/rest/model/CanRead.java | 92 ++ .../generated/rest/model/CodecTypesList.java | 101 ++ .../generated/rest/model/Compatibility.java | 216 +++++ .../rest/model/CreateGroupRequest.java | 117 +++ .../generated/rest/model/EncodingId.java | 92 ++ .../generated/rest/model/EncodingInfo.java | 144 +++ .../rest/model/GetEncodingIdRequest.java | 117 +++ .../generated/rest/model/GroupHistory.java | 101 ++ .../rest/model/GroupHistoryRecord.java | 194 ++++ .../generated/rest/model/GroupProperties.java | 179 ++++ .../rest/model/ListGroupsResponse.java | 128 +++ .../generated/rest/model/SchemaInfo.java | 179 ++++ .../rest/model/SchemaValidationRule.java | 92 ++ .../rest/model/SchemaValidationRules.java | 103 +++ .../rest/model/SchemaVersionsList.java | 102 +++ .../rest/model/SchemaWithVersion.java | 119 +++ .../rest/model/SerializationFormat.java | 154 ++++ .../model/UpdateValidationRulesRequest.java | 116 +++ .../contract/generated/rest/model/Valid.java | 92 ++ .../generated/rest/model/ValidateRequest.java | 117 +++ .../generated/rest/model/VersionInfo.java | 142 +++ .../rest/server/api/ApiException.java | 10 + .../rest/server/api/ApiOriginFilter.java | 22 + .../rest/server/api/ApiResponseMessage.java | 69 ++ .../generated/rest/server/api/Bootstrap.java | 31 + .../generated/rest/server/api/GroupsApi.java | 412 +++++++++ .../rest/server/api/GroupsApiService.java | 54 ++ .../rest/server/api/JacksonJsonProvider.java | 18 + .../rest/server/api/NotFoundException.java | 10 + .../generated/rest/server/api/SchemasApi.java | 74 ++ .../rest/server/api/SchemasApiService.java | 22 + .../generated/rest/server/api/StringUtil.java | 42 + .../factories/GroupsApiServiceFactory.java | 13 + .../factories/SchemasApiServiceFactory.java | 13 + .../server/api/impl/GroupsApiServiceImpl.java | 134 +++ .../api/impl/SchemasApiServiceImpl.java | 26 + .../contract/transform/ModelHelper.java | 243 +++++ .../schemaregistry/contract/v1/ApiV1.java | 554 +++++++++++ contract/src/main/swagger/README.md | 44 + contract/src/main/swagger/SchemaRegistry.yaml | 867 ++++++++++++++++++ contract/src/main/swagger/server.config.json | 8 + .../contract/transform/ModelHelperTest.java | 138 +++ 65 files changed, 8240 
insertions(+), 1 deletion(-) create mode 100644 client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java create mode 100644 client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientConfig.java create mode 100644 client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientFactory.java create mode 100644 client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientImpl.java create mode 100644 client/src/main/java/io/pravega/schemaregistry/client/exceptions/RegistryExceptions.java create mode 100644 client/src/test/java/io/pravega/schemaregistry/client/TestSchemaRegistryClient.java create mode 100644 common/src/main/java/io/pravega/schemaregistry/common/ContinuationTokenIterator.java create mode 100644 common/src/main/java/io/pravega/schemaregistry/common/Either.java create mode 100644 common/src/main/java/io/pravega/schemaregistry/common/HashUtil.java create mode 100644 common/src/test/java/io/pravega/schemaregistry/common/ContinuationTokenIteratorTest.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/data/Compatibility.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/data/EncodingId.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/data/EncodingInfo.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/data/GroupHistoryRecord.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/data/GroupProperties.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaInfo.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaValidationRule.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaValidationRules.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaWithVersion.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/data/SerializationFormat.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/data/VersionInfo.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/AddedTo.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/CanRead.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/CodecTypesList.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/Compatibility.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/CreateGroupRequest.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/EncodingId.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/EncodingInfo.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/GetEncodingIdRequest.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/GroupHistory.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/GroupHistoryRecord.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/GroupProperties.java create mode 100644 
contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/ListGroupsResponse.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaInfo.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaValidationRule.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaValidationRules.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaVersionsList.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaWithVersion.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SerializationFormat.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/UpdateValidationRulesRequest.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/Valid.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/ValidateRequest.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/VersionInfo.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/ApiException.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/ApiOriginFilter.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/ApiResponseMessage.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/Bootstrap.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/GroupsApi.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/GroupsApiService.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/JacksonJsonProvider.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/NotFoundException.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/SchemasApi.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/SchemasApiService.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/StringUtil.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/factories/GroupsApiServiceFactory.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/factories/SchemasApiServiceFactory.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/impl/GroupsApiServiceImpl.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/impl/SchemasApiServiceImpl.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/transform/ModelHelper.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/v1/ApiV1.java create mode 100644 contract/src/main/swagger/README.md create mode 100644 contract/src/main/swagger/SchemaRegistry.yaml create mode 100644 
contract/src/main/swagger/server.config.json create mode 100644 contract/src/test/java/io/pravega/schemaregistry/contract/transform/ModelHelperTest.java diff --git a/build.gradle b/build.gradle index a1f1bb5d6..52125793d 100644 --- a/build.gradle +++ b/build.gradle @@ -34,7 +34,6 @@ buildscript { } } dependencies { - classpath group: 'com.google.protobuf', name:'protobuf-gradle-plugin', version: protobufGradlePlugin classpath "gradle.plugin.org.nosphere.apache:creadur-rat-gradle:0.3.0" classpath group: 'org.hidetake', name: 'gradle-ssh-plugin', version: gradleSshPluginVersion classpath group: 'gradle.plugin.com.github.spotbugs', name: 'spotbugs-gradle-plugin', version: spotbugsPluginVersion @@ -122,6 +121,71 @@ allprojects { } } +project('common') { + dependencies { + compile group: 'commons-io', name: 'commons-io', version: commonsioVersion + compile group: 'com.google.guava', name: 'guava', version: guavaVersion + compile group: 'io.pravega', name: 'pravega-common', version: pravegaVersion + //Do NOT add any additional dependencies here. + } + + javadoc { + title = "Common Libraries" + dependsOn delombok + source = delombok.outputDir + failOnError = false + options.addBooleanOption("Xdoclint:none", true) + } +} + +project('client') { + dependencies { + compile project(':common') + compile project(':contract') + compile group: 'org.glassfish.jersey.ext', name: 'jersey-proxy-client', version: jerseyVersion + compile group: 'org.glassfish.jersey.core', name: 'jersey-client', version: jerseyVersion + testCompile group: 'org.slf4j', name: 'log4j-over-slf4j', version: slf4jApiVersion + testCompile group: 'ch.qos.logback', name: 'logback-classic', version: qosLogbackVersion + testCompile group: 'io.pravega', name: 'pravega-test-testcommon', version: pravegaVersion + } + + javadoc { + title = "Registry Client" + dependsOn delombok + source = delombok.outputDir + failOnError = false + exclude "**/impl/**"; + options.addBooleanOption("Xdoclint:all,-reference", true) + } +} + +project('contract') { + dependencies { + compile project(':common') + testCompile group: 'org.slf4j', name: 'log4j-over-slf4j', version: slf4jApiVersion + testCompile group: 'ch.qos.logback', name: 'logback-classic', version: qosLogbackVersion + compile group: 'javax.servlet', name: 'javax.servlet-api', version: javaxServletApiVersion + compile(group: 'io.swagger', name : 'swagger-jersey2-jaxrs', version :swaggerJersey2JaxrsVersion) { + exclude group: 'com.google.guava', module: 'guava' + } + compile group: 'org.glassfish.jersey.containers', name: 'jersey-container-grizzly2-http', version: jerseyVersion + compile group: 'org.glassfish.jersey.inject', name: 'jersey-hk2', version: jerseyVersion + compile group: 'org.glassfish.jersey.media', name: 'jersey-media-json-jackson', version: jerseyVersion + compile group: 'javax.xml.bind', name: 'jaxb-api', version: jaxbVersion + compile group: 'org.glassfish.jaxb', name: 'jaxb-runtime', version: jaxbVersion + + } + + javadoc { + title = "Registry Contract" + dependsOn delombok + source = delombok.outputDir + failOnError = false + exclude "**/impl/**"; + options.addBooleanOption("Xdoclint:all,-reference", true) + } +} + def getProjectVersion() { String ver = schemaregistryVersion if (grgit && ver.contains("-SNAPSHOT")) { diff --git a/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java new file mode 100644 index 000000000..5ed87a3aa --- /dev/null +++ 
b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java @@ -0,0 +1,358 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.client; + +import io.pravega.schemaregistry.contract.data.EncodingId; +import io.pravega.schemaregistry.contract.data.EncodingInfo; +import io.pravega.schemaregistry.contract.data.GroupHistoryRecord; +import io.pravega.schemaregistry.contract.data.GroupProperties; +import io.pravega.schemaregistry.contract.data.SchemaInfo; +import io.pravega.schemaregistry.contract.data.SchemaValidationRules; +import io.pravega.schemaregistry.contract.data.SchemaWithVersion; +import io.pravega.schemaregistry.contract.data.VersionInfo; + +import javax.annotation.Nullable; +import java.util.Iterator; +import java.util.List; +import java.util.Map; + +import static io.pravega.schemaregistry.client.exceptions.RegistryExceptions.*; + +/** + * Defines a registry client for interacting with schema registry service. + * The implementation of this interface should provide atomicity and read-after-write-consistency guarantees for all the methods. + */ +public interface SchemaRegistryClient { + /** + * Adds a new group. A group refers to the name under which the schemas are registered. A group is identified by a + * unique name and has an associated set of group metadata {@link GroupProperties} and a list of codec types and a + * versioned history of schemas that were registered under the group. + * Add group is idempotent. If the group by the same name already exists the api will return false. + * + * @param groupId Id for the group that uniquely identifies the group. + * @param groupProperties groupProperties Group properties for the group. These include serialization format, validation rules, + * and flag to declare whether multiple schemas representing distinct object types can be + * registered with the group. Type identify objects of same type. Schema compatibility checks + * are always performed for schemas that share same {@link SchemaInfo#type}. + * Additionally, a user defined map of properties can be supplied. + * @return True indicates if the group was added successfully, false if it exists. + * @throws BadArgumentException if the group properties is rejected by service. + * @throws UnauthorizedException if the user is unauthorized. + */ + boolean addGroup(String groupId, GroupProperties groupProperties) throws BadArgumentException, UnauthorizedException; + + /** + * Removes a group identified by the groupId. This will remove all the codec types and schemas registered under the group. + * Remove group is idempotent. + * + * @param groupId Id for the group that uniquely identifies the group. + * @throws UnauthorizedException if the user is unauthorized. + */ + void removeGroup(String groupId) throws UnauthorizedException; + + /** + * List all groups that the user is authorized on. This returns an iterator where each element is a pair of group + * name and group properties. + * This iterator can be used to iterate over each element until all elements are exhausted. + * The implementation should guarantee that all groups added before and until the iterator returns + * {@link Iterator#hasNext()} = true can be iterated over. 
+ * + * @return map of names of groups with corresponding group properties for all groups. + * @throws UnauthorizedException if the user is unauthorized. + */ + Iterator> listGroups() throws UnauthorizedException; + + /** + * Get group properties for the group identified by the group id. + * + * {@link GroupProperties#serializationFormat} which identifies the serialization format is used to describe the schema. + * {@link GroupProperties#schemaValidationRules} sets the schema validation policy that needs to be enforced for evolving schemas. + * {@link GroupProperties#allowMultipleTypes} that specifies if multiple schemas are allowed to be registered in the group. + * Schemas are validated against existing schema versions that have the same {@link SchemaInfo#type}. + * {@link GroupProperties#properties} describes generic properties for a group. + * + * @param groupId Id for the group. + * @return Group properties which includes property like Serialization format and compatibility policy. + * @throws ResourceNotFoundException if group is not found. + * @throws UnauthorizedException if the user is unauthorized. + */ + GroupProperties getGroupProperties(String groupId) throws ResourceNotFoundException, UnauthorizedException; + + /** + * Update group's schema validation policy. If previous rules are not supplied, then the update to the rules will be + * performed unconditionally. However, if previous rules are supplied, then the update will be performed if and only if + * existing {@link GroupProperties#schemaValidationRules} match previous rules. + * + * @param groupId Id for the group. + * @param validationRules New Schema validation rules for the group. + * @param previousRules Previous schema validation rules. + * @return true if the update was accepted by the service, false if it was rejected because of precondition failure. + * Precondition failure can occur if previous rules were specified and they do not match the rules set on the group. + * @throws ResourceNotFoundException if group is not found. + * @throws UnauthorizedException if the user is unauthorized. + */ + boolean updateSchemaValidationRules(String groupId, SchemaValidationRules validationRules, @Nullable SchemaValidationRules previousRules) + throws ResourceNotFoundException, UnauthorizedException; + + /** + * Gets list of latest schemas for each object types registered under the group. Objects are identified by {@link SchemaInfo#type}. + * Schemas are retrieved atomically. So all schemas added before this call will be returned by this call. + * + * @param groupId Id for the group. + * @return List of different objects within the group. + * @throws ResourceNotFoundException if group is not found. + * @throws UnauthorizedException if the user is unauthorized. + */ + List getSchemas(String groupId) throws ResourceNotFoundException, UnauthorizedException; + + /** + * Registers schema to the group. Schemas are validated against existing schemas in the group that share the same + * {@link SchemaInfo#type}. + * If group is configured with {@link GroupProperties#allowMultipleTypes} then multiple schemas with distinct + * type {@link SchemaInfo#type} could be registered. + * All schemas with same type are assigned monotonically increasing version numbers. + * Add schema api is idempotent. If a schema is already registered, its version info is returned by the service. + * + * @param groupId Id for the group. + * @param schemaInfo Schema to add. + * @return versionInfo which uniquely identifies where the schema is added in the group. 
If schema is already registered, + * then the existing version info is returned. + * @throws SchemaValidationFailedException if the schema is deemed invalid by applying schema validation rules which may + * include comparing schema with existing schemas for compatibility in the desired direction. + * @throws SerializationMismatchException if serialization format does not match the group's configured serialization format. + * @throws MalformedSchemaException for known serialization formats, if the service is unable to parse the schema binary or + * for avro and protobuf if the {@link SchemaInfo#type} does not match the name of record/message in the binary. + * @throws ResourceNotFoundException if group is not found. + * @throws UnauthorizedException if the user is unauthorized. + */ + VersionInfo addSchema(String groupId, SchemaInfo schemaInfo) throws SchemaValidationFailedException, SerializationMismatchException, + MalformedSchemaException, ResourceNotFoundException, UnauthorizedException; + + /** + * Api to delete schema corresponding to the version. Users should be very careful while using this API in production, + * esp if the schema has already been used to write the data. + * Delete schema api is idempotent. + * This does a soft delete of the schema. So getSchemaVersion with the version info will still return the schema. + * However, the schema will not participate in any compatibility checks once deleted. + * It will not be included in listing schema versions for the group using apis like {@link SchemaRegistryClient#getSchemaVersions} + * or {@link SchemaRegistryClient#getGroupHistory} or {@link SchemaRegistryClient#getSchemas} or + * {@link SchemaRegistryClient#getLatestSchemaVersion} + * If add schema is called again using this deleted schema will result in a new version being assigned to it upon registration. + * + * @param groupId Id for the group. + * @param versionInfo Version which uniquely identifies schema within a group. + * @throws ResourceNotFoundException if group is not found. + * @throws UnauthorizedException if the user is unauthorized. + */ + void deleteSchemaVersion(String groupId, VersionInfo versionInfo) throws ResourceNotFoundException, UnauthorizedException; + + /** + * Api to delete schema corresponding to the schemaType and version. + * Users should be very careful while using this API in production, esp if the schema has already been used to write the data. + * Delete schema api is idempotent. + * This does a soft delete of the schema. So getSchemaVersion with the version info will still return the schema. + * However, the schema will not participate in any compatibility checks once deleted. + * It will not be included in listing schema versions for the group using apis like {@link SchemaRegistryClient#getSchemaVersions} + * or {@link SchemaRegistryClient#getGroupHistory} or {@link SchemaRegistryClient#getSchemas} or + * {@link SchemaRegistryClient#getLatestSchemaVersion} + * If add schema is called again using this deleted schema will result in a new version being assigned to upon registration. + * + * @param groupId Id for the group. + * @param schemaType schemaType that identifies the type of object the schema represents. This should be same as the + * value specified in {@link SchemaInfo#type}. + * @param version Version number which uniquely identifies schema for the schemaType within a group. + * @throws ResourceNotFoundException if group is not found. + * @throws UnauthorizedException if the user is unauthorized. 
+ */ + void deleteSchemaVersion(String groupId, String schemaType, int version) throws ResourceNotFoundException, UnauthorizedException; + + /** + * Gets schema corresponding to the version. + * + * @param groupId Id for the group. + * @param versionInfo Version which uniquely identifies schema within a group. + * @return Schema info corresponding to the version info. + * @throws ResourceNotFoundException if group or version is not found. + * @throws UnauthorizedException if the user is unauthorized. + */ + SchemaInfo getSchemaForVersion(String groupId, VersionInfo versionInfo) throws ResourceNotFoundException, UnauthorizedException; + + /** + * Gets schema corresponding to the version. + * + * @param groupId Id for the group. + * @param schemaType schemaType as specified in the {@link SchemaInfo#type} while registering the schema. + * @param version Version which uniquely identifies schema of schemaType within a group. + * @return Schema info corresponding to the version info. + * @throws ResourceNotFoundException if group or version is not found. + * @throws UnauthorizedException if the user is unauthorized. + */ + SchemaInfo getSchemaForVersion(String groupId, String schemaType, int version) throws ResourceNotFoundException, UnauthorizedException; + + /** + * Gets encoding info against the requested encoding Id. The purpose of encoding info is to uniquely identify the encoding + * used on the data at rest. The encoding covers two parts - + * 1. Schema that defines the structure of the data and is used for serialization. A specific schema version registered with + * registry service is uniquely identified by the corresponding VersionInfo object. + * 2. CodecType that is used to encode the serialized data. This would typically be some compression. The codecType + * and schema should both be registered with the service and service will generate a unique identifier for each such + * pair. + * Encoding Info uniquely identifies a combination of a versionInfo and codecType. + * EncodingInfo also includes the {@link SchemaInfo} identified by the {@link VersionInfo}. + * + * @param groupId Id for the group. + * @param encodingId Encoding id that uniquely identifies a schema within a group. + * @return Encoding info corresponding to the encoding id. + * @throws ResourceNotFoundException if group or encoding id is not found. + * @throws UnauthorizedException if the user is unauthorized. + */ + EncodingInfo getEncodingInfo(String groupId, EncodingId encodingId) throws ResourceNotFoundException, UnauthorizedException; + + /** + * Gets an encoding id that uniquely identifies a combination of Schema version and codec type. + * This encoding id is a 4 byte integer and it can be used to tag the data which is serialized and encoded using the + * schema version and codecType identified by this encoding id. + * This api is idempotent. And if an encoding id is generated for a version and codec pair, subsequent requests to this + * api will return the generated encoding id. + * If the schema identified by the version is deleted using {@link SchemaRegistryClient#deleteSchemaVersion} api, + * then if the encoding id was already generated for the pair of schema version and codec, then it will be returned. + * However, if no encoding id for the versioninfo and codec pair was generated and the schema version was deleted, + * then any call to getEncodingId using the deleted versionInfo will throw ResourceNotFoundException. + * + * @param groupId Id for the group. 
+ * @param versionInfo version of schema + * @param codecType codec type + * @return Encoding id for the pair of version and codec type. + * @throws CodecTypeNotRegisteredException if codectype is not registered with the group. Use {@link SchemaRegistryClient#addCodecType} + * @throws ResourceNotFoundException if group or version info is not found. + * @throws UnauthorizedException if the user is unauthorized. + */ + EncodingId getEncodingId(String groupId, VersionInfo versionInfo, String codecType) + throws CodecTypeNotRegisteredException, ResourceNotFoundException, UnauthorizedException; + + /** + * Gets latest schema and version for the group (or type, if specified). + * To get latest schema version for a specific type identified by {@link SchemaInfo#type}, provide the type. + * Otherwise if the group is configured to allow multiple schemas {@link GroupProperties#allowMultipleTypes}, then + * and type is not specified, then last schema added to the group across all types will be returned. + * + * @param groupId Id for the group. + * @param schemaType Type of object identified by {@link SchemaInfo#type}. + * + * @return Schema with version for the last schema that was added to the group (or type). + * @throws ResourceNotFoundException if group is not found. + * @throws UnauthorizedException if the user is unauthorized. + */ + SchemaWithVersion getLatestSchemaVersion(String groupId, @Nullable String schemaType) + throws ResourceNotFoundException, UnauthorizedException; + + /** + * Gets version corresponding to the schema. + * For each schema type {@link SchemaInfo#type} and {@link SchemaInfo#serializationFormat} a versionInfo object uniquely + * identifies each distinct {@link SchemaInfo#schemaData}. + * + * @param groupId Id for the group. + * @param schemaInfo SchemaInfo that describes format and structure. + * @return VersionInfo corresponding to schema. + * @throws ResourceNotFoundException if group is not found or if schema is not registered. + * @throws UnauthorizedException if the user is unauthorized. + */ + VersionInfo getVersionForSchema(String groupId, SchemaInfo schemaInfo) throws ResourceNotFoundException, UnauthorizedException; + + /** + * Gets all schemas with corresponding versions for the group (or type, if specified). + * For groups configured with {@link GroupProperties#allowMultipleTypes}, the type {@link SchemaInfo#type} should be + * supplied to view schemas specific to a type. if type is null, all schemas in the group are returned. + * The order in the list matches the order in which schemas were evolved within the group. + * + * @param groupId Id for the group. + * @param schemaType type of object identified by {@link SchemaInfo#type}. + * @return Ordered list of schemas with versions and validation rules for all schemas in the group. + * @throws ResourceNotFoundException if group is not found. + * @throws UnauthorizedException if the user is unauthorized. + */ + List getSchemaVersions(String groupId, @Nullable String schemaType) throws ResourceNotFoundException, UnauthorizedException; + + /** + * Checks whether given schema is valid by applying validation rules against previous schemas in the group + * subject to current {@link GroupProperties#schemaValidationRules} policy. + * This api performs exactly the same validations as {@link SchemaRegistryClient#addSchema(String, SchemaInfo)} + * but without registering the schema. 
This is primarily intended to be used during schema development phase to validate that + * the changes to schema are in compliance with validation rules for the group. + * + * @param groupId Id for the group. + * @param schemaInfo Schema to check for validity. + * @return A schema is valid if it passes all the {@link GroupProperties#schemaValidationRules}. The rule supported + * presently, is Compatibility. If desired compatibility is satisfied by the schema then this api returns true, false otherwise. + * @throws ResourceNotFoundException if group is not found. + * @throws UnauthorizedException if the user is unauthorized. + */ + boolean validateSchema(String groupId, SchemaInfo schemaInfo) throws ResourceNotFoundException, UnauthorizedException; + + /** + * Checks whether given schema can be used to read by validating it for reads against one or more existing schemas in the group + * subject to current {@link GroupProperties#schemaValidationRules} policy. + * + * @param groupId Id for the group. + * @param schemaInfo Schema to check to be used for reads. + * @return True if it can be used to read, false otherwise. + * @throws ResourceNotFoundException if group is not found. + * @throws UnauthorizedException if the user is unauthorized. + */ + boolean canReadUsing(String groupId, SchemaInfo schemaInfo) throws ResourceNotFoundException, UnauthorizedException; + + /** + * List of codec types used for encoding in the group. + * + * @param groupId Id for the group. + * @return List of codec types used for encoding in the group. + * @throws ResourceNotFoundException if group is not found. + * @throws UnauthorizedException if the user is unauthorized. + */ + List getCodecTypes(String groupId) throws ResourceNotFoundException, UnauthorizedException; + + /** + * Add new codec type to be used in encoding in the group. + * + * @param groupId Id for the group. + * @param codecType codec type. + * @throws ResourceNotFoundException if group is not found. + * @throws UnauthorizedException if the user is unauthorized. + */ + void addCodecType(String groupId, String codecType) throws ResourceNotFoundException, UnauthorizedException; + + /** + * Gets complete schema evolution history of the group with schemas, versions, rules and time for the group. + * The order in the list matches the order in which schemas were evolved within the group. + * This call is atomic and will get a consistent view at the time when the request is processed on the service. + * So all schemas that were added before this call are returned and all schemas that were deleted before this call + * are excluded. + * + * @param groupId Id for the group. + * @return Ordered list of schemas with versions and validation rules for all schemas in the group. + * @throws ResourceNotFoundException if group is not found. + * @throws UnauthorizedException if the user is unauthorized. + */ + List getGroupHistory(String groupId) throws ResourceNotFoundException, UnauthorizedException; + + /** + * Finds all groups and corresponding version info for the groups where the supplied schema has been registered. + * It is important to note that the same schema type could be part of multiple group, however in each group it + * may have gone through a separate evolution. This api simply identifies all groups where the specific schema + * (type, format and binary) is used. + * The user defined {@link SchemaInfo#properties} is not used for comparison. + * + * @param schemaInfo Schema info to find references for. 
+ * @return Map of group Id to versionInfo identifier for the schema in that group. + * @throws ResourceNotFoundException if schema is not found. + * @throws UnauthorizedException if the user is unauthorized. + */ + Map getSchemaReferences(SchemaInfo schemaInfo) throws ResourceNotFoundException, UnauthorizedException; +} diff --git a/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientConfig.java b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientConfig.java new file mode 100644 index 000000000..66bb39921 --- /dev/null +++ b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientConfig.java @@ -0,0 +1,31 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.client; + +import lombok.Builder; +import lombok.Data; + +import java.net.URI; + +/** + * Registry client configuration used to create registry client. + */ +@Data +@Builder +public class SchemaRegistryClientConfig { + /** + * URI for connecting with registry client. + */ + private final URI schemaRegistryUri; + + private SchemaRegistryClientConfig(URI schemaRegistryUri) { + this.schemaRegistryUri = schemaRegistryUri; + } +} diff --git a/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientFactory.java b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientFactory.java new file mode 100644 index 000000000..caba3d815 --- /dev/null +++ b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientFactory.java @@ -0,0 +1,25 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.client; + +/** + * Factory class for creating Schema Registry client. + */ +public class SchemaRegistryClientFactory { + /** + * Factory method to create Schema Registry Client. + * + * @param config Configuration for creating registry client. + * @return SchemaRegistry client implementation + */ + public static SchemaRegistryClient createRegistryClient(SchemaRegistryClientConfig config) { + return new SchemaRegistryClientImpl(config.getSchemaRegistryUri()); + } +} diff --git a/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientImpl.java b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientImpl.java new file mode 100644 index 000000000..5e46b69c1 --- /dev/null +++ b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientImpl.java @@ -0,0 +1,461 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
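[Illustrative usage sketch, not part of the patch.] The SchemaRegistryClient interface, SchemaRegistryClientConfig, and SchemaRegistryClientFactory introduced above lend themselves to a short usage sketch. The code below is a sketch only: it relies on the Lombok-generated builder of SchemaRegistryClientConfig, takes GroupProperties and SchemaInfo as pre-built arguments because their construction helpers are not shown in this patch, and the helper name registerAndGetEncodingId is hypothetical.

import io.pravega.schemaregistry.client.SchemaRegistryClient;
import io.pravega.schemaregistry.client.SchemaRegistryClientConfig;
import io.pravega.schemaregistry.client.SchemaRegistryClientFactory;
import io.pravega.schemaregistry.contract.data.EncodingId;
import io.pravega.schemaregistry.contract.data.GroupProperties;
import io.pravega.schemaregistry.contract.data.SchemaInfo;
import io.pravega.schemaregistry.contract.data.VersionInfo;

import java.net.URI;

public class RegistryClientUsageSketch {
    /**
     * Hypothetical helper: registers a schema under a group (creating the group if needed)
     * and returns the encoding id for the (version, codecType) pair.
     * GroupProperties and SchemaInfo are passed in pre-built, since their construction
     * is defined elsewhere in this change.
     */
    public static EncodingId registerAndGetEncodingId(URI registryUri,
                                                      String groupId,
                                                      GroupProperties groupProperties,
                                                      SchemaInfo schemaInfo,
                                                      String codecType) {
        // Assumes the Lombok @Builder shown on SchemaRegistryClientConfig.
        SchemaRegistryClientConfig config = SchemaRegistryClientConfig.builder()
                                                                      .schemaRegistryUri(registryUri)
                                                                      .build();
        SchemaRegistryClient client = SchemaRegistryClientFactory.createRegistryClient(config);

        // addGroup is idempotent: it returns false if the group already exists.
        client.addGroup(groupId, groupProperties);

        // addSchema is idempotent: an already-registered schema returns its existing version.
        VersionInfo version = client.addSchema(groupId, schemaInfo);

        // The codec type must be registered with the group before an encoding id
        // can be generated for it (otherwise getEncodingId throws CodecTypeNotRegisteredException).
        client.addCodecType(groupId, codecType);
        return client.getEncodingId(groupId, version, codecType);
    }
}
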
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.client; + +import com.google.common.annotations.VisibleForTesting; +import io.pravega.common.Exceptions; +import io.pravega.common.util.Retry; +import io.pravega.schemaregistry.common.ContinuationTokenIterator; +import io.pravega.schemaregistry.contract.data.EncodingId; +import io.pravega.schemaregistry.contract.data.EncodingInfo; +import io.pravega.schemaregistry.contract.data.GroupHistoryRecord; +import io.pravega.schemaregistry.contract.data.GroupProperties; +import io.pravega.schemaregistry.contract.data.SchemaInfo; +import io.pravega.schemaregistry.contract.data.SchemaValidationRules; +import io.pravega.schemaregistry.contract.data.SchemaWithVersion; +import io.pravega.schemaregistry.contract.data.VersionInfo; +import io.pravega.schemaregistry.contract.generated.rest.model.CanRead; +import io.pravega.schemaregistry.contract.generated.rest.model.CodecTypesList; +import io.pravega.schemaregistry.contract.generated.rest.model.CreateGroupRequest; +import io.pravega.schemaregistry.contract.generated.rest.model.GetEncodingIdRequest; +import io.pravega.schemaregistry.contract.generated.rest.model.ListGroupsResponse; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaVersionsList; +import io.pravega.schemaregistry.contract.generated.rest.model.UpdateValidationRulesRequest; +import io.pravega.schemaregistry.contract.generated.rest.model.Valid; +import io.pravega.schemaregistry.contract.generated.rest.model.ValidateRequest; +import io.pravega.schemaregistry.contract.transform.ModelHelper; +import io.pravega.schemaregistry.contract.v1.ApiV1; +import org.glassfish.jersey.client.ClientConfig; +import org.glassfish.jersey.client.proxy.WebResourceFactory; + +import javax.annotation.Nullable; +import javax.ws.rs.client.Client; +import javax.ws.rs.client.ClientBuilder; +import javax.ws.rs.core.Response; +import java.net.URI; +import java.util.AbstractMap; +import java.util.Collection; +import java.util.Comparator; +import java.util.Iterator; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.function.Function; +import java.util.function.Supplier; +import java.util.stream.Collectors; + +import static io.pravega.schemaregistry.client.exceptions.RegistryExceptions.*; + +public class SchemaRegistryClientImpl implements SchemaRegistryClient { + private static final Retry.RetryAndThrowConditionally RETRY = Retry + .withExpBackoff(100, 2, 10, 1000) + .retryWhen(x -> Exceptions.unwrap(x) instanceof ConnectionException); + private static final int GROUP_LIMIT = 100; + private static final int SCHEMA_LIMIT = 10; + + private final ApiV1.GroupsApi groupProxy; + private final ApiV1.SchemasApi schemaProxy; + + SchemaRegistryClientImpl(URI uri) { + Client client = ClientBuilder.newClient(new ClientConfig()); + this.groupProxy = WebResourceFactory.newResource(ApiV1.GroupsApi.class, client.target(uri)); + this.schemaProxy = WebResourceFactory.newResource(ApiV1.SchemasApi.class, client.target(uri)); + } + + @VisibleForTesting + SchemaRegistryClientImpl(ApiV1.GroupsApi groupProxy) { + this(groupProxy, null); + } + + @VisibleForTesting + SchemaRegistryClientImpl(ApiV1.GroupsApi groupProxy, ApiV1.SchemasApi schemaProxy) { + this.groupProxy = groupProxy; + this.schemaProxy = schemaProxy; + } + + @Override + public boolean addGroup(String groupId, GroupProperties groupProperties) { + return withRetry(() -> { + CreateGroupRequest 
request = new CreateGroupRequest().groupName(groupId).groupProperties(ModelHelper.encode(groupProperties)); + Response response = groupProxy.createGroup(request); + switch (Response.Status.fromStatusCode(response.getStatus())) { + case CREATED: + return true; + case CONFLICT: + return false; + case BAD_REQUEST: + throw new BadArgumentException("Group properties invalid. Verify that schema validation rules include compatibility."); + default: + throw new InternalServerError("Internal Service error. Failed to add the group."); + } + }); + } + + @Override + public void removeGroup(String groupId) { + withRetry(() -> { + Response response = groupProxy.deleteGroup(groupId); + switch (Response.Status.fromStatusCode(response.getStatus())) { + case NO_CONTENT: + return; + default: + throw new InternalServerError("Internal Service error. Failed to remove the group."); + } + }); + } + + @Override + public Iterator> listGroups() { + final Function>>> function = + continuationToken -> { + ListGroupsResponse entity = getListGroupsResponse(continuationToken); + List> map = new LinkedList<>(); + for (Map.Entry entry : entity.getGroups().entrySet()) { + ModelHelper.decode(entry.getValue().getSerializationFormat()); + map.add(new AbstractMap.SimpleEntry<>(entry.getKey(), ModelHelper.decode(entry.getValue()))); + } + return new AbstractMap.SimpleEntry<>(entity.getContinuationToken(), map); + }; + + return new ContinuationTokenIterator<>(function, null); + } + + private ListGroupsResponse getListGroupsResponse(String continuationToken) { + return withRetry(() -> { + Response response = groupProxy.listGroups(continuationToken, GROUP_LIMIT); + switch (Response.Status.fromStatusCode(response.getStatus())) { + case OK: + return response.readEntity(ListGroupsResponse.class); + default: + throw new InternalServerError("Internal Service error. Failed to list groups."); + } + }); + } + + @Override + public GroupProperties getGroupProperties(String groupId) { + return withRetry(() -> { + Response response = groupProxy.getGroupProperties(groupId); + switch (Response.Status.fromStatusCode(response.getStatus())) { + case OK: + return ModelHelper.decode(response.readEntity(io.pravega.schemaregistry.contract.generated.rest.model.GroupProperties.class)); + case NOT_FOUND: + throw new ResourceNotFoundException("Group not found."); + default: + throw new InternalServerError("Internal Service error. Failed to list groups."); + } + }); + } + + @Override + public boolean updateSchemaValidationRules(String groupId, SchemaValidationRules validationRules, @Nullable SchemaValidationRules previousRules) { + return withRetry(() -> { + UpdateValidationRulesRequest request = new UpdateValidationRulesRequest() + .validationRules(ModelHelper.encode(validationRules)); + if (previousRules != null) { + request.setPreviousRules(ModelHelper.encode(previousRules)); + } + + Response response = groupProxy.updateSchemaValidationRules(groupId, request); + switch (Response.Status.fromStatusCode(response.getStatus())) { + case CONFLICT: + return false; + case NOT_FOUND: + throw new ResourceNotFoundException("Group not found."); + case OK: + return true; + default: + throw new InternalServerError("Internal Service error. 
Failed to update schema validation rules."); + } + }); + } + + @Override + public List getSchemas(String groupId) { + return latestSchemas(groupId, null); + } + + private List latestSchemas(String groupId, String type) { + return withRetry(() -> { + Response response = groupProxy.getSchemas(groupId, type); + SchemaVersionsList objectsList = response.readEntity(SchemaVersionsList.class); + switch (Response.Status.fromStatusCode(response.getStatus())) { + case OK: + return objectsList.getSchemas().stream().map(ModelHelper::decode).collect(Collectors.toList()); + case NOT_FOUND: + throw new ResourceNotFoundException("Group not found."); + default: + throw new InternalServerError("Internal Service error. Failed to get object types."); + } + }); + } + + @Override + public VersionInfo addSchema(String groupId, SchemaInfo schemaInfo) { + return withRetry(() -> { + Response response = groupProxy.addSchema(groupId, ModelHelper.encode(schemaInfo)); + switch (Response.Status.fromStatusCode(response.getStatus())) { + case CREATED: + return ModelHelper.decode(response.readEntity(io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo.class)); + case NOT_FOUND: + throw new ResourceNotFoundException("Group not found."); + case CONFLICT: + throw new SchemaValidationFailedException("Schema is incompatible."); + case EXPECTATION_FAILED: + throw new SerializationMismatchException("Serialization format disallowed."); + case BAD_REQUEST: + throw new MalformedSchemaException("Schema is malformed. Verify the schema data and type"); + default: + throw new InternalServerError("Internal Service error. Failed to addSchema."); + } + }); + } + + @Override + public void deleteSchemaVersion(String groupId, VersionInfo versionInfo) { + withRetry(() -> { + Response response = groupProxy.deleteSchemaFromVersionOrdinal(groupId, versionInfo.getOrdinal()); + if (response.getStatus() == Response.Status.NOT_FOUND.getStatusCode()) { + throw new ResourceNotFoundException("Group not found."); + } else if (response.getStatus() != Response.Status.NO_CONTENT.getStatusCode()) { + throw new InternalServerError("Internal Service error. Failed to get schema."); + } + }); + } + + @Override + public void deleteSchemaVersion(String groupId, String schemaType, int version) { + withRetry(() -> { + Response response = groupProxy.deleteSchemaVersion(groupId, schemaType, version); + if (response.getStatus() == Response.Status.NOT_FOUND.getStatusCode()) { + throw new ResourceNotFoundException("Group not found."); + } else if (response.getStatus() != Response.Status.NO_CONTENT.getStatusCode()) { + throw new InternalServerError("Internal Service error. Failed to get schema."); + } + }); + } + + @Override + public SchemaInfo getSchemaForVersion(String groupId, VersionInfo versionInfo) { + return withRetry(() -> { + Response response = groupProxy.getSchemaFromVersionOrdinal(groupId, versionInfo.getOrdinal()); + switch (Response.Status.fromStatusCode(response.getStatus())) { + case OK: + return ModelHelper.decode(response.readEntity(io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo.class)); + case NOT_FOUND: + throw new ResourceNotFoundException("Schema not found."); + default: + throw new InternalServerError("Internal Service error. 
Failed to get schema."); + } + }); + } + + @Override + public SchemaInfo getSchemaForVersion(String groupId, String schemaType, int version) { + return withRetry(() -> { + Response response = groupProxy.getSchemaFromVersion(groupId, schemaType, version); + switch (Response.Status.fromStatusCode(response.getStatus())) { + case OK: + return ModelHelper.decode(response.readEntity(io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo.class)); + case NOT_FOUND: + throw new ResourceNotFoundException("Schema not found."); + default: + throw new InternalServerError("Internal Service error. Failed to get schema."); + } + }); + } + + @Override + public EncodingInfo getEncodingInfo(String groupId, EncodingId encodingId) { + return withRetry(() -> { + Response response = groupProxy.getEncodingInfo(groupId, encodingId.getId()); + switch (Response.Status.fromStatusCode(response.getStatus())) { + case OK: + return ModelHelper.decode(response.readEntity(io.pravega.schemaregistry.contract.generated.rest.model.EncodingInfo.class)); + case NOT_FOUND: + throw new ResourceNotFoundException("Encoding not found."); + default: + throw new InternalServerError("Internal Service error. Failed to get encoding info."); + } + }); + } + + @Override + public EncodingId getEncodingId(String groupId, VersionInfo versionInfo, String codecType) { + return withRetry(() -> { + GetEncodingIdRequest getEncodingIdRequest = new GetEncodingIdRequest(); + getEncodingIdRequest.codecType(codecType) + .versionInfo(ModelHelper.encode(versionInfo)); + Response response = groupProxy.getEncodingId(groupId, getEncodingIdRequest); + switch (Response.Status.fromStatusCode(response.getStatus())) { + case OK: + return ModelHelper.decode(response.readEntity(io.pravega.schemaregistry.contract.generated.rest.model.EncodingId.class)); + case NOT_FOUND: + throw new ResourceNotFoundException("getEncodingId failed. Either Group or Version does not exist."); + case PRECONDITION_FAILED: + throw new CodecTypeNotRegisteredException(String.format("Codec type %s not registered.", codecType)); + default: + throw new InternalServerError("Internal Service error. Failed to get encoding info."); + } + }); + } + + @Override + public SchemaWithVersion getLatestSchemaVersion(String groupId, @Nullable String schemaType) { + List list = latestSchemas(groupId, schemaType); + if (schemaType == null) { + return list.stream().max(Comparator.comparingInt(x -> x.getVersionInfo().getOrdinal())).orElse(null); + } else { + return list.get(0); + } + } + + @Override + public List getSchemaVersions(String groupId, @Nullable String schemaType) { + return withRetry(() -> { + Response response = groupProxy.getSchemaVersions(groupId, schemaType); + switch (Response.Status.fromStatusCode(response.getStatus())) { + case OK: + SchemaVersionsList schemaList = response.readEntity(SchemaVersionsList.class); + return schemaList.getSchemas().stream().map(ModelHelper::decode).collect(Collectors.toList()); + case NOT_FOUND: + throw new ResourceNotFoundException("getSchemaVersions failed. Group does not exist."); + default: + throw new InternalServerError("Internal Service error. 
Failed to get schema versions for group."); + } + }); + } + + @Override + public List getGroupHistory(String groupId) { + return withRetry(() -> { + Response response = groupProxy.getGroupHistory(groupId); + switch (Response.Status.fromStatusCode(response.getStatus())) { + case OK: + io.pravega.schemaregistry.contract.generated.rest.model.GroupHistory history = response.readEntity(io.pravega.schemaregistry.contract.generated.rest.model.GroupHistory.class); + return history.getHistory().stream().map(ModelHelper::decode).collect(Collectors.toList()); + case NOT_FOUND: + throw new ResourceNotFoundException("getGroupHistory failed. Either Group or Version does not exist."); + default: + throw new InternalServerError("Internal Service error. Failed to get schema evolution history for group."); + } + }); + } + + @Override + public Map getSchemaReferences(SchemaInfo schemaInfo) throws ResourceNotFoundException, UnauthorizedException { + return withRetry(() -> { + Response response = schemaProxy.getSchemaReferences(ModelHelper.encode(schemaInfo)); + switch (Response.Status.fromStatusCode(response.getStatus())) { + case OK: + io.pravega.schemaregistry.contract.generated.rest.model.AddedTo addedTo = response + .readEntity(io.pravega.schemaregistry.contract.generated.rest.model.AddedTo.class); + return addedTo.getGroups().entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, x -> ModelHelper.decode(x.getValue()))); + case NOT_FOUND: + throw new ResourceNotFoundException("getSchemaReferences failed. Either Group or Version does not exist."); + default: + throw new InternalServerError("Internal Service error. Failed to get schema evolution history for group."); + } + }); + } + + @Override + public VersionInfo getVersionForSchema(String groupId, SchemaInfo schema) { + return withRetry(() -> { + io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo schemaInfo = ModelHelper.encode(schema); + + Response response = groupProxy.getSchemaVersion(groupId, schemaInfo); + switch (Response.Status.fromStatusCode(response.getStatus())) { + case OK: + return ModelHelper.decode(response.readEntity(io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo.class)); + case NOT_FOUND: + throw new ResourceNotFoundException("Schema not found."); + default: + throw new InternalServerError("Internal Service error. 
Failed to get schema version."); + } + }); + } + + @Override + public boolean validateSchema(String groupId, SchemaInfo schemaInfo) { + return withRetry(() -> { + ValidateRequest validateRequest = new ValidateRequest() + .schemaInfo(ModelHelper.encode(schemaInfo)); + Response response = groupProxy.validate(groupId, validateRequest); + switch (Response.Status.fromStatusCode(response.getStatus())) { + case OK: + return response.readEntity(Valid.class).isValid(); + case NOT_FOUND: + throw new ResourceNotFoundException("Group not found."); + default: + throw new InternalServerError("Internal Service error."); + } + }); + } + + @Override + public boolean canReadUsing(String groupId, SchemaInfo schemaInfo) { + return withRetry(() -> { + io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo request = ModelHelper.encode(schemaInfo); + Response response = groupProxy.canRead(groupId, request); + switch (Response.Status.fromStatusCode(response.getStatus())) { + case OK: + return response.readEntity(CanRead.class).isCompatible(); + case NOT_FOUND: + throw new ResourceNotFoundException("Schema not found."); + default: + throw new InternalServerError("Internal Service error."); + } + }); + } + + @Override + public List getCodecTypes(String groupId) { + return withRetry(() -> { + Response response = groupProxy.getCodecTypesList(groupId); + CodecTypesList list = response.readEntity(CodecTypesList.class); + switch (Response.Status.fromStatusCode(response.getStatus())) { + case OK: + return list.getCodecTypes(); + case NOT_FOUND: + throw new ResourceNotFoundException("Group not found."); + default: + throw new InternalServerError("Failed to get codecTypes. Internal server error."); + } + }); + } + + @Override + public void addCodecType(String groupId, String codecType) { + withRetry(() -> { + Response response = groupProxy.addCodecType(groupId, codecType); + + switch (Response.Status.fromStatusCode(response.getStatus())) { + case CREATED: + return; + case NOT_FOUND: + throw new ResourceNotFoundException("Group not found."); + default: + throw new InternalServerError("Failed to add codec type. Internal server error."); + } + }); + } + + private T withRetry(Supplier supplier) { + return RETRY.run(supplier::get); + } + + private void withRetry(Runnable runnable) { + RETRY.run(() -> { + runnable.run(); + return null; + }); + } +} diff --git a/client/src/main/java/io/pravega/schemaregistry/client/exceptions/RegistryExceptions.java b/client/src/main/java/io/pravega/schemaregistry/client/exceptions/RegistryExceptions.java new file mode 100644 index 000000000..7bbb28966 --- /dev/null +++ b/client/src/main/java/io/pravega/schemaregistry/client/exceptions/RegistryExceptions.java @@ -0,0 +1,187 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.client.exceptions; + +import com.google.common.base.Preconditions; +import io.pravega.schemaregistry.contract.data.GroupProperties; +import io.pravega.schemaregistry.contract.data.SchemaInfo; +import lombok.Getter; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +@Getter +public class RegistryExceptions extends RuntimeException { + /** + * Enum to describe the type of exception. 
+ */ + public enum Type { + UNAUTHORIZED, + BAD_ARGUMENT, + PRECONDITION_FAILED, + CODEC_NOT_FOUND, + MALFORMED_SCHEMA, + INCOMPATIBLE_SCHEMA, + RESOURCE_NOT_FOUND, + SERIALIZATION_FORMAT_MISMATCH, + CONNECTION_ERROR, + INTERNAL_SERVER_ERROR + } + + /** + * Trait to identify whether an exception is retryable or not. + */ + public interface RetryableException { + } + + /** + * Construct a StoreException. + * + * @param errorMessage The detailed error message. + */ + public RegistryExceptions(final String errorMessage) { + super(errorMessage); + } + + /** + * Factory method to construct Store exceptions. + * + * @param type Type of Exception. + * @param errorMessage The detailed error message. + * @return Instance of type of StoreException. + */ + public static RegistryExceptions create(final Type type, final String errorMessage) { + Preconditions.checkArgument(errorMessage != null && !errorMessage.isEmpty(), + "Either cause or errorMessage should be non-empty"); + RegistryExceptions exception; + switch (type) { + case UNAUTHORIZED: + exception = new UnauthorizedException(errorMessage); + break; + case BAD_ARGUMENT: + exception = new BadArgumentException(errorMessage); + break; + case PRECONDITION_FAILED: + exception = new PreconditionFailedException(errorMessage); + break; + case CODEC_NOT_FOUND: + exception = new CodecTypeNotRegisteredException(errorMessage); + break; + case INCOMPATIBLE_SCHEMA: + exception = new SchemaValidationFailedException(errorMessage); + break; + case RESOURCE_NOT_FOUND: + exception = new ResourceNotFoundException(errorMessage); + break; + case SERIALIZATION_FORMAT_MISMATCH: + exception = new SerializationMismatchException(errorMessage); + break; + case CONNECTION_ERROR: + exception = new ConnectionException(errorMessage); + break; + case INTERNAL_SERVER_ERROR: + exception = new InternalServerError(errorMessage); + break; + default: + throw new IllegalArgumentException("Invalid exception type"); + } + return exception; + } + + /** + * User is unauthorized to perform requested action. + */ + public static class UnauthorizedException extends RegistryExceptions { + public UnauthorizedException(String errorMessage) { + super(errorMessage); + } + } + + /** + * Service rejected the supplied arguments with bad argument exception. + */ + public static class BadArgumentException extends RegistryExceptions { + public BadArgumentException(String errorMessage) { + super(errorMessage); + } + } + + /** + * Service rejected the request because the expected precondition for the requested action was not satisfied. + */ + public static class PreconditionFailedException extends RegistryExceptions { + public PreconditionFailedException(String errorMessage) { + super(errorMessage); + } + } + + /** + * The requested codecType is not added to the group. + */ + public static class CodecTypeNotRegisteredException extends RegistryExceptions { + public CodecTypeNotRegisteredException(String errorMessage) { + super(errorMessage); + } + } + + /** + * Schema is malformed. Verify the schema data and type. + */ + public static class MalformedSchemaException extends RegistryExceptions { + public MalformedSchemaException(String errorMessage) { + super(errorMessage); + } + } + + /** + * The schema validation failed as it was validated against the ValidationRules set for the group. 
+ */ + public static class SchemaValidationFailedException extends RegistryExceptions { + public SchemaValidationFailedException(String errorMessage) { + super(errorMessage); + } + } + + /** + * Requested resource not found. + */ + public static class ResourceNotFoundException extends RegistryExceptions { + public ResourceNotFoundException(String errorMessage) { + super(errorMessage); + } + } + + /** + * Serialization format is not allowed for the group. Check {@link SchemaInfo#serializationFormat} with + * {@link GroupProperties#serializationFormat}. + */ + public static class SerializationMismatchException extends RegistryExceptions { + public SerializationMismatchException(String errorMessage) { + super(errorMessage); + } + } + + /** + * Exception type due to failure in connecting to the service. + */ + public static class ConnectionException extends RegistryExceptions implements RetryableException { + public ConnectionException(String errorMessage) { + super(errorMessage); + } + } + + /** + * The request processing failed on the service. + */ + public static class InternalServerError extends RegistryExceptions implements RetryableException { + public InternalServerError(String errorMessage) { + super(errorMessage); + } + } +} diff --git a/client/src/test/java/io/pravega/schemaregistry/client/TestSchemaRegistryClient.java b/client/src/test/java/io/pravega/schemaregistry/client/TestSchemaRegistryClient.java new file mode 100644 index 000000000..f4427b183 --- /dev/null +++ b/client/src/test/java/io/pravega/schemaregistry/client/TestSchemaRegistryClient.java @@ -0,0 +1,612 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
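// [Editor's note] An illustrative sketch, not part of this patch, of how a caller might react to the exception
// hierarchy defined above: subclasses carrying the RetryableException trait (ConnectionException, InternalServerError)
// are retried, everything else is rethrown. The client implementation already wraps calls in withRetry, so this only
// demonstrates the marker interface; the helper name and retry budget are assumptions.
import io.pravega.schemaregistry.client.exceptions.RegistryExceptions;

import java.util.function.Supplier;

class RetryOnRegistryExceptionSketch {
    // attempts must be >= 1; rethrows the last retryable failure once the budget is exhausted.
    static <T> T callWithRetry(Supplier<T> call, int attempts) {
        RegistryExceptions last = null;
        for (int i = 0; i < attempts; i++) {
            try {
                return call.get();
            } catch (RegistryExceptions e) {
                if (e instanceof RegistryExceptions.RetryableException) {
                    last = e;          // transient: connection error or internal server error
                } else {
                    throw e;           // e.g. ResourceNotFoundException, PreconditionFailedException
                }
            }
        }
        throw last;
    }
}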
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.client; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Lists; +import io.pravega.schemaregistry.contract.data.Compatibility; +import io.pravega.schemaregistry.contract.data.EncodingId; +import io.pravega.schemaregistry.contract.data.EncodingInfo; +import io.pravega.schemaregistry.contract.data.SchemaInfo; +import io.pravega.schemaregistry.contract.data.SchemaValidationRules; +import io.pravega.schemaregistry.contract.data.SchemaWithVersion; +import io.pravega.schemaregistry.contract.data.SerializationFormat; +import io.pravega.schemaregistry.contract.data.VersionInfo; +import io.pravega.schemaregistry.contract.generated.rest.model.CanRead; +import io.pravega.schemaregistry.contract.generated.rest.model.CodecTypesList; +import io.pravega.schemaregistry.contract.generated.rest.model.GroupHistory; +import io.pravega.schemaregistry.contract.generated.rest.model.GroupHistoryRecord; +import io.pravega.schemaregistry.contract.generated.rest.model.GroupProperties; +import io.pravega.schemaregistry.contract.generated.rest.model.ListGroupsResponse; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaVersionsList; +import io.pravega.schemaregistry.contract.generated.rest.model.Valid; +import io.pravega.schemaregistry.contract.transform.ModelHelper; +import io.pravega.schemaregistry.contract.v1.ApiV1; +import io.pravega.test.common.AssertExtensions; +import lombok.val; +import org.junit.Test; + +import javax.ws.rs.core.Response; +import java.nio.ByteBuffer; +import java.util.Collections; +import java.util.List; +import java.util.Map; + +import static io.pravega.schemaregistry.client.exceptions.RegistryExceptions.*; +import static org.junit.Assert.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.*; + +public class TestSchemaRegistryClient { + @Test + public void testGroup() { + ApiV1.GroupsApi proxy = mock(ApiV1.GroupsApi.class); + SchemaRegistryClientImpl client = new SchemaRegistryClientImpl(proxy); + Response response = mock(Response.class); + + // add group + // 1. 
success response code + io.pravega.schemaregistry.contract.data.GroupProperties groupProperties = new io.pravega.schemaregistry.contract.data.GroupProperties( + SerializationFormat.Avro, SchemaValidationRules.of(Compatibility.backward()), true); + doReturn(response).when(proxy).createGroup(any()); + doReturn(Response.Status.CREATED.getStatusCode()).when(response).getStatus(); + boolean addGroup = client.addGroup("grp1", groupProperties); + assertTrue(addGroup); + + doReturn(Response.Status.CONFLICT.getStatusCode()).when(response).getStatus(); + addGroup = client.addGroup("grp1", groupProperties); + assertFalse(addGroup); + + doReturn(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("Exception should have been thrown", + () -> client.addGroup("grp1", groupProperties), + e -> e instanceof InternalServerError); + reset(response); + + // list groups + doReturn(response).when(proxy).listGroups(null, 100); + Response response2 = mock(Response.class); + doReturn(response2).when(proxy).listGroups("token", 100); + doReturn(Response.Status.OK.getStatusCode()).when(response).getStatus(); + doReturn(Response.Status.OK.getStatusCode()).when(response2).getStatus(); + GroupProperties mygroup = new GroupProperties().properties(Collections.emptyMap()) + .serializationFormat(new io.pravega.schemaregistry.contract.generated.rest.model.SerializationFormat() + .serializationFormat(io.pravega.schemaregistry.contract.generated.rest.model.SerializationFormat.SerializationFormatEnum.ANY)) + .schemaValidationRules(ModelHelper.encode(SchemaValidationRules.of(Compatibility.backward()))) + .allowMultipleTypes(false); + String groupName = "mygroup"; + ListGroupsResponse groupList = new ListGroupsResponse().groups(Collections.singletonMap(groupName, mygroup)).continuationToken("token"); + doReturn(groupList).when(response).readEntity(eq(ListGroupsResponse.class)); + doReturn(new ListGroupsResponse().groups(Collections.emptyMap()).continuationToken("token")).when(response2).readEntity(eq(ListGroupsResponse.class)); + + val groups = Lists.newArrayList(client.listGroups()); + assertEquals(1, groups.size()); + assertTrue(groups.stream().anyMatch(x -> x.getKey().equals(groupName))); + Map.Entry group = + groups.stream().filter(x -> x.getKey().equals(groupName)).findAny().orElseThrow(RuntimeException::new); + assertEquals(group.getValue().getSerializationFormat(), SerializationFormat.Any); + assertEquals(group.getValue().getSchemaValidationRules().getRules().get(Compatibility.class.getSimpleName()), Compatibility.backward()); + + reset(response); + } + + @Test + public void testListGroup() { + ApiV1.GroupsApi proxy = mock(ApiV1.GroupsApi.class); + SchemaRegistryClientImpl client = new SchemaRegistryClientImpl(proxy); + Response response = mock(Response.class); + + doReturn(Response.Status.OK.getStatusCode()).when(response).getStatus(); + GroupProperties mygroup = new GroupProperties().properties(Collections.emptyMap()) + .serializationFormat(new io.pravega.schemaregistry.contract.generated.rest.model.SerializationFormat() + .serializationFormat(io.pravega.schemaregistry.contract.generated.rest.model.SerializationFormat.SerializationFormatEnum.ANY)) + .schemaValidationRules(ModelHelper.encode(SchemaValidationRules.of(Compatibility.backward()))) + .allowMultipleTypes(false); + String groupId = "mygroup"; + ListGroupsResponse groupList = new ListGroupsResponse().groups(Collections.singletonMap(groupId, mygroup)).continuationToken("token"); + ListGroupsResponse 
groupList2 = new ListGroupsResponse().groups(Collections.emptyMap()).continuationToken("token"); + doReturn(response).when(proxy).listGroups(null, 100); + Response response2 = mock(Response.class); + doReturn(response2).when(proxy).listGroups("token", 100); + doReturn(Response.Status.OK.getStatusCode()).when(response2).getStatus(); + + doReturn(groupList).when(response).readEntity(eq(ListGroupsResponse.class)); + doReturn(groupList2).when(response2).readEntity(eq(ListGroupsResponse.class)); + val groups = Lists.newArrayList(client.listGroups()); + assertEquals(1, groups.size()); + assertTrue(groups.stream().anyMatch(x -> x.getKey().equals(groupId))); + Map.Entry group = + groups.stream().filter(x -> x.getKey().equals(groupId)).findAny().orElseThrow(RuntimeException::new); + assertEquals(group.getValue().getSerializationFormat(), SerializationFormat.Any); + assertEquals(group.getValue().getSchemaValidationRules().getRules().get(Compatibility.class.getSimpleName()), Compatibility.backward()); + + // Runtime Exception + doReturn(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("Exception should have been thrown", () -> Lists.newArrayList(client.listGroups()), e -> e instanceof InternalServerError); + } + + @Test + public void testRemoveGroup() { + ApiV1.GroupsApi proxy = mock(ApiV1.GroupsApi.class); + SchemaRegistryClientImpl client = new SchemaRegistryClientImpl(proxy); + Response response = mock(Response.class); + doReturn(response).when(proxy).deleteGroup(anyString()); + doReturn(Response.Status.NO_CONTENT.getStatusCode()).when(response).getStatus(); + + client.removeGroup("mygroup"); + + // not OK response + doReturn(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", () -> client.removeGroup("mygroup"), + e -> e instanceof InternalServerError); + } + + @Test + public void testGetGroupProperties() { + ApiV1.GroupsApi proxy = mock(ApiV1.GroupsApi.class); + SchemaRegistryClientImpl client = new SchemaRegistryClientImpl(proxy); + Response response = mock(Response.class); + doReturn(response).when(proxy).getGroupProperties(anyString()); + + doReturn(Response.Status.OK.getStatusCode()).when(response).getStatus(); + GroupProperties mygroup + = new GroupProperties().properties(Collections.emptyMap()) + .serializationFormat(new io.pravega.schemaregistry.contract.generated.rest.model.SerializationFormat() + .serializationFormat( + io.pravega.schemaregistry.contract.generated.rest.model.SerializationFormat.SerializationFormatEnum.ANY)) + .schemaValidationRules(ModelHelper.encode(SchemaValidationRules.of(Compatibility.backward()))) + .allowMultipleTypes(false); + doReturn(mygroup).when(response).readEntity(eq(GroupProperties.class)); + io.pravega.schemaregistry.contract.data.GroupProperties groupProperties = client.getGroupProperties("mygroup"); + assertEquals(groupProperties.getSerializationFormat(), SerializationFormat.Any); + assertEquals(groupProperties.getSchemaValidationRules().getRules().get(Compatibility.class.getSimpleName()), + Compatibility.backward()); + // ResourceNotFoundException + doReturn(Response.Status.NOT_FOUND.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", () -> client.getGroupProperties( + "mygroup"), e -> e instanceof ResourceNotFoundException); + //Runtime Exception + 
doReturn(Response.Status.CONFLICT.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", () -> client.getGroupProperties( + "mygroup"), e -> e instanceof InternalServerError); + } + + @Test + public void testUpdateSchemaValidationRules() { + ApiV1.GroupsApi proxy = mock(ApiV1.GroupsApi.class); + SchemaRegistryClientImpl client = new SchemaRegistryClientImpl(proxy); + Response response = mock(Response.class); + doReturn(response).when(proxy).updateSchemaValidationRules(anyString(), any()); + + doReturn(Response.Status.OK.getStatusCode()).when(response).getStatus(); + SchemaValidationRules schemaValidationRules = SchemaValidationRules.of(Compatibility.backward()); + client.updateSchemaValidationRules("mygroup", schemaValidationRules, null); + assertEquals(response.getStatus(), Response.Status.OK.getStatusCode()); + // Precondition Failed + doReturn(Response.Status.CONFLICT.getStatusCode()).when(response).getStatus(); + assertFalse(client.updateSchemaValidationRules("mygroup", schemaValidationRules, null)); + // NotFound exception + doReturn(Response.Status.NOT_FOUND.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.updateSchemaValidationRules("mygroup", schemaValidationRules, null), + e -> e instanceof ResourceNotFoundException); + // Runtime Exception + doReturn(Response.Status.EXPECTATION_FAILED.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.updateSchemaValidationRules("mygroup", schemaValidationRules, null), + e -> e instanceof InternalServerError); + } + + @Test + public void testSchemasApi() { + ApiV1.GroupsApi proxy = mock(ApiV1.GroupsApi.class); + SchemaRegistryClientImpl client = new SchemaRegistryClientImpl(proxy); + Response response = mock(Response.class); + doReturn(response).when(proxy).getSchemas(anyString(), any()); + + doReturn(Response.Status.OK.getStatusCode()).when(response).getStatus(); + doReturn(Response.Status.OK.getStatusCode()).when(response).getStatus(); + SerializationFormat serializationFormat = SerializationFormat.custom("custom"); + ByteBuffer schemaData = ByteBuffer.wrap(new byte[0]); + SchemaInfo schemaInfo = new SchemaInfo("schema1", serializationFormat, schemaData, ImmutableMap.of()); + VersionInfo versionInfo = new VersionInfo("schema1", 5, 5); + io.pravega.schemaregistry.contract.generated.rest.model.SchemaWithVersion schemaVersion = new io.pravega.schemaregistry.contract.generated.rest.model.SchemaWithVersion() + .schemaInfo(ModelHelper.encode(schemaInfo)).version(ModelHelper.encode(versionInfo)); + SchemaVersionsList schemaList = new SchemaVersionsList(); + schemaList.addSchemasItem(schemaVersion); + doReturn(schemaList).when(response).readEntity(SchemaVersionsList.class); + List output = client.getSchemas("mygroup"); + assertEquals(1, output.size()); + assertEquals("schema1", output.get(0).getSchemaInfo().getType()); + //NotFound Exception + doReturn(Response.Status.NOT_FOUND.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", () -> client.getSchemas("mygroup"), + e -> e instanceof ResourceNotFoundException); + // Runtime exception + doReturn(Response.Status.EXPECTATION_FAILED.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", () -> client.getSchemas("mygroup"), + e -> e instanceof 
InternalServerError); + } + + @Test + public void testAddSchema() { + ApiV1.GroupsApi proxy = mock(ApiV1.GroupsApi.class); + SchemaRegistryClientImpl client = new SchemaRegistryClientImpl(proxy); + Response response = mock(Response.class); + doReturn(response).when(proxy).addSchema(anyString(), any()); + doReturn(Response.Status.CREATED.getStatusCode()).when(response).getStatus(); + SerializationFormat serializationFormat = SerializationFormat.custom("custom"); + ByteBuffer schemaData = ByteBuffer.wrap(new byte[0]); + SchemaInfo schemaInfo = new SchemaInfo("schema1", serializationFormat, schemaData, ImmutableMap.of()); + io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo versionInfo = + new io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo().version( + 5).type("schema2").ordinal(5); + doReturn(versionInfo).when(response).readEntity( + io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo.class); + VersionInfo versionInfo1 = client.addSchema("mygroup", schemaInfo); + assertEquals(5, versionInfo1.getVersion()); + assertEquals("schema2", versionInfo1.getType()); + assertEquals(5, versionInfo1.getOrdinal()); + // NotFound Exception + doReturn(Response.Status.NOT_FOUND.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.addSchema("mygroup", schemaInfo), e -> e instanceof ResourceNotFoundException); + // SchemaIncompatible exception + doReturn(Response.Status.CONFLICT.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.addSchema("mygroup", schemaInfo), e -> e instanceof SchemaValidationFailedException); + // SerializationFormatInvalid Exception + doReturn(Response.Status.EXPECTATION_FAILED.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.addSchema("mygroup", schemaInfo), e -> e instanceof SerializationMismatchException); + //Runtime Exception + doReturn(Response.Status.BAD_GATEWAY.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.addSchema("mygroup", schemaInfo), e -> e instanceof InternalServerError); + } + + @Test + public void testGetSchema() { + ApiV1.GroupsApi proxy = mock(ApiV1.GroupsApi.class); + SchemaRegistryClientImpl client = new SchemaRegistryClientImpl(proxy); + Response response = mock(Response.class); + doReturn(response).when(proxy).getSchemaFromVersionOrdinal(anyString(), anyInt()); + + doReturn(Response.Status.OK.getStatusCode()).when(response).getStatus(); + io.pravega.schemaregistry.contract.generated.rest.model.SerializationFormat serializationFormat = ModelHelper.encode(SerializationFormat.custom("custom")); + byte[] schemaData = new byte[0]; + + io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo schemaInfo = + new io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo() + .schemaData(schemaData).type("schema1").serializationFormat(serializationFormat).properties(Collections.emptyMap()); + VersionInfo versionInfo = new VersionInfo("schema2", 5, 5); + doReturn(schemaInfo).when(response).readEntity( + io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo.class); + SchemaInfo schemaInfo1 = client.getSchemaForVersion("mygroup", versionInfo); + assertEquals(schemaInfo.getType(), schemaInfo1.getType()); + // NotFound Exception + 
doReturn(Response.Status.NOT_FOUND.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.getSchemaForVersion("mygroup", versionInfo), e -> e instanceof ResourceNotFoundException); + // Runtime Exception + doReturn(Response.Status.CONFLICT.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.getSchemaForVersion("mygroup", versionInfo), e -> e instanceof InternalServerError); + } + + @Test + public void testGetEncodingInfo() { + ApiV1.GroupsApi proxy = mock(ApiV1.GroupsApi.class); + SchemaRegistryClientImpl client = new SchemaRegistryClientImpl(proxy); + Response response = mock(Response.class); + doReturn(response).when(proxy).getEncodingInfo(anyString(), anyInt()); + + doReturn(Response.Status.OK.getStatusCode()).when(response).getStatus(); + VersionInfo versionInfo = new VersionInfo("schema2", 5, 5); + SerializationFormat serializationFormat = SerializationFormat.custom("custom"); + ByteBuffer schemaData = ByteBuffer.wrap(new byte[0]); + SchemaInfo schemaInfo = new SchemaInfo("schema1", serializationFormat, schemaData, ImmutableMap.of()); + String codecType = "gzip"; + EncodingInfo encodingInfo = new EncodingInfo(versionInfo, schemaInfo, codecType); + EncodingId encodingId = new EncodingId(5); + doReturn(ModelHelper.encode(encodingInfo)).when(response).readEntity( + io.pravega.schemaregistry.contract.generated.rest.model.EncodingInfo.class); + EncodingInfo encodingInfo1 = client.getEncodingInfo("mygroup", encodingId); + assertEquals(encodingInfo.getCodecType(), encodingInfo1.getCodecType()); + assertEquals(encodingInfo.getSchemaInfo(), encodingInfo1.getSchemaInfo()); + assertEquals(encodingInfo.getVersionInfo(), encodingInfo1.getVersionInfo()); + // NotFound exception + doReturn(Response.Status.NOT_FOUND.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.getEncodingInfo("mygroup", encodingId), e -> e instanceof ResourceNotFoundException); + // Runtime Exception + doReturn(Response.Status.CONFLICT.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.getEncodingInfo("mygroup", encodingId), e -> e instanceof InternalServerError); + } + + @Test + public void testGetEncodingId() { + ApiV1.GroupsApi proxy = mock(ApiV1.GroupsApi.class); + SchemaRegistryClientImpl client = new SchemaRegistryClientImpl(proxy); + Response response = mock(Response.class); + doReturn(response).when(proxy).getEncodingId(anyString(), any()); + + doReturn(Response.Status.OK.getStatusCode()).when(response).getStatus(); + String codecType = "gzip"; + VersionInfo versionInfo = new VersionInfo("schema2", 5, 5); + io.pravega.schemaregistry.contract.generated.rest.model.EncodingId encodingId = ModelHelper.encode(new EncodingId(5)); + doReturn(encodingId).when(response).readEntity( + io.pravega.schemaregistry.contract.generated.rest.model.EncodingId.class); + EncodingId encodingId1 = client.getEncodingId("mygroup", versionInfo, codecType); + assertEquals(encodingId.getEncodingId().intValue(), encodingId1.getId()); + // NotFound Exception + doReturn(Response.Status.NOT_FOUND.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.getEncodingId("mygroup", versionInfo, codecType), e -> e instanceof ResourceNotFoundException); + // 
StringNotFound Exception + doReturn(Response.Status.PRECONDITION_FAILED.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.getEncodingId("mygroup", versionInfo, codecType), e -> e instanceof CodecTypeNotRegisteredException); + // Runtime Exception + doReturn(Response.Status.CONFLICT.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.getEncodingId("mygroup", versionInfo, codecType), e -> e instanceof InternalServerError); + } + + @Test + public void testGetLatestSchemaForGroup() { + ApiV1.GroupsApi proxy = mock(ApiV1.GroupsApi.class); + SchemaRegistryClientImpl client = new SchemaRegistryClientImpl(proxy); + Response response = mock(Response.class); + doReturn(response).when(proxy).getSchemas(anyString(), any()); + + doReturn(Response.Status.OK.getStatusCode()).when(response).getStatus(); + VersionInfo versionInfo = new VersionInfo("schema2", 5, 5); + SerializationFormat serializationFormat = SerializationFormat.custom("custom"); + ByteBuffer schemaData = ByteBuffer.wrap(new byte[0]); + SchemaInfo schemaInfo = new SchemaInfo("schema1", serializationFormat, schemaData, ImmutableMap.of()); + SchemaWithVersion schemaWithVersion = new SchemaWithVersion(schemaInfo, versionInfo); + SchemaVersionsList schemaWithVersions = new SchemaVersionsList().schemas(Collections.singletonList(ModelHelper.encode(schemaWithVersion))); + doReturn(schemaWithVersions).when(response).readEntity( + SchemaVersionsList.class); + SchemaWithVersion schemaWithVersion1 = client.getLatestSchemaVersion("mygroup", null); + assertEquals(schemaWithVersion.getSchemaInfo(), schemaWithVersion1.getSchemaInfo()); + assertEquals(schemaWithVersion.getVersionInfo(), schemaWithVersion1.getVersionInfo()); + // NotFound Exception + doReturn(Response.Status.NOT_FOUND.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.getLatestSchemaVersion("mygroup", null), e -> e instanceof ResourceNotFoundException); + // Runtime Exception + doReturn(Response.Status.CONFLICT.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.getLatestSchemaVersion("mygroup", null), e -> e instanceof InternalServerError); + + doReturn(Response.Status.OK.getStatusCode()).when(response).getStatus(); + versionInfo = new VersionInfo("schema2", 5, 5); + serializationFormat = SerializationFormat.custom("custom"); + schemaInfo = new SchemaInfo("schema1", serializationFormat, schemaData, ImmutableMap.of()); + schemaWithVersion = new SchemaWithVersion(schemaInfo, versionInfo); + doReturn(ModelHelper.encode(schemaWithVersion)).when(response).readEntity( + io.pravega.schemaregistry.contract.generated.rest.model.SchemaWithVersion.class); + schemaWithVersion1 = client.getLatestSchemaVersion("mygroup", "myobject"); + assertEquals(schemaWithVersion.getSchemaInfo(), schemaWithVersion1.getSchemaInfo()); + assertEquals(schemaWithVersion.getVersionInfo(), schemaWithVersion1.getVersionInfo()); + // NotFound Exception + doReturn(Response.Status.NOT_FOUND.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.getLatestSchemaVersion("mygroup", "myobject"), e -> e instanceof ResourceNotFoundException); + // Runtime Exception + 
doReturn(Response.Status.CONFLICT.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.getLatestSchemaVersion("mygroup", "myobject"), e -> e instanceof InternalServerError); + } + + @Test + public void testGroupEvolutionHistory() { + ApiV1.GroupsApi proxy = mock(ApiV1.GroupsApi.class); + SchemaRegistryClientImpl client = new SchemaRegistryClientImpl(proxy); + Response response = mock(Response.class); + doReturn(response).when(proxy).getGroupHistory(anyString()); + + doReturn(Response.Status.OK.getStatusCode()).when(response).getStatus(); + VersionInfo versionInfo = new VersionInfo("schema2", 5, 5); + SerializationFormat serializationFormat = SerializationFormat.custom("custom"); + ByteBuffer schemaData = ByteBuffer.wrap(new byte[0]); + SchemaInfo schemaInfo = new SchemaInfo("schema1", serializationFormat, schemaData, ImmutableMap.of()); + SchemaValidationRules schemaValidationRules = SchemaValidationRules.of(Compatibility.backward()); + GroupHistoryRecord groupHistoryRecord = new io.pravega.schemaregistry.contract.generated.rest.model.GroupHistoryRecord() + .schemaInfo(ModelHelper.encode(schemaInfo)).version(ModelHelper.encode(versionInfo)) + .validationRules(ModelHelper.encode(schemaValidationRules)).timestamp(100L).schemaString(""); + GroupHistory history = new GroupHistory(); + history.addHistoryItem(groupHistoryRecord); + doReturn(history).when(response).readEntity(GroupHistory.class); + List groupHistoryList = client.getGroupHistory("mygroup"); + assertEquals(1, groupHistoryList.size()); + assertEquals(schemaValidationRules, groupHistoryList.get(0).getRules()); + assertEquals(schemaInfo, groupHistoryList.get(0).getSchema()); + assertEquals(versionInfo, groupHistoryList.get(0).getVersion()); + assertEquals(100L, groupHistoryList.get(0).getTimestamp()); + assertEquals("", groupHistoryList.get(0).getSchemaString()); + //NotFound Exception + doReturn(Response.Status.NOT_FOUND.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.getGroupHistory("mygroup"), e -> e instanceof ResourceNotFoundException); + //Runtime Exception + doReturn(Response.Status.CONFLICT.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.getGroupHistory("mygroup"), e -> e instanceof InternalServerError); + } + + @Test + public void testGetSchemaVersion() { + ApiV1.GroupsApi proxy = mock(ApiV1.GroupsApi.class); + SchemaRegistryClientImpl client = new SchemaRegistryClientImpl(proxy); + Response response = mock(Response.class); + doReturn(response).when(proxy).getSchemaVersion(anyString(), any()); + + doReturn(Response.Status.OK.getStatusCode()).when(response).getStatus(); + SerializationFormat serializationFormat = SerializationFormat.custom("custom"); + ByteBuffer schemaData = ByteBuffer.wrap(new byte[0]); + SchemaInfo schemaInfo = new SchemaInfo("schema1", serializationFormat, schemaData, ImmutableMap.of()); + VersionInfo versionInfo = new VersionInfo("schema2", 5, 5); + doReturn(ModelHelper.encode(versionInfo)).when(response).readEntity( + io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo.class); + VersionInfo versionInfo1 = client.getVersionForSchema("mygroup", schemaInfo); + assertEquals(versionInfo.getType(), versionInfo1.getType()); + assertEquals(versionInfo.getVersion(), versionInfo1.getVersion()); + //NotFound Exception + 
doReturn(Response.Status.NOT_FOUND.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.getVersionForSchema("mygroup", schemaInfo), e -> e instanceof ResourceNotFoundException); + //Runtime Exception + doReturn(Response.Status.CONFLICT.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.getVersionForSchema("mygroup", schemaInfo), e -> e instanceof InternalServerError); + } + + @Test + public void testGetSchemaVersions() { + ApiV1.GroupsApi proxy = mock(ApiV1.GroupsApi.class); + SchemaRegistryClientImpl client = new SchemaRegistryClientImpl(proxy); + Response response = mock(Response.class); + doReturn(response).when(proxy).getSchemaVersions(anyString(), any()); + + doReturn(Response.Status.OK.getStatusCode()).when(response).getStatus(); + SerializationFormat serializationFormat = SerializationFormat.custom("custom"); + ByteBuffer schemaData = ByteBuffer.wrap(new byte[0]); + + SchemaInfo schemaInfo = new SchemaInfo("schema1", serializationFormat, schemaData, ImmutableMap.of()); + VersionInfo versionInfo = new VersionInfo("schema2", 5, 5); + SchemaWithVersion schemaWithVersion = new SchemaWithVersion(schemaInfo, versionInfo); + SchemaVersionsList list = new SchemaVersionsList().schemas(Collections.singletonList(ModelHelper.encode(schemaWithVersion))); + doReturn(list).when(response).readEntity(SchemaVersionsList.class); + List result = Lists.newArrayList(client.getSchemaVersions("mygroup", null)); + assertEquals(result.size(), 1); + assertEquals(versionInfo, result.get(0).getVersionInfo()); + assertEquals(schemaInfo, result.get(0).getSchemaInfo()); + + result = Lists.newArrayList(client.getSchemaVersions("mygroup", schemaInfo.getType())); + assertEquals(result.size(), 1); + assertEquals(versionInfo, result.get(0).getVersionInfo()); + assertEquals(schemaInfo, result.get(0).getSchemaInfo()); + + //NotFound Exception + doReturn(Response.Status.NOT_FOUND.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> Lists.newArrayList(client.getSchemaVersions("mygroup", null)), + e -> e instanceof ResourceNotFoundException); + //Runtime Exception + doReturn(Response.Status.CONFLICT.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> Lists.newArrayList(client.getSchemaVersions("mygroup", null)), e -> e instanceof InternalServerError); + } + + @Test + public void testValidateSchema() { + ApiV1.GroupsApi proxy = mock(ApiV1.GroupsApi.class); + SchemaRegistryClientImpl client = new SchemaRegistryClientImpl(proxy); + Response response = mock(Response.class); + doReturn(response).when(proxy).validate(anyString(), any()); + + doReturn(Response.Status.OK.getStatusCode()).when(response).getStatus(); + SerializationFormat serializationFormat = SerializationFormat.custom("custom"); + ByteBuffer schemaData = ByteBuffer.wrap(new byte[0]); + + SchemaInfo schemaInfo = new SchemaInfo("schema1", serializationFormat, schemaData, ImmutableMap.of()); + Valid valid = new Valid().valid(Boolean.TRUE); + doReturn(valid).when(response).readEntity(Valid.class); + Boolean valid1 = client.validateSchema("mygroup", schemaInfo); + assertEquals(valid.isValid(), valid1); + //NotFound Exception + doReturn(Response.Status.NOT_FOUND.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been 
thrown", + () -> client.validateSchema("mygroup", schemaInfo), e -> e instanceof ResourceNotFoundException); + //Runtime Exception + doReturn(Response.Status.CONFLICT.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.validateSchema("mygroup", schemaInfo), e -> e instanceof InternalServerError); + } + + @Test + public void testCanRead() { + ApiV1.GroupsApi proxy = mock(ApiV1.GroupsApi.class); + SchemaRegistryClientImpl client = new SchemaRegistryClientImpl(proxy); + Response response = mock(Response.class); + doReturn(response).when(proxy).canRead(anyString(), any()); + + doReturn(Response.Status.OK.getStatusCode()).when(response).getStatus(); + SerializationFormat serializationFormat = SerializationFormat.custom("custom"); + ByteBuffer schemaData = ByteBuffer.wrap(new byte[0]); + + SchemaInfo schemaInfo = new SchemaInfo("schema1", serializationFormat, schemaData, ImmutableMap.of()); + CanRead canRead = new CanRead().compatible(Boolean.TRUE); + doReturn(canRead).when(response).readEntity(CanRead.class); + Boolean canRead1 = client.canReadUsing("mygroup", schemaInfo); + assertEquals(canRead.isCompatible(), canRead1); + //NotFound Exception + doReturn(Response.Status.NOT_FOUND.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.canReadUsing("mygroup", schemaInfo), e -> e instanceof ResourceNotFoundException); + //Runtime Exception + doReturn(Response.Status.CONFLICT.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.canReadUsing("mygroup", schemaInfo), e -> e instanceof InternalServerError); + } + + @Test + public void testGetCodecTypes() { + ApiV1.GroupsApi proxy = mock(ApiV1.GroupsApi.class); + SchemaRegistryClientImpl client = new SchemaRegistryClientImpl(proxy); + Response response = mock(Response.class); + doReturn(response).when(proxy).getCodecTypesList(anyString()); + + doReturn(Response.Status.OK.getStatusCode()).when(response).getStatus(); + String codecType = "gzip"; + String codecType1 = "snappy"; + CodecTypesList codecTypesList = new CodecTypesList(); + codecTypesList.addCodecTypesItem(codecType); + codecTypesList.addCodecTypesItem(codecType1); + doReturn(codecTypesList).when(response).readEntity(CodecTypesList.class); + List codecTypesList1 = client.getCodecTypes("mygroup"); + assertEquals(2, codecTypesList1.size()); + assertEquals("gzip", codecTypesList1.get(0)); + assertEquals("snappy", codecTypesList1.get(1)); + //NotFound Exception + doReturn(Response.Status.NOT_FOUND.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.getCodecTypes("mygroup"), e -> e instanceof ResourceNotFoundException); + //Runtime Exception + doReturn(Response.Status.CONFLICT.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.getCodecTypes("mygroup"), e -> e instanceof InternalServerError); + } + + @Test + public void testAddCodecType() { + ApiV1.GroupsApi proxy = mock(ApiV1.GroupsApi.class); + SchemaRegistryClientImpl client = new SchemaRegistryClientImpl(proxy); + Response response = mock(Response.class); + doReturn(response).when(proxy).addCodecType(anyString(), any()); + + doReturn(Response.Status.CREATED.getStatusCode()).when(response).getStatus(); + String codecType = "gzip"; + 
client.addCodecType("mygroup", codecType); + assertEquals(Response.Status.CREATED.getStatusCode(), response.getStatus()); + //NotFound Exception + doReturn(Response.Status.NOT_FOUND.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.addCodecType("mygroup", codecType), e -> e instanceof ResourceNotFoundException); + //Runtime Exception + doReturn(Response.Status.CONFLICT.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.addCodecType("mygroup", codecType), e -> e instanceof InternalServerError); + } +} diff --git a/common/src/main/java/io/pravega/schemaregistry/common/ContinuationTokenIterator.java b/common/src/main/java/io/pravega/schemaregistry/common/ContinuationTokenIterator.java new file mode 100644 index 000000000..ff998e60d --- /dev/null +++ b/common/src/main/java/io/pravega/schemaregistry/common/ContinuationTokenIterator.java @@ -0,0 +1,93 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.common; + +import lombok.Synchronized; + +import javax.annotation.concurrent.GuardedBy; +import java.util.Collection; +import java.util.HashSet; +import java.util.Iterator; +import java.util.Map; +import java.util.NoSuchElementException; +import java.util.Queue; +import java.util.Set; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.function.Function; + +/** + * Continuation token iterator which fetches a batch of values using the loading function. Once those values have been + * iterated over, it uses the continuation token to read more values using the loading function until the function does + * not return a value. + * @param Type of value. + * @param Type of continuation token. + */ +public class ContinuationTokenIterator implements Iterator { + @GuardedBy("$lock") + private final Queue queue; + private final Function>> loadingFunction; + @GuardedBy("lock") + private Token token; + @GuardedBy("$lock") + private T next; + @GuardedBy("$lock") + private boolean canHaveNext; + @GuardedBy("$lock") + private final Set tokens; + + public ContinuationTokenIterator(Function>> loadingFunction, Token tokenIdentity) { + this.loadingFunction = loadingFunction; + this.queue = new LinkedBlockingQueue(); + this.token = tokenIdentity; + this.canHaveNext = true; + this.next = null; + this.tokens = new HashSet<>(); + } + + @Synchronized + private void load() { + next = next == null ? 
queue.poll() : next; + while (next == null && canHaveNext) { + Map.Entry> result = loadingFunction.apply(token); + boolean tokenUpdated = result.getKey() != null && !tokens.contains(result.getKey()); + if (result.getKey() != null) { + tokens.add(result.getKey()); + } + token = result.getKey(); + + queue.addAll(result.getValue()); + next = queue.poll(); + if (next == null) { + canHaveNext = tokenUpdated; + } + } + } + + @Synchronized + @Override + public boolean hasNext() { + load(); + return canHaveNext; + } + + @Synchronized + @Override + public T next() { + load(); + if (next != null) { + T retVal = next; + next = null; + return retVal; + } else { + assert !canHaveNext; + throw new NoSuchElementException(); + } + } +} diff --git a/common/src/main/java/io/pravega/schemaregistry/common/Either.java b/common/src/main/java/io/pravega/schemaregistry/common/Either.java new file mode 100644 index 000000000..212ad99fe --- /dev/null +++ b/common/src/main/java/io/pravega/schemaregistry/common/Either.java @@ -0,0 +1,51 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.common; + +import com.google.common.base.Preconditions; +import lombok.Data; + +/** + * A holder object consisting of either of two elements. + * + * The objects could be of any type. Exactly one of the values will exist while the other will be null. + * If a mutable object is stored in 'Either', then 'Either' itself effectively becomes mutable. + * + * @param the left element type. + * @param the right element type. + */ +@Data +public class Either { + private final T left; + private final K right; + + private Either(T left, K right) { + this.left = left; + this.right = right; + } + + public static Either left(T t) { + Preconditions.checkNotNull(t); + return new Either(t, null); + } + + public static Either right(K k) { + Preconditions.checkNotNull(k); + return new Either(null, k); + } + + public boolean isLeft() { + return left != null; + } + + public boolean isRight() { + return right != null; + } +} diff --git a/common/src/main/java/io/pravega/schemaregistry/common/HashUtil.java b/common/src/main/java/io/pravega/schemaregistry/common/HashUtil.java new file mode 100644 index 000000000..3875cbb44 --- /dev/null +++ b/common/src/main/java/io/pravega/schemaregistry/common/HashUtil.java @@ -0,0 +1,21 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
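// [Editor's note] A small sketch, not part of this patch, showing the Either holder defined above. The two type
// parameters are the declared left/right element types, and getLeft()/getRight() are the accessors Lombok's @Data
// generates for the final fields; exactly one side is ever non-null.
import io.pravega.schemaregistry.common.Either;

class EitherUsageSketch {
    // Returns either the parsed value (left) or an error message (right).
    static Either<Integer, String> tryParse(String input) {
        try {
            return Either.left(Integer.parseInt(input));
        } catch (NumberFormatException e) {
            return Either.right("not a number: " + input);
        }
    }

    static void demo() {
        Either<Integer, String> parsed = tryParse("42");
        if (parsed.isLeft()) {
            System.out.println("value = " + parsed.getLeft());
        } else {
            System.out.println("error = " + parsed.getRight());
        }
    }
}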
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.common; + +import com.google.common.hash.HashFunction; +import com.google.common.hash.Hashing; + +public class HashUtil { + private static final HashFunction HASH = Hashing.murmur3_128(); + + public static long getFingerprint(byte[] bytes) { + return HASH.hashBytes(bytes).asLong(); + } +} diff --git a/common/src/test/java/io/pravega/schemaregistry/common/ContinuationTokenIteratorTest.java b/common/src/test/java/io/pravega/schemaregistry/common/ContinuationTokenIteratorTest.java new file mode 100644 index 000000000..89989512a --- /dev/null +++ b/common/src/test/java/io/pravega/schemaregistry/common/ContinuationTokenIteratorTest.java @@ -0,0 +1,68 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.common; + +import com.google.common.collect.Lists; +import lombok.Data; +import org.junit.Test; + +import java.util.AbstractMap; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Queue; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.function.Function; + +import static org.junit.Assert.*; +import static org.mockito.Mockito.*; + +public class ContinuationTokenIteratorTest { + @Test + public void test() { + // 1. call method 1st call returns - list of 5 items + new token + // verify that call method is not called until all 10 are read. + // 2. call returns empty list + new token + // 3. call returns empty list + new token + // 4. call returns list of 10 items + new token + // verify that we consume 10 items without calling the callmethod + // 5. call returns empty list + same token. 
--> this should exit + Queue responses = spy(new LinkedBlockingQueue<>()); + responses.add(new ListWithToken(Lists.newArrayList(1, 2, 3, 4, 5), "1")); + responses.add(new ListWithToken(Collections.emptyList(), "2")); + responses.add(new ListWithToken(Collections.emptyList(), "3")); + responses.add(new ListWithToken(Lists.newArrayList(6, 7, 8, 9, 10), "4")); + responses.add(new ListWithToken(Collections.emptyList(), "4")); + Function>> func = token -> { + ListWithToken result = responses.poll(); + return new AbstractMap.SimpleEntry<>(result.token, result.list); + }; + ContinuationTokenIterator myIterator = new ContinuationTokenIterator<>(func, null); + for (int i = 0; i < 5; i++) { + assertTrue(myIterator.hasNext()); + assertEquals(myIterator.next().intValue(), i + 1); + } + verify(responses, times(1)).poll(); + for (int i = 5; i < 10; i++) { + assertTrue(myIterator.hasNext()); + assertEquals(myIterator.next().intValue(), i + 1); + } + verify(responses, times(4)).poll(); + assertFalse(myIterator.hasNext()); + verify(responses, times(5)).poll(); + } + + @Data + static class ListWithToken { + private final List list; + private final String token; + } +} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/data/Compatibility.java b/contract/src/main/java/io/pravega/schemaregistry/contract/data/Compatibility.java new file mode 100644 index 000000000..f8717337c --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/data/Compatibility.java @@ -0,0 +1,203 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.contract.data; + +import io.pravega.common.ObjectBuilder; +import lombok.Builder; +import lombok.Data; + +/** + * Defines different Compatibility policy options for schema evolution for schemas within a group. + * The choice of compatibility policy tells the Schema Registry service whether a schema should be accepted to evolve + * into new schema by comparing it with one or more existing versions of the schema. + * + * {@link Type#AllowAny}: allow any changes to schema without any checks performed by the registry. + * {@link Type#DenyAll}: disables any changes to the schema for the group. + * {@link Type#Backward}: a new schema can be used to read data written by previous schema. + * {@link Type#BackwardTransitive}: a new schema can be used read data written by any of previous schemas. + * {@link Type#BackwardTill}: a new schema can be used to read data written by any of previous schemas till schema + * identified by version {@link Compatibility#backwardTill}. + * {@link Type#Forward}: previous schema can be used to read data written by new schema. + * {@link Type#ForwardTransitive}: all previous schemas can read data written by new schema. + * {@link Type#ForwardTill}: All previous schemas till schema identified by version {@link Compatibility#forwardTill} + * can read data written by new schema. + * {@link Type#Full}: both backward and forward compatibility. + * {@link Type#FullTransitive}: both backward and forward compatibility with all previous schemas. + * {@link Type#BackwardAndForwardTill}: All previous schemas till schema identified by version {@link Compatibility#forwardTill} + * can read data written by new schema. 
New schema can be used to read data written by any of previous schemas till schema + * identified by version {@link Compatibility#backwardTill}. + */ +@Data +@Builder +public class Compatibility implements SchemaValidationRule { + /** + * Enum that defines the Type of compatibility policy. + */ + private final Type compatibility; + /** + * Version info to be specified if the compatibility policy choice is either {@link Type#BackwardTill} or + * {@link Type#BackwardAndForwardTill}. + */ + private final VersionInfo backwardTill; + /** + * Version info to be specified if the compatibility policy choice is either {@link Type#ForwardTill} or + * {@link Type#BackwardAndForwardTill}. + */ + private final VersionInfo forwardTill; + + private Compatibility(Type compatibility) { + this(compatibility, null, null); + } + + public Compatibility(Type compatibility, VersionInfo backwardTill, VersionInfo forwardTill) { + this.compatibility = compatibility; + this.backwardTill = backwardTill; + this.forwardTill = forwardTill; + } + + @Override + public String getName() { + return Compatibility.class.getSimpleName(); + } + + public enum Type { + AllowAny, + DenyAll, + Backward, + BackwardTill, + BackwardTransitive, + Forward, + ForwardTill, + ForwardTransitive, + BackwardAndForwardTill, + Full, + FullTransitive; + } + + /** + * Method to create a compatibility policy of type backward. Backward policy implies new schema will be validated + * to be capable of reading data written using the previous schema. + * + * @return Compatibility with Type.Backward. + */ + public static Compatibility backward() { + return new Compatibility(Type.Backward); + } + + /** + * Method to create a compatibility policy of type backward till. BackwardTill policy implies new schema will be validated + * to be capable of reading data written using all previous schemas till the version supplied as input. + * + * @param backwardTill version till which schemas should be checked for compatibility. + * @return Compatibility with Type.BackwardTill version. + */ + public static Compatibility backwardTill(VersionInfo backwardTill) { + return new Compatibility(Type.BackwardTill, backwardTill, null); + } + + /** + * Method to create a compatibility policy of type backward transitive. Backward transitive policy implies + * new schema will be validated to be capable of reading data written using all previous schema versions. + * + * @return Compatibility with Type.BackwardTransitive. + */ + public static Compatibility backwardTransitive() { + return new Compatibility(Type.BackwardTransitive); + } + + /** + * Method to create a compatibility policy of type forward. Forward policy implies new schema will be validated + * such that data written using new schema can be read using the previous schema. + * + * @return Compatibility with Type.Forward. + */ + public static Compatibility forward() { + return new Compatibility(Type.Forward); + } + + /** + * Method to create a compatibility policy of type forward till. ForwardTill policy implies new schema will be validated + * such that data written using new schema can be read using all previous schemas till the supplied version. + * + * @param forwardTill version till which schemas should be checked for compatibility. + * @return Compatibility with Type.ForwardTill version. + */ + public static Compatibility forwardTill(VersionInfo forwardTill) { + return new Compatibility(Type.ForwardTill, null, forwardTill); + } + + /** + * Method to create a compatibility policy of type forward transitive. 
+ * Forward transitive policy implies new schema will be validated such that data written using new schema + * can be read using all previous schemas. + * + * @return Compatibility with Type.ForwardTransitive. + */ + public static Compatibility forwardTransitive() { + return new Compatibility(Type.ForwardTransitive); + } + + /** + * Method to create a compatibility policy of type full. Full means backward and forward compatibility check with + * the previous schema version, which means new schema can be used to read data written with the previous schema and vice versa. + * + * @return Compatibility with Type.Full. + */ + public static Compatibility full() { + return new Compatibility(Type.Full); + } + + /** + * Method to create a compatibility policy of type full transitive. + * Full transitive means backward and forward compatibility check with all previous schema versions. + * This implies new schema can be used to read data written with any of the previous schemas and vice versa. + * + * @return Compatibility with Type.FullTransitive. + */ + public static Compatibility fullTransitive() { + return new Compatibility(Type.FullTransitive); + } + + /** + * Method to create a compatibility policy of type backward till and forward till. This is a combination of + * backward till and forward till policies. + * All previous schemas till schema identified by version {@link Compatibility#forwardTill} + * can read data written by new schema. New schema can be used to read data written by any of previous schemas till schema + * identified by version {@link Compatibility#backwardTill}. + * + * @param backwardTill version till which backward compatibility is checked for. + * @param forwardTill version till which forward compatibility is checked for. + * @return Compatibility with Type.BackwardAndForwardTill. + */ + public static Compatibility backwardTillAndForwardTill(VersionInfo backwardTill, VersionInfo forwardTill) { + return new Compatibility(Type.BackwardAndForwardTill, backwardTill, forwardTill); + } + + /** + * Disable compatibility checks and allow any schema to be registered. Effectively declares all schemas as compatible. + * + * @return Compatibility with Type.AllowAny. + */ + public static Compatibility allowAny() { + return new Compatibility(Type.AllowAny); + } + + /** + * Compatibility policy that disallows any new schema changes. Effectively rejects all schemas and declares them incompatible. + * + * @return Compatibility with Type.DenyAll. + */ + public static Compatibility denyAll() { + return new Compatibility(Type.DenyAll); + } + + public static class CompatibilityBuilder implements ObjectBuilder { + } +} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/data/EncodingId.java b/contract/src/main/java/io/pravega/schemaregistry/contract/data/EncodingId.java new file mode 100644 index 000000000..2d1d625ca --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/data/EncodingId.java @@ -0,0 +1,35 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
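// [Editor's note] A usage sketch, not part of this patch, tying the Compatibility factory methods above to group
// creation; it mirrors the GroupProperties constructor and addGroup call exercised in TestSchemaRegistryClient#testGroup.
// The group id is a placeholder, and the SchemaRegistryClient interface is assumed to declare addGroup.
import io.pravega.schemaregistry.client.SchemaRegistryClient;
import io.pravega.schemaregistry.contract.data.Compatibility;
import io.pravega.schemaregistry.contract.data.GroupProperties;
import io.pravega.schemaregistry.contract.data.SchemaValidationRules;
import io.pravega.schemaregistry.contract.data.SerializationFormat;

class GroupCreationSketch {
    // Creates a group whose new schemas must stay backward compatible with the previous version.
    static boolean createBackwardCompatibleGroup(SchemaRegistryClient client, String groupId) {
        GroupProperties properties = new GroupProperties(
                SerializationFormat.Avro,
                SchemaValidationRules.of(Compatibility.backward()),
                true);                                       // allowMultipleTypes
        return client.addGroup(groupId, properties);         // returns false if the group already exists
    }
}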
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.contract.data; + +import io.pravega.common.ObjectBuilder; +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; + +/** + * For each group unique set of Encoding Ids are generated for each unique combination of schema version and codec types + * registered in the group. + * The encoding id will typically be attached to the encoded data in a header to describe how to parse the following data. + * The registry service exposes APIs to resolve encoding id to {@link EncodingInfo} objects that include details about the + * encoding used. + */ +@Data +@Builder +@AllArgsConstructor +public class EncodingId { + /** + * A 4byte id that uniquely identifies a {@link VersionInfo} and codecType pair. + */ + private final int id; + + public static class EncodingIdBuilder implements ObjectBuilder { + } +} \ No newline at end of file diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/data/EncodingInfo.java b/contract/src/main/java/io/pravega/schemaregistry/contract/data/EncodingInfo.java new file mode 100644 index 000000000..f5e396ea2 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/data/EncodingInfo.java @@ -0,0 +1,33 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.contract.data; + +import lombok.Data; + +/** + * Encoding Info describes the details of encoding for each event payload. Each combination of schema version and codec type + * is uniquely identified by an {@link EncodingId}. + * The registry service exposes APIs to generate or resolve {@link EncodingId} to {@link EncodingInfo}. + */ +@Data +public class EncodingInfo { + /** + * Version of the schema which is used in encoding the data. + */ + private final VersionInfo versionInfo; + /** + * Actual schema which is used in encoding the data. + */ + private final SchemaInfo schemaInfo; + /** + * Codec type which is used in encoding the data. + */ + private final String codecType; +} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/data/GroupHistoryRecord.java b/contract/src/main/java/io/pravega/schemaregistry/contract/data/GroupHistoryRecord.java new file mode 100644 index 000000000..4b9a3d257 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/data/GroupHistoryRecord.java @@ -0,0 +1,47 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.contract.data; + +import lombok.Data; + +/** + * Describes changes to the group and the validation rules {@link GroupHistoryRecord#rules} that were + * applied while registering {@link GroupHistoryRecord#schema} and the unique {@link GroupHistoryRecord#version} identifier + * that was assigned to it. 
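// Sketch (not part of the patch) of how a writer could attach an EncodingId to a serialized payload
// as a fixed 4-byte header, and how a reader would strip it before resolving the id to an
// EncodingInfo through the registry. The header layout and class name are illustrative assumptions;
// only EncodingId.getId() comes from the patch above.
import io.pravega.schemaregistry.contract.data.EncodingId;
import java.nio.ByteBuffer;

class EncodingHeaderSketch {
    static ByteBuffer withHeader(EncodingId encodingId, ByteBuffer payload) {
        ByteBuffer out = ByteBuffer.allocate(Integer.BYTES + payload.remaining());
        out.putInt(encodingId.getId());    // 4-byte encoding id generated by the service
        out.put(payload.duplicate());      // encoded event bytes follow the header
        out.flip();
        return out;
    }

    static int readHeader(ByteBuffer encoded) {
        // Returns the encoding id; the remaining bytes are the payload, to be decoded using the
        // codec type and schema resolved from the corresponding EncodingInfo.
        return encoded.getInt();
    }
}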
+ * It also has {@link GroupHistoryRecord#timestamp} when the schema was added and includes an optional + * {@link GroupHistoryRecord#schemaString} which is populated only if serialization format is one of {@link SerializationFormat#Avro} + * {@link SerializationFormat#Json} or {@link SerializationFormat#Protobuf}. This string is just to help make the schema human readable. + */ +@Data +public class GroupHistoryRecord { + /** + * Schema information object for the schema that was added to the group. + */ + private final SchemaInfo schema; + /** + * Version information object that uniquely identifies the schema in the group. + */ + private final VersionInfo version; + /** + * Validation rules that were applied at the time when the schema was registered. + */ + private final SchemaValidationRules rules; + /** + * Service's Time when the schema was registered. + */ + private final long timestamp; + /** + * A json format string representing the schema. This string will be populated only for serialization formats + * that the service can parse. + */ + private final String schemaString; +} + + diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/data/GroupProperties.java b/contract/src/main/java/io/pravega/schemaregistry/contract/data/GroupProperties.java new file mode 100644 index 000000000..4002ceebb --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/data/GroupProperties.java @@ -0,0 +1,74 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.contract.data; + +import com.google.common.collect.ImmutableMap; +import lombok.Builder; +import lombok.Data; + +/** + * Different configuration choices for a group. + * + * {@link GroupProperties#serializationFormat} identifies the serialization format used to describe the schema. + * {@link GroupProperties#schemaValidationRules} sets the schema validation policy that needs to be enforced for evolving schemas. + * {@link GroupProperties#allowMultipleTypes} that specifies if multiple schemas with distinct {@link SchemaInfo#type} + * are allowed to coexist within the group. A schema describes an object and each object type is distinctly identified by + * {@link SchemaInfo#type}. Registry service validates new schema with existing schema versions of the same name and versions + * it accordingly. Allowing multiple schemas, each versioned independently, allows applications to use schema registry groups + * for streaming scenarios like event sourcing, or message bus where different types of events could be written to the same + * stream. Similarly, a group with multiple schemas can be used to describe a database catalog with each schema representing + * a different table. + * The users can register new versions of each distinct type of schema, and the registry will check for compatibility + * for each type independently. + * {@link GroupProperties#properties} This is general purpose key value string to include any additional user defined information for the group. + */ +@Builder +@Data +public class GroupProperties { + /** + * Serialization format allowed for the group. + */ + private final SerializationFormat serializationFormat; + /** + * Schema validation rules to be applied for the group. 
+ */ + private final SchemaValidationRules schemaValidationRules; + /** + * Flag to indicate whether multiple types of schemas can be added to the group or not. If set to false, all schemas + * added to the group should have the same {@link SchemaInfo#type}. + */ + private final boolean allowMultipleTypes; + /** + * User defined key value strings for any metadata they want to associate with the group. + */ + private final ImmutableMap properties; + + public GroupProperties(SerializationFormat serializationFormat, SchemaValidationRules schemaValidationRules, boolean allowMultipleTypes) { + this(serializationFormat, schemaValidationRules, allowMultipleTypes, ImmutableMap.of()); + } + + public GroupProperties(SerializationFormat serializationFormat, SchemaValidationRules schemaValidationRules, boolean allowMultipleTypes, ImmutableMap properties) { + this.serializationFormat = serializationFormat; + this.schemaValidationRules = schemaValidationRules; + this.allowMultipleTypes = allowMultipleTypes; + this.properties = properties; + } + + public static final class GroupPropertiesBuilder { + private SchemaValidationRules schemaValidationRules = SchemaValidationRules.of(Compatibility.fullTransitive()); + private boolean allowMultipleTypes = false; + private ImmutableMap properties = ImmutableMap.of(); + + public GroupPropertiesBuilder compatibility(Compatibility compatibility) { + this.schemaValidationRules = SchemaValidationRules.of(compatibility); + return this; + } + } +} \ No newline at end of file diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaInfo.java b/contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaInfo.java new file mode 100644 index 000000000..bc4ed9f62 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaInfo.java @@ -0,0 +1,62 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.contract.data; + +import com.google.common.base.Preconditions; +import com.google.common.collect.ImmutableMap; +import io.pravega.common.ObjectBuilder; +import lombok.Builder; +import lombok.Data; + +import java.nio.ByteBuffer; + +/** + * Encapsulates properties of a schema. + * {@link SchemaInfo#type} object type represented by the schema. This is used to identify the exact object type. + * If (ref: {@link GroupProperties#allowMultipleTypes}) is set to true, the group will allow multiple schemas to coexist. + * {@link SchemaInfo#serializationFormat} Serialization format. + * {@link SchemaInfo#schemaData} Schema as an array of 8-bit unsigned bytes. This is schema-type specific and to be consumed + * by schema-type specific parsers. + * {@link SchemaInfo#properties} A key value map of strings where user defined metadata can be recorded with schemas. + * This is not interpreted by the registry service or client and can be used by applications for sharing any additional + * application specific information with the schema. + */ +@Data +@Builder +public class SchemaInfo { + /** + * Identifies the object type that is represented by the schema. + */ + private final String type; + /** + * Serialization format that this schema is intended to be used for. 
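// Sketch (not part of the patch) of building GroupProperties with the builder defined above. The
// defaults in GroupPropertiesBuilder apply when fields are omitted: fullTransitive compatibility,
// single type per group, empty properties. The Avro format, backward policy and "owner" metadata
// below are illustrative choices.
import com.google.common.collect.ImmutableMap;
import io.pravega.schemaregistry.contract.data.Compatibility;
import io.pravega.schemaregistry.contract.data.GroupProperties;
import io.pravega.schemaregistry.contract.data.SerializationFormat;

class GroupPropertiesSketch {
    static GroupProperties avroEventGroup() {
        return GroupProperties.builder()
                              .serializationFormat(SerializationFormat.Avro)
                              .compatibility(Compatibility.backward())
                              .allowMultipleTypes(true)
                              .properties(ImmutableMap.of("owner", "example-team"))
                              .build();
    }
}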
+ */ + private final SerializationFormat serializationFormat; + /** + * Schema as an array of 8-bit unsigned bytes. + */ + private final ByteBuffer schemaData; + /** + * User defined key value strings that users can use to add any additional metadata to the schema. + */ + private final ImmutableMap properties; + + public SchemaInfo(String type, SerializationFormat serializationFormat, ByteBuffer schemaData, ImmutableMap properties) { + Preconditions.checkArgument(type != null); + Preconditions.checkArgument(serializationFormat != SerializationFormat.Any); + this.type = type; + this.serializationFormat = serializationFormat; + this.schemaData = schemaData; + this.properties = properties; + } + + public static class SchemaInfoBuilder implements ObjectBuilder { + } +} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaValidationRule.java b/contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaValidationRule.java new file mode 100644 index 000000000..c89670543 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaValidationRule.java @@ -0,0 +1,23 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.contract.data; + +/** + * Base interface to define all schema validation rules. Schema validation rules are applied whenever new schemas are registered + * and only schemas that satisfy validation rules are accepted by the registry into the group. + */ +public interface SchemaValidationRule { + /** + * Name of the rule to identify it with. + * + * @return name of the rule. + */ + String getName(); +} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaValidationRules.java b/contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaValidationRules.java new file mode 100644 index 000000000..06f77fb09 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaValidationRules.java @@ -0,0 +1,66 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.contract.data; + +import com.google.common.base.Preconditions; +import io.pravega.common.ObjectBuilder; +import lombok.Builder; +import lombok.Data; + +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +/** + * Schema validation rules that are applied for checking if a schema is valid. + * This contains a set of rules {@link SchemaValidationRule}. Currently the only rule that is supported is {@link Compatibility}. + * The schema will be compared against one or more existing schemas in the group by checking it for satisfying each of the + * rules. + */ +@Data +@Builder +public class SchemaValidationRules { + /** + * Map of schema validation rule name to corresponding schema validation rule. + */ + private final Map rules; + + private SchemaValidationRules(Map rules) { + this.rules = rules; + } + + /** + * Method to create a rule for compatibility. 
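// Sketch (not part of the patch) of wrapping a schema document into a SchemaInfo. The
// "com.example.User" type name and the schema-string parameter are placeholders; note that the
// constructor above rejects a null type and the SerializationFormat.Any wildcard.
import com.google.common.collect.ImmutableMap;
import io.pravega.schemaregistry.contract.data.SchemaInfo;
import io.pravega.schemaregistry.contract.data.SerializationFormat;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

class SchemaInfoSketch {
    static SchemaInfo avroSchema(String avroSchemaJson) {
        return SchemaInfo.builder()
                         .type("com.example.User")
                         .serializationFormat(SerializationFormat.Avro)
                         .schemaData(ByteBuffer.wrap(avroSchemaJson.getBytes(StandardCharsets.UTF_8)))
                         .properties(ImmutableMap.of())
                         .build();
    }
}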
+ * + * @param compatibility compatibility policy to be used. + * @return A singleton rules map containing the compatibility rule. + */ + public static SchemaValidationRules of(Compatibility compatibility) { + return new SchemaValidationRules(Collections.singletonMap(compatibility.getName(), compatibility)); + } + + /** + * Method to create SchemaValidationRules from the list of supplied rules. If multiple same rule are present + * in the list then only the latest rule of each type is added to the Rules map. + * Currently the only rule supported is {@link Compatibility}. + * @param rules List of rules. + * @return SchemaValidationRules object. + */ + public static SchemaValidationRules of(List rules) { + Preconditions.checkNotNull(rules); + Preconditions.checkArgument(rules.stream().allMatch(x -> x instanceof Compatibility), "Only compatibility rule is supported."); + return new SchemaValidationRules(rules.stream().collect(Collectors.toMap(SchemaValidationRule::getName, x -> x))); + } + + public static class SchemaValidationRulesBuilder implements ObjectBuilder { + } + +} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaWithVersion.java b/contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaWithVersion.java new file mode 100644 index 000000000..45e730cbf --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaWithVersion.java @@ -0,0 +1,31 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.contract.data; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; + +/** + * Object that encapsulates schemaInfo with its associated version. + */ +@Data +@Builder +@AllArgsConstructor +public class SchemaWithVersion { + /** + * Schema Information object. + */ + private final SchemaInfo schemaInfo; + /** + * Version information object that identifies the corresponding schema object. + */ + private final VersionInfo versionInfo; +} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/data/SerializationFormat.java b/contract/src/main/java/io/pravega/schemaregistry/contract/data/SerializationFormat.java new file mode 100644 index 000000000..cecb9b257 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/data/SerializationFormat.java @@ -0,0 +1,45 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.contract.data; + +import lombok.AccessLevel; +import lombok.Getter; +import lombok.Setter; + +/** + * Different types of serialization formats used for serializing data. + * Registry supports Avro, Protobuf and Json serialization formats but any custom type could be used with the registry using custom type. + * + * If a serialization format is not present in the enum it can be specified using {@link SerializationFormat#custom} with {@link SerializationFormat#customTypeName}. + * Allowed values of {@link Compatibility} mode with custom type are AllowAny or DenyAll. 
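// Sketch (not part of the patch) of the two SchemaValidationRules factories above. Compatibility is
// currently the only supported rule type, so both forms produce a single "Compatibility" entry in
// the rules map; the explicit type witness on singletonList is an assumption about the generic
// signature of the list overload.
import io.pravega.schemaregistry.contract.data.Compatibility;
import io.pravega.schemaregistry.contract.data.SchemaValidationRule;
import io.pravega.schemaregistry.contract.data.SchemaValidationRules;
import java.util.Collections;

class ValidationRulesSketch {
    static SchemaValidationRules fromSingleRule() {
        return SchemaValidationRules.of(Compatibility.forward());
    }

    static SchemaValidationRules fromRuleList() {
        // The list overload keeps only the latest rule of each type and accepts Compatibility only.
        return SchemaValidationRules.of(
                Collections.<SchemaValidationRule>singletonList(Compatibility.backwardTransitive()));
    }
}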
+ */ + +public enum SerializationFormat { + Avro, + Protobuf, + Json, + Any, + Custom; + + @Getter + @Setter(AccessLevel.PRIVATE) + private String customTypeName; + + /** + * Method to define a custom serialization format with a custom name. + * @param customTypeName Custom type name. + * @return {@link SerializationFormat#Custom} with supplied custom type name. + */ + public static SerializationFormat custom(String customTypeName) { + SerializationFormat type = SerializationFormat.Custom; + type.setCustomTypeName(customTypeName); + return type; + } +} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/data/VersionInfo.java b/contract/src/main/java/io/pravega/schemaregistry/contract/data/VersionInfo.java new file mode 100644 index 000000000..c281e75e3 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/data/VersionInfo.java @@ -0,0 +1,47 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.contract.data; + +import io.pravega.common.ObjectBuilder; +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; + +/** + * Version information object that encapsulates properties that uniquely identify a specific version of a schema within a group. + * + * {@link VersionInfo#type} is same as {@link SchemaInfo#type} which represents the object type for which the version is computed. + * {@link VersionInfo#version} the registry assigned monotonically increasing version number for the schema for specific object type. + * Since the version number is per object type, so type and version number forms a unique pair. + * {@link VersionInfo#ordinal} Absolute ordinal of the schema for all schemas in the group. This uniquely identifies the + * version within a group. + */ +@Data +@Builder +@AllArgsConstructor +public class VersionInfo { + /** + * Object type which is declared in the corresponding {@link SchemaInfo#type} for the schemainfo that is identified + * by this version info. + */ + private final String type; + /** + * A version number that identifies the position of schema among other schemas in the group that share the same 'type'. + */ + private final int version; + /** + * A position identifier that uniquely identifies the schema within a group and represents the order in which this + * schema was included in the group. + */ + private final int ordinal; + + public static class VersionInfoBuilder implements ObjectBuilder { + } +} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/AddedTo.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/AddedTo.java new file mode 100644 index 000000000..310f78ff8 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/AddedTo.java @@ -0,0 +1,101 @@ +/* + * Pravega Schema Registry APIs + * REST APIs for Pravega Schema Registry. + * + * OpenAPI spec version: 0.0.1 + * + * + * NOTE: This class is auto generated by the swagger code generator program. + * https://github.com/swagger-api/swagger-codegen.git + * Do not edit the class manually. 
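// Sketch (not part of the patch) of the custom-format escape hatch and of VersionInfo's three
// coordinates. The "Thrift" name and the version numbers are illustrative; as noted above, groups
// using a custom format are limited to the AllowAny or DenyAll compatibility policies.
import io.pravega.schemaregistry.contract.data.SerializationFormat;
import io.pravega.schemaregistry.contract.data.VersionInfo;

class FormatAndVersionSketch {
    static SerializationFormat thriftFormat() {
        return SerializationFormat.custom("Thrift");
    }

    static VersionInfo exampleVersion() {
        // type, version within that type, and absolute ordinal within the group.
        return new VersionInfo("com.example.User", 2, 5);
    }
}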
+ */ + + +package io.pravega.schemaregistry.contract.generated.rest.model; + +import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonCreator; +import io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import javax.validation.constraints.*; + +/** + * Map of Group names to versionInfos in the group. This is for all the groups where the schema is registered. + */ +@ApiModel(description = "Map of Group names to versionInfos in the group. This is for all the groups where the schema is registered.") + +public class AddedTo { + @JsonProperty("groups") + private Map groups = new HashMap(); + + public AddedTo groups(Map groups) { + this.groups = groups; + return this; + } + + public AddedTo putGroupsItem(String key, VersionInfo groupsItem) { + this.groups.put(key, groupsItem); + return this; + } + + /** + * Get groups + * @return groups + **/ + @JsonProperty("groups") + @ApiModelProperty(required = true, value = "") + @NotNull + public Map getGroups() { + return groups; + } + + public void setGroups(Map groups) { + this.groups = groups; + } + + + @Override + public boolean equals(java.lang.Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + AddedTo addedTo = (AddedTo) o; + return Objects.equals(this.groups, addedTo.groups); + } + + @Override + public int hashCode() { + return Objects.hash(groups); + } + + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class AddedTo {\n"); + + sb.append(" groups: ").append(toIndentedString(groups)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces + * (except the first line). + */ + private String toIndentedString(java.lang.Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} + diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/CanRead.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/CanRead.java new file mode 100644 index 000000000..5f101741a --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/CanRead.java @@ -0,0 +1,92 @@ +/* + * Pravega Schema Registry APIs + * REST APIs for Pravega Schema Registry. + * + * OpenAPI spec version: 0.0.1 + * + * + * NOTE: This class is auto generated by the swagger code generator program. + * https://github.com/swagger-api/swagger-codegen.git + * Do not edit the class manually. + */ + + +package io.pravega.schemaregistry.contract.generated.rest.model; + +import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonCreator; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import javax.validation.constraints.*; + +/** + * Response object for canRead api. + */ +@ApiModel(description = "Response object for canRead api.") + +public class CanRead { + @JsonProperty("compatible") + private Boolean compatible = null; + + public CanRead compatible(Boolean compatible) { + this.compatible = compatible; + return this; + } + + /** + * Whether given schema is compatible and can be used for reads. 
Compatibility is checked against existing group schemas subject to group's configured compatibility policy. + * @return compatible + **/ + @JsonProperty("compatible") + @ApiModelProperty(required = true, value = "Whether given schema is compatible and can be used for reads. Compatibility is checked against existing group schemas subject to group's configured compatibility policy.") + @NotNull + public Boolean isCompatible() { + return compatible; + } + + public void setCompatible(Boolean compatible) { + this.compatible = compatible; + } + + + @Override + public boolean equals(java.lang.Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + CanRead canRead = (CanRead) o; + return Objects.equals(this.compatible, canRead.compatible); + } + + @Override + public int hashCode() { + return Objects.hash(compatible); + } + + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class CanRead {\n"); + + sb.append(" compatible: ").append(toIndentedString(compatible)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces + * (except the first line). + */ + private String toIndentedString(java.lang.Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} + diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/CodecTypesList.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/CodecTypesList.java new file mode 100644 index 000000000..96c10bacc --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/CodecTypesList.java @@ -0,0 +1,101 @@ +/* + * Pravega Schema Registry APIs + * REST APIs for Pravega Schema Registry. + * + * OpenAPI spec version: 0.0.1 + * + * + * NOTE: This class is auto generated by the swagger code generator program. + * https://github.com/swagger-api/swagger-codegen.git + * Do not edit the class manually. + */ + + +package io.pravega.schemaregistry.contract.generated.rest.model; + +import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonCreator; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import java.util.ArrayList; +import java.util.List; +import javax.validation.constraints.*; + +/** + * Response object for listCodecTypes. + */ +@ApiModel(description = "Response object for listCodecTypes.") + +public class CodecTypesList { + @JsonProperty("codecTypes") + private List codecTypes = null; + + public CodecTypesList codecTypes(List codecTypes) { + this.codecTypes = codecTypes; + return this; + } + + public CodecTypesList addCodecTypesItem(String codecTypesItem) { + if (this.codecTypes == null) { + this.codecTypes = new ArrayList(); + } + this.codecTypes.add(codecTypesItem); + return this; + } + + /** + * List of codecTypes. 
+ * @return codecTypes + **/ + @JsonProperty("codecTypes") + @ApiModelProperty(value = "List of codecTypes.") + public List getCodecTypes() { + return codecTypes; + } + + public void setCodecTypes(List codecTypes) { + this.codecTypes = codecTypes; + } + + + @Override + public boolean equals(java.lang.Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + CodecTypesList codecTypesList = (CodecTypesList) o; + return Objects.equals(this.codecTypes, codecTypesList.codecTypes); + } + + @Override + public int hashCode() { + return Objects.hash(codecTypes); + } + + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class CodecTypesList {\n"); + + sb.append(" codecTypes: ").append(toIndentedString(codecTypes)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces + * (except the first line). + */ + private String toIndentedString(java.lang.Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} + diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/Compatibility.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/Compatibility.java new file mode 100644 index 000000000..459893324 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/Compatibility.java @@ -0,0 +1,216 @@ +/* + * Pravega Schema Registry APIs + * REST APIs for Pravega Schema Registry. + * + * OpenAPI spec version: 0.0.1 + * + * + * NOTE: This class is auto generated by the swagger code generator program. + * https://github.com/swagger-api/swagger-codegen.git + * Do not edit the class manually. + */ + + +package io.pravega.schemaregistry.contract.generated.rest.model; + +import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonValue; +import io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import javax.validation.constraints.*; + +/** + * Schema Compatibility validation rule. + */ +@ApiModel(description = "Schema Compatibility validation rule.") + +public class Compatibility { + @JsonProperty("name") + private String name = null; + + /** + * Compatibility policy enum. 
+ */ + public enum PolicyEnum { + ALLOWANY("AllowAny"), + + DENYALL("DenyAll"), + + BACKWARD("Backward"), + + FORWARD("Forward"), + + FORWARDTRANSITIVE("ForwardTransitive"), + + BACKWARDTRANSITIVE("BackwardTransitive"), + + BACKWARDTILL("BackwardTill"), + + FORWARDTILL("ForwardTill"), + + BACKWARDANDFORWARDTILL("BackwardAndForwardTill"), + + FULL("Full"), + + FULLTRANSITIVE("FullTransitive"); + + private String value; + + PolicyEnum(String value) { + this.value = value; + } + + @Override + @JsonValue + public String toString() { + return String.valueOf(value); + } + + @JsonCreator + public static PolicyEnum fromValue(String text) { + for (PolicyEnum b : PolicyEnum.values()) { + if (String.valueOf(b.value).equals(text)) { + return b; + } + } + return null; + } + } + + @JsonProperty("policy") + private PolicyEnum policy = null; + + @JsonProperty("backwardTill") + private VersionInfo backwardTill = null; + + @JsonProperty("forwardTill") + private VersionInfo forwardTill = null; + + public Compatibility name(String name) { + this.name = name; + return this; + } + + /** + * Name is used to identify the type of SchemaValidationRule. For Compatibility rule the name should be \"Compatibility\". + * @return name + **/ + @JsonProperty("name") + @ApiModelProperty(required = true, value = "Name is used to identify the type of SchemaValidationRule. For Compatibility rule the name should be \"Compatibility\".") + @NotNull + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public Compatibility policy(PolicyEnum policy) { + this.policy = policy; + return this; + } + + /** + * Compatibility policy enum. + * @return policy + **/ + @JsonProperty("policy") + @ApiModelProperty(required = true, value = "Compatibility policy enum.") + @NotNull + public PolicyEnum getPolicy() { + return policy; + } + + public void setPolicy(PolicyEnum policy) { + this.policy = policy; + } + + public Compatibility backwardTill(VersionInfo backwardTill) { + this.backwardTill = backwardTill; + return this; + } + + /** + * Version for backward till if policy is BackwardTill or BackwardAndForwardTill. + * @return backwardTill + **/ + @JsonProperty("backwardTill") + @ApiModelProperty(value = "Version for backward till if policy is BackwardTill or BackwardAndForwardTill.") + public VersionInfo getBackwardTill() { + return backwardTill; + } + + public void setBackwardTill(VersionInfo backwardTill) { + this.backwardTill = backwardTill; + } + + public Compatibility forwardTill(VersionInfo forwardTill) { + this.forwardTill = forwardTill; + return this; + } + + /** + * Version for forward till if policy is ForwardTill or BackwardAndForwardTill. 
+ * @return forwardTill + **/ + @JsonProperty("forwardTill") + @ApiModelProperty(value = "Version for forward till if policy is ForwardTill or BackwardAndForwardTill.") + public VersionInfo getForwardTill() { + return forwardTill; + } + + public void setForwardTill(VersionInfo forwardTill) { + this.forwardTill = forwardTill; + } + + + @Override + public boolean equals(java.lang.Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Compatibility compatibility = (Compatibility) o; + return Objects.equals(this.name, compatibility.name) && + Objects.equals(this.policy, compatibility.policy) && + Objects.equals(this.backwardTill, compatibility.backwardTill) && + Objects.equals(this.forwardTill, compatibility.forwardTill); + } + + @Override + public int hashCode() { + return Objects.hash(name, policy, backwardTill, forwardTill); + } + + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class Compatibility {\n"); + + sb.append(" name: ").append(toIndentedString(name)).append("\n"); + sb.append(" policy: ").append(toIndentedString(policy)).append("\n"); + sb.append(" backwardTill: ").append(toIndentedString(backwardTill)).append("\n"); + sb.append(" forwardTill: ").append(toIndentedString(forwardTill)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces + * (except the first line). + */ + private String toIndentedString(java.lang.Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} + diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/CreateGroupRequest.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/CreateGroupRequest.java new file mode 100644 index 000000000..22d2b8b29 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/CreateGroupRequest.java @@ -0,0 +1,117 @@ +/* + * Pravega Schema Registry APIs + * REST APIs for Pravega Schema Registry. + * + * OpenAPI spec version: 0.0.1 + * + * + * NOTE: This class is auto generated by the swagger code generator program. + * https://github.com/swagger-api/swagger-codegen.git + * Do not edit the class manually. 
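// Sketch (not part of the patch) of populating the generated REST Compatibility model with its
// fluent setters, plus the PolicyEnum string round trip used during JSON (de)serialization. This is
// the wire model; io.pravega.schemaregistry.contract.data.Compatibility earlier in the patch is the
// corresponding API-facing type. The sketch class name is an assumption.
import io.pravega.schemaregistry.contract.generated.rest.model.Compatibility;

class RestCompatibilitySketch {
    static Compatibility backwardPolicy() {
        return new Compatibility()
                .name("Compatibility")                        // rule name expected for this rule type
                .policy(Compatibility.PolicyEnum.BACKWARD);   // backwardTill/forwardTill stay null here
    }

    static Compatibility.PolicyEnum parse(String text) {
        // fromValue returns null for unknown policy strings rather than throwing.
        return Compatibility.PolicyEnum.fromValue(text);
    }
}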
+ */ + + +package io.pravega.schemaregistry.contract.generated.rest.model; + +import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonCreator; +import io.pravega.schemaregistry.contract.generated.rest.model.GroupProperties; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import javax.validation.constraints.*; + +/** + * CreateGroupRequest + */ + +public class CreateGroupRequest { + @JsonProperty("groupName") + private String groupName = null; + + @JsonProperty("groupProperties") + private GroupProperties groupProperties = null; + + public CreateGroupRequest groupName(String groupName) { + this.groupName = groupName; + return this; + } + + /** + * Get groupName + * @return groupName + **/ + @JsonProperty("groupName") + @ApiModelProperty(required = true, value = "") + @NotNull + public String getGroupName() { + return groupName; + } + + public void setGroupName(String groupName) { + this.groupName = groupName; + } + + public CreateGroupRequest groupProperties(GroupProperties groupProperties) { + this.groupProperties = groupProperties; + return this; + } + + /** + * Get groupProperties + * @return groupProperties + **/ + @JsonProperty("groupProperties") + @ApiModelProperty(required = true, value = "") + @NotNull + public GroupProperties getGroupProperties() { + return groupProperties; + } + + public void setGroupProperties(GroupProperties groupProperties) { + this.groupProperties = groupProperties; + } + + + @Override + public boolean equals(java.lang.Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + CreateGroupRequest createGroupRequest = (CreateGroupRequest) o; + return Objects.equals(this.groupName, createGroupRequest.groupName) && + Objects.equals(this.groupProperties, createGroupRequest.groupProperties); + } + + @Override + public int hashCode() { + return Objects.hash(groupName, groupProperties); + } + + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class CreateGroupRequest {\n"); + + sb.append(" groupName: ").append(toIndentedString(groupName)).append("\n"); + sb.append(" groupProperties: ").append(toIndentedString(groupProperties)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces + * (except the first line). + */ + private String toIndentedString(java.lang.Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} + diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/EncodingId.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/EncodingId.java new file mode 100644 index 000000000..50f95270c --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/EncodingId.java @@ -0,0 +1,92 @@ +/* + * Pravega Schema Registry APIs + * REST APIs for Pravega Schema Registry. + * + * OpenAPI spec version: 0.0.1 + * + * + * NOTE: This class is auto generated by the swagger code generator program. + * https://github.com/swagger-api/swagger-codegen.git + * Do not edit the class manually. 
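// Sketch (not part of the patch) of assembling the REST CreateGroupRequest with its fluent setters.
// The group name parameter is a placeholder, and the GroupProperties value is assumed to be fully
// populated elsewhere (its serializationFormat and schemaValidationRules fields are required).
import io.pravega.schemaregistry.contract.generated.rest.model.CreateGroupRequest;
import io.pravega.schemaregistry.contract.generated.rest.model.GroupProperties;

class CreateGroupRequestSketch {
    static CreateGroupRequest createGroupRequest(String groupName, GroupProperties groupProperties) {
        return new CreateGroupRequest()
                .groupName(groupName)
                .groupProperties(groupProperties);
    }
}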
+ */ + + +package io.pravega.schemaregistry.contract.generated.rest.model; + +import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonCreator; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import javax.validation.constraints.*; + +/** + * Encoding id that uniquely identifies a schema version and codec type pair. + */ +@ApiModel(description = "Encoding id that uniquely identifies a schema version and codec type pair.") + +public class EncodingId { + @JsonProperty("encodingId") + private Integer encodingId = null; + + public EncodingId encodingId(Integer encodingId) { + this.encodingId = encodingId; + return this; + } + + /** + * encoding id generated by service. + * @return encodingId + **/ + @JsonProperty("encodingId") + @ApiModelProperty(required = true, value = "encoding id generated by service.") + @NotNull + public Integer getEncodingId() { + return encodingId; + } + + public void setEncodingId(Integer encodingId) { + this.encodingId = encodingId; + } + + + @Override + public boolean equals(java.lang.Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + EncodingId encodingId = (EncodingId) o; + return Objects.equals(this.encodingId, encodingId.encodingId); + } + + @Override + public int hashCode() { + return Objects.hash(encodingId); + } + + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class EncodingId {\n"); + + sb.append(" encodingId: ").append(toIndentedString(encodingId)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces + * (except the first line). + */ + private String toIndentedString(java.lang.Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} + diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/EncodingInfo.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/EncodingInfo.java new file mode 100644 index 000000000..1276ec038 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/EncodingInfo.java @@ -0,0 +1,144 @@ +/* + * Pravega Schema Registry APIs + * REST APIs for Pravega Schema Registry. + * + * OpenAPI spec version: 0.0.1 + * + * + * NOTE: This class is auto generated by the swagger code generator program. + * https://github.com/swagger-api/swagger-codegen.git + * Do not edit the class manually. + */ + + +package io.pravega.schemaregistry.contract.generated.rest.model; + +import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonCreator; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo; +import io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import javax.validation.constraints.*; + +/** + * Encoding information object that resolves the schema version and codec type used for corresponding encoding id. 
+ */ +@ApiModel(description = "Encoding information object that resolves the schema version and codec type used for corresponding encoding id.") + +public class EncodingInfo { + @JsonProperty("schemaInfo") + private SchemaInfo schemaInfo = null; + + @JsonProperty("versionInfo") + private VersionInfo versionInfo = null; + + @JsonProperty("codecType") + private String codecType = null; + + public EncodingInfo schemaInfo(SchemaInfo schemaInfo) { + this.schemaInfo = schemaInfo; + return this; + } + + /** + * Schema information object. + * @return schemaInfo + **/ + @JsonProperty("schemaInfo") + @ApiModelProperty(required = true, value = "Schema information object.") + @NotNull + public SchemaInfo getSchemaInfo() { + return schemaInfo; + } + + public void setSchemaInfo(SchemaInfo schemaInfo) { + this.schemaInfo = schemaInfo; + } + + public EncodingInfo versionInfo(VersionInfo versionInfo) { + this.versionInfo = versionInfo; + return this; + } + + /** + * Version information object. + * @return versionInfo + **/ + @JsonProperty("versionInfo") + @ApiModelProperty(required = true, value = "Version information object.") + @NotNull + public VersionInfo getVersionInfo() { + return versionInfo; + } + + public void setVersionInfo(VersionInfo versionInfo) { + this.versionInfo = versionInfo; + } + + public EncodingInfo codecType(String codecType) { + this.codecType = codecType; + return this; + } + + /** + * Codec type. + * @return codecType + **/ + @JsonProperty("codecType") + @ApiModelProperty(required = true, value = "Codec type.") + @NotNull + public String getCodecType() { + return codecType; + } + + public void setCodecType(String codecType) { + this.codecType = codecType; + } + + + @Override + public boolean equals(java.lang.Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + EncodingInfo encodingInfo = (EncodingInfo) o; + return Objects.equals(this.schemaInfo, encodingInfo.schemaInfo) && + Objects.equals(this.versionInfo, encodingInfo.versionInfo) && + Objects.equals(this.codecType, encodingInfo.codecType); + } + + @Override + public int hashCode() { + return Objects.hash(schemaInfo, versionInfo, codecType); + } + + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class EncodingInfo {\n"); + + sb.append(" schemaInfo: ").append(toIndentedString(schemaInfo)).append("\n"); + sb.append(" versionInfo: ").append(toIndentedString(versionInfo)).append("\n"); + sb.append(" codecType: ").append(toIndentedString(codecType)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces + * (except the first line). + */ + private String toIndentedString(java.lang.Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} + diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/GetEncodingIdRequest.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/GetEncodingIdRequest.java new file mode 100644 index 000000000..6376af636 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/GetEncodingIdRequest.java @@ -0,0 +1,117 @@ +/* + * Pravega Schema Registry APIs + * REST APIs for Pravega Schema Registry. + * + * OpenAPI spec version: 0.0.1 + * + * + * NOTE: This class is auto generated by the swagger code generator program. 
+ * https://github.com/swagger-api/swagger-codegen.git + * Do not edit the class manually. + */ + + +package io.pravega.schemaregistry.contract.generated.rest.model; + +import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonCreator; +import io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import javax.validation.constraints.*; + +/** + * GetEncodingIdRequest + */ + +public class GetEncodingIdRequest { + @JsonProperty("versionInfo") + private VersionInfo versionInfo = null; + + @JsonProperty("codecType") + private String codecType = null; + + public GetEncodingIdRequest versionInfo(VersionInfo versionInfo) { + this.versionInfo = versionInfo; + return this; + } + + /** + * Get versionInfo + * @return versionInfo + **/ + @JsonProperty("versionInfo") + @ApiModelProperty(required = true, value = "") + @NotNull + public VersionInfo getVersionInfo() { + return versionInfo; + } + + public void setVersionInfo(VersionInfo versionInfo) { + this.versionInfo = versionInfo; + } + + public GetEncodingIdRequest codecType(String codecType) { + this.codecType = codecType; + return this; + } + + /** + * Get codecType + * @return codecType + **/ + @JsonProperty("codecType") + @ApiModelProperty(required = true, value = "") + @NotNull + public String getCodecType() { + return codecType; + } + + public void setCodecType(String codecType) { + this.codecType = codecType; + } + + + @Override + public boolean equals(java.lang.Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + GetEncodingIdRequest getEncodingIdRequest = (GetEncodingIdRequest) o; + return Objects.equals(this.versionInfo, getEncodingIdRequest.versionInfo) && + Objects.equals(this.codecType, getEncodingIdRequest.codecType); + } + + @Override + public int hashCode() { + return Objects.hash(versionInfo, codecType); + } + + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class GetEncodingIdRequest {\n"); + + sb.append(" versionInfo: ").append(toIndentedString(versionInfo)).append("\n"); + sb.append(" codecType: ").append(toIndentedString(codecType)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces + * (except the first line). + */ + private String toIndentedString(java.lang.Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} + diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/GroupHistory.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/GroupHistory.java new file mode 100644 index 000000000..cf195ba93 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/GroupHistory.java @@ -0,0 +1,101 @@ +/* + * Pravega Schema Registry APIs + * REST APIs for Pravega Schema Registry. + * + * OpenAPI spec version: 0.0.1 + * + * + * NOTE: This class is auto generated by the swagger code generator program. + * https://github.com/swagger-api/swagger-codegen.git + * Do not edit the class manually. 
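// Sketch (not part of the patch) of the request body used to obtain an encoding id for a schema
// version and codec type pair. The "gzip" codec name is a placeholder, and the VersionInfo here is
// the generated REST model, assumed to come from an earlier registration or lookup response.
import io.pravega.schemaregistry.contract.generated.rest.model.GetEncodingIdRequest;
import io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo;

class EncodingIdRequestSketch {
    static GetEncodingIdRequest gzipRequest(VersionInfo version) {
        return new GetEncodingIdRequest()
                .versionInfo(version)
                .codecType("gzip");
    }
}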
+ */ + + +package io.pravega.schemaregistry.contract.generated.rest.model; + +import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonCreator; +import io.pravega.schemaregistry.contract.generated.rest.model.GroupHistoryRecord; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import java.util.ArrayList; +import java.util.List; +import javax.validation.constraints.*; + +/** + * GroupHistory + */ + +public class GroupHistory { + @JsonProperty("history") + private List history = null; + + public GroupHistory history(List history) { + this.history = history; + return this; + } + + public GroupHistory addHistoryItem(GroupHistoryRecord historyItem) { + if (this.history == null) { + this.history = new ArrayList(); + } + this.history.add(historyItem); + return this; + } + + /** + * Chronological list of Group History records. + * @return history + **/ + @JsonProperty("history") + @ApiModelProperty(value = "Chronological list of Group History records.") + public List getHistory() { + return history; + } + + public void setHistory(List history) { + this.history = history; + } + + + @Override + public boolean equals(java.lang.Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + GroupHistory groupHistory = (GroupHistory) o; + return Objects.equals(this.history, groupHistory.history); + } + + @Override + public int hashCode() { + return Objects.hash(history); + } + + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class GroupHistory {\n"); + + sb.append(" history: ").append(toIndentedString(history)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces + * (except the first line). + */ + private String toIndentedString(java.lang.Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} + diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/GroupHistoryRecord.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/GroupHistoryRecord.java new file mode 100644 index 000000000..6d7dd7476 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/GroupHistoryRecord.java @@ -0,0 +1,194 @@ +/* + * Pravega Schema Registry APIs + * REST APIs for Pravega Schema Registry. + * + * OpenAPI spec version: 0.0.1 + * + * + * NOTE: This class is auto generated by the swagger code generator program. + * https://github.com/swagger-api/swagger-codegen.git + * Do not edit the class manually. + */ + + +package io.pravega.schemaregistry.contract.generated.rest.model; + +import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonCreator; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaValidationRules; +import io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import javax.validation.constraints.*; + +/** + * Group History Record that describes each schema evolution - schema information, version generated for the schema, time and rules used for schema validation. 
+ */ +@ApiModel(description = "Group History Record that describes each schema evolution - schema information, version generated for the schema, time and rules used for schema validation.") + +public class GroupHistoryRecord { + @JsonProperty("schemaInfo") + private SchemaInfo schemaInfo = null; + + @JsonProperty("version") + private VersionInfo version = null; + + @JsonProperty("validationRules") + private SchemaValidationRules validationRules = null; + + @JsonProperty("timestamp") + private Long timestamp = null; + + @JsonProperty("schemaString") + private String schemaString = null; + + public GroupHistoryRecord schemaInfo(SchemaInfo schemaInfo) { + this.schemaInfo = schemaInfo; + return this; + } + + /** + * Schema information object. + * @return schemaInfo + **/ + @JsonProperty("schemaInfo") + @ApiModelProperty(required = true, value = "Schema information object.") + @NotNull + public SchemaInfo getSchemaInfo() { + return schemaInfo; + } + + public void setSchemaInfo(SchemaInfo schemaInfo) { + this.schemaInfo = schemaInfo; + } + + public GroupHistoryRecord version(VersionInfo version) { + this.version = version; + return this; + } + + /** + * Schema version information object. + * @return version + **/ + @JsonProperty("version") + @ApiModelProperty(required = true, value = "Schema version information object.") + @NotNull + public VersionInfo getVersion() { + return version; + } + + public void setVersion(VersionInfo version) { + this.version = version; + } + + public GroupHistoryRecord validationRules(SchemaValidationRules validationRules) { + this.validationRules = validationRules; + return this; + } + + /** + * Schema validation rules applied. + * @return validationRules + **/ + @JsonProperty("validationRules") + @ApiModelProperty(required = true, value = "Schema validation rules applied.") + @NotNull + public SchemaValidationRules getValidationRules() { + return validationRules; + } + + public void setValidationRules(SchemaValidationRules validationRules) { + this.validationRules = validationRules; + } + + public GroupHistoryRecord timestamp(Long timestamp) { + this.timestamp = timestamp; + return this; + } + + /** + * Timestamp when the schema was added. + * @return timestamp + **/ + @JsonProperty("timestamp") + @ApiModelProperty(required = true, value = "Timestamp when the schema was added.") + @NotNull + public Long getTimestamp() { + return timestamp; + } + + public void setTimestamp(Long timestamp) { + this.timestamp = timestamp; + } + + public GroupHistoryRecord schemaString(String schemaString) { + this.schemaString = schemaString; + return this; + } + + /** + * Schema as json string for serialization formats that registry service understands. 
+ * @return schemaString + **/ + @JsonProperty("schemaString") + @ApiModelProperty(value = "Schema as json string for serialization formats that registry service understands.") + public String getSchemaString() { + return schemaString; + } + + public void setSchemaString(String schemaString) { + this.schemaString = schemaString; + } + + + @Override + public boolean equals(java.lang.Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + GroupHistoryRecord groupHistoryRecord = (GroupHistoryRecord) o; + return Objects.equals(this.schemaInfo, groupHistoryRecord.schemaInfo) && + Objects.equals(this.version, groupHistoryRecord.version) && + Objects.equals(this.validationRules, groupHistoryRecord.validationRules) && + Objects.equals(this.timestamp, groupHistoryRecord.timestamp) && + Objects.equals(this.schemaString, groupHistoryRecord.schemaString); + } + + @Override + public int hashCode() { + return Objects.hash(schemaInfo, version, validationRules, timestamp, schemaString); + } + + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class GroupHistoryRecord {\n"); + + sb.append(" schemaInfo: ").append(toIndentedString(schemaInfo)).append("\n"); + sb.append(" version: ").append(toIndentedString(version)).append("\n"); + sb.append(" validationRules: ").append(toIndentedString(validationRules)).append("\n"); + sb.append(" timestamp: ").append(toIndentedString(timestamp)).append("\n"); + sb.append(" schemaString: ").append(toIndentedString(schemaString)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces + * (except the first line). + */ + private String toIndentedString(java.lang.Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} + diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/GroupProperties.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/GroupProperties.java new file mode 100644 index 000000000..4bbb60b12 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/GroupProperties.java @@ -0,0 +1,179 @@ +/* + * Pravega Schema Registry APIs + * REST APIs for Pravega Schema Registry. + * + * OpenAPI spec version: 0.0.1 + * + * + * NOTE: This class is auto generated by the swagger code generator program. + * https://github.com/swagger-api/swagger-codegen.git + * Do not edit the class manually. + */ + + +package io.pravega.schemaregistry.contract.generated.rest.model; + +import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonCreator; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaValidationRules; +import io.pravega.schemaregistry.contract.generated.rest.model.SerializationFormat; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import javax.validation.constraints.*; + +/** + * Metadata for a group. 
+ */ +@ApiModel(description = "Metadata for a group.") + +public class GroupProperties { + @JsonProperty("serializationFormat") + private SerializationFormat serializationFormat = null; + + @JsonProperty("schemaValidationRules") + private SchemaValidationRules schemaValidationRules = null; + + @JsonProperty("allowMultipleTypes") + private Boolean allowMultipleTypes = null; + + @JsonProperty("properties") + private Map properties = null; + + public GroupProperties serializationFormat(SerializationFormat serializationFormat) { + this.serializationFormat = serializationFormat; + return this; + } + + /** + * serialization format for the group. + * @return serializationFormat + **/ + @JsonProperty("serializationFormat") + @ApiModelProperty(required = true, value = "serialization format for the group.") + @NotNull + public SerializationFormat getSerializationFormat() { + return serializationFormat; + } + + public void setSerializationFormat(SerializationFormat serializationFormat) { + this.serializationFormat = serializationFormat; + } + + public GroupProperties schemaValidationRules(SchemaValidationRules schemaValidationRules) { + this.schemaValidationRules = schemaValidationRules; + return this; + } + + /** + * Validation rules to apply while registering new schema. + * @return schemaValidationRules + **/ + @JsonProperty("schemaValidationRules") + @ApiModelProperty(required = true, value = "Validation rules to apply while registering new schema.") + @NotNull + public SchemaValidationRules getSchemaValidationRules() { + return schemaValidationRules; + } + + public void setSchemaValidationRules(SchemaValidationRules schemaValidationRules) { + this.schemaValidationRules = schemaValidationRules; + } + + public GroupProperties allowMultipleTypes(Boolean allowMultipleTypes) { + this.allowMultipleTypes = allowMultipleTypes; + return this; + } + + /** + * Flag to indicate whether to allow multiple schemas representing distinct objects to be registered in the group. + * @return allowMultipleTypes + **/ + @JsonProperty("allowMultipleTypes") + @ApiModelProperty(required = true, value = "Flag to indicate whether to allow multiple schemas representing distinct objects to be registered in the group.") + @NotNull + public Boolean isAllowMultipleTypes() { + return allowMultipleTypes; + } + + public void setAllowMultipleTypes(Boolean allowMultipleTypes) { + this.allowMultipleTypes = allowMultipleTypes; + } + + public GroupProperties properties(Map properties) { + this.properties = properties; + return this; + } + + public GroupProperties putPropertiesItem(String key, String propertiesItem) { + if (this.properties == null) { + this.properties = new HashMap(); + } + this.properties.put(key, propertiesItem); + return this; + } + + /** + * User defined Key value strings. 
+ * @return properties + **/ + @JsonProperty("properties") + @ApiModelProperty(value = "User defined Key value strings.") + public Map getProperties() { + return properties; + } + + public void setProperties(Map properties) { + this.properties = properties; + } + + + @Override + public boolean equals(java.lang.Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + GroupProperties groupProperties = (GroupProperties) o; + return Objects.equals(this.serializationFormat, groupProperties.serializationFormat) && + Objects.equals(this.schemaValidationRules, groupProperties.schemaValidationRules) && + Objects.equals(this.allowMultipleTypes, groupProperties.allowMultipleTypes) && + Objects.equals(this.properties, groupProperties.properties); + } + + @Override + public int hashCode() { + return Objects.hash(serializationFormat, schemaValidationRules, allowMultipleTypes, properties); + } + + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class GroupProperties {\n"); + + sb.append(" serializationFormat: ").append(toIndentedString(serializationFormat)).append("\n"); + sb.append(" schemaValidationRules: ").append(toIndentedString(schemaValidationRules)).append("\n"); + sb.append(" allowMultipleTypes: ").append(toIndentedString(allowMultipleTypes)).append("\n"); + sb.append(" properties: ").append(toIndentedString(properties)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces + * (except the first line). + */ + private String toIndentedString(java.lang.Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} + diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/ListGroupsResponse.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/ListGroupsResponse.java new file mode 100644 index 000000000..966b6898f --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/ListGroupsResponse.java @@ -0,0 +1,128 @@ +/* + * Pravega Schema Registry APIs + * REST APIs for Pravega Schema Registry. + * + * OpenAPI spec version: 0.0.1 + * + * + * NOTE: This class is auto generated by the swagger code generator program. + * https://github.com/swagger-api/swagger-codegen.git + * Do not edit the class manually. + */ + + +package io.pravega.schemaregistry.contract.generated.rest.model; + +import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonCreator; +import io.pravega.schemaregistry.contract.generated.rest.model.GroupProperties; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import javax.validation.constraints.*; + +/** + * Map of Group names to group properties. For partially created groups, the group properties may be null. + */ +@ApiModel(description = "Map of Group names to group properties. 
For partially created groups, the group properties may be null.")
+
+public class ListGroupsResponse {
+    @JsonProperty("groups")
+    private Map<String, GroupProperties> groups = null;
+
+    @JsonProperty("continuationToken")
+    private String continuationToken = null;
+
+    public ListGroupsResponse groups(Map<String, GroupProperties> groups) {
+        this.groups = groups;
+        return this;
+    }
+
+    public ListGroupsResponse putGroupsItem(String key, GroupProperties groupsItem) {
+        if (this.groups == null) {
+            this.groups = new HashMap<String, GroupProperties>();
+        }
+        this.groups.put(key, groupsItem);
+        return this;
+    }
+
+    /**
+     * Get groups
+     * @return groups
+     **/
+    @JsonProperty("groups")
+    @ApiModelProperty(value = "")
+    public Map<String, GroupProperties> getGroups() {
+        return groups;
+    }
+
+    public void setGroups(Map<String, GroupProperties> groups) {
+        this.groups = groups;
+    }
+
+    public ListGroupsResponse continuationToken(String continuationToken) {
+        this.continuationToken = continuationToken;
+        return this;
+    }
+
+    /**
+     * Continuation token to identify the position of last group in the response.
+     * @return continuationToken
+     **/
+    @JsonProperty("continuationToken")
+    @ApiModelProperty(required = true, value = "Continuation token to identify the position of last group in the response.")
+    @NotNull
+    public String getContinuationToken() {
+        return continuationToken;
+    }
+
+    public void setContinuationToken(String continuationToken) {
+        this.continuationToken = continuationToken;
+    }
+
+
+    @Override
+    public boolean equals(java.lang.Object o) {
+        if (this == o) {
+            return true;
+        }
+        if (o == null || getClass() != o.getClass()) {
+            return false;
+        }
+        ListGroupsResponse listGroupsResponse = (ListGroupsResponse) o;
+        return Objects.equals(this.groups, listGroupsResponse.groups) &&
+                Objects.equals(this.continuationToken, listGroupsResponse.continuationToken);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(groups, continuationToken);
+    }
+
+
+    @Override
+    public String toString() {
+        StringBuilder sb = new StringBuilder();
+        sb.append("class ListGroupsResponse {\n");
+
+        sb.append("    groups: ").append(toIndentedString(groups)).append("\n");
+        sb.append("    continuationToken: ").append(toIndentedString(continuationToken)).append("\n");
+        sb.append("}");
+        return sb.toString();
+    }
+
+    /**
+     * Convert the given object to string with each line indented by 4 spaces
+     * (except the first line).
+     */
+    private String toIndentedString(java.lang.Object o) {
+        if (o == null) {
+            return "null";
+        }
+        return o.toString().replace("\n", "\n    ");
+    }
+}
+
diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaInfo.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaInfo.java
new file mode 100644
index 000000000..2be4282ab
--- /dev/null
+++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaInfo.java
@@ -0,0 +1,179 @@
+/*
+ * Pravega Schema Registry APIs
+ * REST APIs for Pravega Schema Registry.
+ *
+ * OpenAPI spec version: 0.0.1
+ *
+ *
+ * NOTE: This class is auto generated by the swagger code generator program.
+ * https://github.com/swagger-api/swagger-codegen.git
+ * Do not edit the class manually.
+ */ + + +package io.pravega.schemaregistry.contract.generated.rest.model; + +import java.util.Arrays; +import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonCreator; +import io.pravega.schemaregistry.contract.generated.rest.model.SerializationFormat; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import javax.validation.constraints.*; + +/** + * Schema information object that encapsulates various properties of a schema. + */ +@ApiModel(description = "Schema information object that encapsulates various properties of a schema.") + +public class SchemaInfo { + @JsonProperty("type") + private String type = null; + + @JsonProperty("serializationFormat") + private SerializationFormat serializationFormat = null; + + @JsonProperty("schemaData") + private byte[] schemaData = null; + + @JsonProperty("properties") + private Map properties = null; + + public SchemaInfo type(String type) { + this.type = type; + return this; + } + + /** + * Name of the schema. This identifies the type of object the schema payload represents. + * @return type + **/ + @JsonProperty("type") + @ApiModelProperty(required = true, value = "Name of the schema. This identifies the type of object the schema payload represents.") + @NotNull + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public SchemaInfo serializationFormat(SerializationFormat serializationFormat) { + this.serializationFormat = serializationFormat; + return this; + } + + /** + * Type of schema. + * @return serializationFormat + **/ + @JsonProperty("serializationFormat") + @ApiModelProperty(required = true, value = "Type of schema.") + @NotNull + public SerializationFormat getSerializationFormat() { + return serializationFormat; + } + + public void setSerializationFormat(SerializationFormat serializationFormat) { + this.serializationFormat = serializationFormat; + } + + public SchemaInfo schemaData(byte[] schemaData) { + this.schemaData = schemaData; + return this; + } + + /** + * Base64 encoded string for binary data for schema. + * @return schemaData + **/ + @JsonProperty("schemaData") + @ApiModelProperty(required = true, value = "Base64 encoded string for binary data for schema.") + @NotNull + public byte[] getSchemaData() { + return schemaData; + } + + public void setSchemaData(byte[] schemaData) { + this.schemaData = schemaData; + } + + public SchemaInfo properties(Map properties) { + this.properties = properties; + return this; + } + + public SchemaInfo putPropertiesItem(String key, String propertiesItem) { + if (this.properties == null) { + this.properties = new HashMap(); + } + this.properties.put(key, propertiesItem); + return this; + } + + /** + * User defined key value strings. 
+ * @return properties + **/ + @JsonProperty("properties") + @ApiModelProperty(value = "User defined key value strings.") + public Map getProperties() { + return properties; + } + + public void setProperties(Map properties) { + this.properties = properties; + } + + + @Override + public boolean equals(java.lang.Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + SchemaInfo schemaInfo = (SchemaInfo) o; + return Objects.equals(this.type, schemaInfo.type) && + Objects.equals(this.serializationFormat, schemaInfo.serializationFormat) && + Arrays.equals(this.schemaData, schemaInfo.schemaData) && + Objects.equals(this.properties, schemaInfo.properties); + } + + @Override + public int hashCode() { + return Objects.hash(type, serializationFormat, schemaData, properties); + } + + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class SchemaInfo {\n"); + + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" serializationFormat: ").append(toIndentedString(serializationFormat)).append("\n"); + sb.append(" schemaData: ").append(toIndentedString(schemaData)).append("\n"); + sb.append(" properties: ").append(toIndentedString(properties)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces + * (except the first line). + */ + private String toIndentedString(java.lang.Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} + diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaValidationRule.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaValidationRule.java new file mode 100644 index 000000000..9fb9ee11d --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaValidationRule.java @@ -0,0 +1,92 @@ +/* + * Pravega Schema Registry APIs + * REST APIs for Pravega Schema Registry. + * + * OpenAPI spec version: 0.0.1 + * + * + * NOTE: This class is auto generated by the swagger code generator program. + * https://github.com/swagger-api/swagger-codegen.git + * Do not edit the class manually. + */ + + +package io.pravega.schemaregistry.contract.generated.rest.model; + +import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonCreator; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import javax.validation.constraints.*; + +/** + * Schema validation rule base class. + */ +@ApiModel(description = "Schema validation rule base class.") + +public class SchemaValidationRule { + @JsonProperty("rule") + private Object rule = null; + + public SchemaValidationRule rule(Object rule) { + this.rule = rule; + return this; + } + + /** + * Specific schema validation rule. The only rule we have presently is Compatibility. The \"name\" is used to identify specific Rule type. The only rule supported in this is Compatibility. + * @return rule + **/ + @JsonProperty("rule") + @ApiModelProperty(required = true, value = "Specific schema validation rule. The only rule we have presently is Compatibility. The \"name\" is used to identify specific Rule type. 
The only rule supported in this is Compatibility.") + @NotNull + public Object getRule() { + return rule; + } + + public void setRule(Object rule) { + this.rule = rule; + } + + + @Override + public boolean equals(java.lang.Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + SchemaValidationRule schemaValidationRule = (SchemaValidationRule) o; + return Objects.equals(this.rule, schemaValidationRule.rule); + } + + @Override + public int hashCode() { + return Objects.hash(rule); + } + + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class SchemaValidationRule {\n"); + + sb.append(" rule: ").append(toIndentedString(rule)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces + * (except the first line). + */ + private String toIndentedString(java.lang.Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} + diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaValidationRules.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaValidationRules.java new file mode 100644 index 000000000..0f9d7af0b --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaValidationRules.java @@ -0,0 +1,103 @@ +/* + * Pravega Schema Registry APIs + * REST APIs for Pravega Schema Registry. + * + * OpenAPI spec version: 0.0.1 + * + * + * NOTE: This class is auto generated by the swagger code generator program. + * https://github.com/swagger-api/swagger-codegen.git + * Do not edit the class manually. + */ + + +package io.pravega.schemaregistry.contract.generated.rest.model; + +import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonCreator; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaValidationRule; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import javax.validation.constraints.*; + +/** + * Schema validation rules to be applied for new schema addition. Currently only one rule is supported - Compatibility. + */ +@ApiModel(description = "Schema validation rules to be applied for new schema addition. 
Currently only one rule is supported - Compatibility.") + +public class SchemaValidationRules { + @JsonProperty("rules") + private Map rules = null; + + public SchemaValidationRules rules(Map rules) { + this.rules = rules; + return this; + } + + public SchemaValidationRules putRulesItem(String key, SchemaValidationRule rulesItem) { + if (this.rules == null) { + this.rules = new HashMap(); + } + this.rules.put(key, rulesItem); + return this; + } + + /** + * Get rules + * @return rules + **/ + @JsonProperty("rules") + @ApiModelProperty(value = "") + public Map getRules() { + return rules; + } + + public void setRules(Map rules) { + this.rules = rules; + } + + + @Override + public boolean equals(java.lang.Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + SchemaValidationRules schemaValidationRules = (SchemaValidationRules) o; + return Objects.equals(this.rules, schemaValidationRules.rules); + } + + @Override + public int hashCode() { + return Objects.hash(rules); + } + + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class SchemaValidationRules {\n"); + + sb.append(" rules: ").append(toIndentedString(rules)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces + * (except the first line). + */ + private String toIndentedString(java.lang.Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} + diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaVersionsList.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaVersionsList.java new file mode 100644 index 000000000..6be73a69d --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaVersionsList.java @@ -0,0 +1,102 @@ +/* + * Pravega Schema Registry APIs + * REST APIs for Pravega Schema Registry. + * + * OpenAPI spec version: 0.0.1 + * + * + * NOTE: This class is auto generated by the swagger code generator program. + * https://github.com/swagger-api/swagger-codegen.git + * Do not edit the class manually. + */ + + +package io.pravega.schemaregistry.contract.generated.rest.model; + +import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonCreator; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaWithVersion; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import java.util.ArrayList; +import java.util.List; +import javax.validation.constraints.*; + +/** + * List of schemas with their versions. + */ +@ApiModel(description = "List of schemas with their versions.") + +public class SchemaVersionsList { + @JsonProperty("schemas") + private List schemas = null; + + public SchemaVersionsList schemas(List schemas) { + this.schemas = schemas; + return this; + } + + public SchemaVersionsList addSchemasItem(SchemaWithVersion schemasItem) { + if (this.schemas == null) { + this.schemas = new ArrayList(); + } + this.schemas.add(schemasItem); + return this; + } + + /** + * List of schemas with their versions. 
+ * @return schemas + **/ + @JsonProperty("schemas") + @ApiModelProperty(value = "List of schemas with their versions.") + public List getSchemas() { + return schemas; + } + + public void setSchemas(List schemas) { + this.schemas = schemas; + } + + + @Override + public boolean equals(java.lang.Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + SchemaVersionsList schemaVersionsList = (SchemaVersionsList) o; + return Objects.equals(this.schemas, schemaVersionsList.schemas); + } + + @Override + public int hashCode() { + return Objects.hash(schemas); + } + + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class SchemaVersionsList {\n"); + + sb.append(" schemas: ").append(toIndentedString(schemas)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces + * (except the first line). + */ + private String toIndentedString(java.lang.Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} + diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaWithVersion.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaWithVersion.java new file mode 100644 index 000000000..bc0687fff --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaWithVersion.java @@ -0,0 +1,119 @@ +/* + * Pravega Schema Registry APIs + * REST APIs for Pravega Schema Registry. + * + * OpenAPI spec version: 0.0.1 + * + * + * NOTE: This class is auto generated by the swagger code generator program. + * https://github.com/swagger-api/swagger-codegen.git + * Do not edit the class manually. + */ + + +package io.pravega.schemaregistry.contract.generated.rest.model; + +import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonCreator; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo; +import io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import javax.validation.constraints.*; + +/** + * Object that encapsulates SchemaInfo and its corresponding VersionInfo objects. + */ +@ApiModel(description = "Object that encapsulates SchemaInfo and its corresponding VersionInfo objects.") + +public class SchemaWithVersion { + @JsonProperty("schemaInfo") + private SchemaInfo schemaInfo = null; + + @JsonProperty("version") + private VersionInfo version = null; + + public SchemaWithVersion schemaInfo(SchemaInfo schemaInfo) { + this.schemaInfo = schemaInfo; + return this; + } + + /** + * Schema information. + * @return schemaInfo + **/ + @JsonProperty("schemaInfo") + @ApiModelProperty(required = true, value = "Schema information.") + @NotNull + public SchemaInfo getSchemaInfo() { + return schemaInfo; + } + + public void setSchemaInfo(SchemaInfo schemaInfo) { + this.schemaInfo = schemaInfo; + } + + public SchemaWithVersion version(VersionInfo version) { + this.version = version; + return this; + } + + /** + * Version information. 
+ * @return version + **/ + @JsonProperty("version") + @ApiModelProperty(required = true, value = "Version information.") + @NotNull + public VersionInfo getVersion() { + return version; + } + + public void setVersion(VersionInfo version) { + this.version = version; + } + + + @Override + public boolean equals(java.lang.Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + SchemaWithVersion schemaWithVersion = (SchemaWithVersion) o; + return Objects.equals(this.schemaInfo, schemaWithVersion.schemaInfo) && + Objects.equals(this.version, schemaWithVersion.version); + } + + @Override + public int hashCode() { + return Objects.hash(schemaInfo, version); + } + + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class SchemaWithVersion {\n"); + + sb.append(" schemaInfo: ").append(toIndentedString(schemaInfo)).append("\n"); + sb.append(" version: ").append(toIndentedString(version)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces + * (except the first line). + */ + private String toIndentedString(java.lang.Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} + diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SerializationFormat.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SerializationFormat.java new file mode 100644 index 000000000..bc980cbd6 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SerializationFormat.java @@ -0,0 +1,154 @@ +/* + * Pravega Schema Registry APIs + * REST APIs for Pravega Schema Registry. + * + * OpenAPI spec version: 0.0.1 + * + * + * NOTE: This class is auto generated by the swagger code generator program. + * https://github.com/swagger-api/swagger-codegen.git + * Do not edit the class manually. + */ + + +package io.pravega.schemaregistry.contract.generated.rest.model; + +import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonValue; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import javax.validation.constraints.*; + +/** + * Serialization format enum that lists different serialization formats supported by the service. To use additional formats, use serializationFormat.Custom and supply customTypeName. + */ +@ApiModel(description = "Serialization format enum that lists different serialization formats supported by the service. 
To use additional formats, use serializationFormat.Custom and supply customTypeName.") + +public class SerializationFormat { + /** + * Gets or Sets serializationFormat + */ + public enum SerializationFormatEnum { + AVRO("Avro"), + + PROTOBUF("Protobuf"), + + JSON("Json"), + + ANY("Any"), + + CUSTOM("Custom"); + + private String value; + + SerializationFormatEnum(String value) { + this.value = value; + } + + @Override + @JsonValue + public String toString() { + return String.valueOf(value); + } + + @JsonCreator + public static SerializationFormatEnum fromValue(String text) { + for (SerializationFormatEnum b : SerializationFormatEnum.values()) { + if (String.valueOf(b.value).equals(text)) { + return b; + } + } + return null; + } + } + + @JsonProperty("serializationFormat") + private SerializationFormatEnum serializationFormat = null; + + @JsonProperty("customTypeName") + private String customTypeName = null; + + public SerializationFormat serializationFormat(SerializationFormatEnum serializationFormat) { + this.serializationFormat = serializationFormat; + return this; + } + + /** + * Get serializationFormat + * @return serializationFormat + **/ + @JsonProperty("serializationFormat") + @ApiModelProperty(required = true, value = "") + @NotNull + public SerializationFormatEnum getSerializationFormat() { + return serializationFormat; + } + + public void setSerializationFormat(SerializationFormatEnum serializationFormat) { + this.serializationFormat = serializationFormat; + } + + public SerializationFormat customTypeName(String customTypeName) { + this.customTypeName = customTypeName; + return this; + } + + /** + * Get customTypeName + * @return customTypeName + **/ + @JsonProperty("customTypeName") + @ApiModelProperty(value = "") + public String getCustomTypeName() { + return customTypeName; + } + + public void setCustomTypeName(String customTypeName) { + this.customTypeName = customTypeName; + } + + + @Override + public boolean equals(java.lang.Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + SerializationFormat serializationFormat = (SerializationFormat) o; + return Objects.equals(this.serializationFormat, serializationFormat.serializationFormat) && + Objects.equals(this.customTypeName, serializationFormat.customTypeName); + } + + @Override + public int hashCode() { + return Objects.hash(serializationFormat, customTypeName); + } + + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class SerializationFormat {\n"); + + sb.append(" serializationFormat: ").append(toIndentedString(serializationFormat)).append("\n"); + sb.append(" customTypeName: ").append(toIndentedString(customTypeName)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces + * (except the first line). 
+ */ + private String toIndentedString(java.lang.Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} + diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/UpdateValidationRulesRequest.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/UpdateValidationRulesRequest.java new file mode 100644 index 000000000..92cdef2d9 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/UpdateValidationRulesRequest.java @@ -0,0 +1,116 @@ +/* + * Pravega Schema Registry APIs + * REST APIs for Pravega Schema Registry. + * + * OpenAPI spec version: 0.0.1 + * + * + * NOTE: This class is auto generated by the swagger code generator program. + * https://github.com/swagger-api/swagger-codegen.git + * Do not edit the class manually. + */ + + +package io.pravega.schemaregistry.contract.generated.rest.model; + +import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonCreator; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaValidationRules; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import javax.validation.constraints.*; + +/** + * UpdateValidationRulesRequest + */ + +public class UpdateValidationRulesRequest { + @JsonProperty("validationRules") + private SchemaValidationRules validationRules = null; + + @JsonProperty("previousRules") + private SchemaValidationRules previousRules = null; + + public UpdateValidationRulesRequest validationRules(SchemaValidationRules validationRules) { + this.validationRules = validationRules; + return this; + } + + /** + * Get validationRules + * @return validationRules + **/ + @JsonProperty("validationRules") + @ApiModelProperty(required = true, value = "") + @NotNull + public SchemaValidationRules getValidationRules() { + return validationRules; + } + + public void setValidationRules(SchemaValidationRules validationRules) { + this.validationRules = validationRules; + } + + public UpdateValidationRulesRequest previousRules(SchemaValidationRules previousRules) { + this.previousRules = previousRules; + return this; + } + + /** + * Get previousRules + * @return previousRules + **/ + @JsonProperty("previousRules") + @ApiModelProperty(value = "") + public SchemaValidationRules getPreviousRules() { + return previousRules; + } + + public void setPreviousRules(SchemaValidationRules previousRules) { + this.previousRules = previousRules; + } + + + @Override + public boolean equals(java.lang.Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + UpdateValidationRulesRequest updateValidationRulesRequest = (UpdateValidationRulesRequest) o; + return Objects.equals(this.validationRules, updateValidationRulesRequest.validationRules) && + Objects.equals(this.previousRules, updateValidationRulesRequest.previousRules); + } + + @Override + public int hashCode() { + return Objects.hash(validationRules, previousRules); + } + + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class UpdateValidationRulesRequest {\n"); + + sb.append(" validationRules: ").append(toIndentedString(validationRules)).append("\n"); + sb.append(" previousRules: ").append(toIndentedString(previousRules)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each 
line indented by 4 spaces + * (except the first line). + */ + private String toIndentedString(java.lang.Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} + diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/Valid.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/Valid.java new file mode 100644 index 000000000..bde7b3f10 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/Valid.java @@ -0,0 +1,92 @@ +/* + * Pravega Schema Registry APIs + * REST APIs for Pravega Schema Registry. + * + * OpenAPI spec version: 0.0.1 + * + * + * NOTE: This class is auto generated by the swagger code generator program. + * https://github.com/swagger-api/swagger-codegen.git + * Do not edit the class manually. + */ + + +package io.pravega.schemaregistry.contract.generated.rest.model; + +import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonCreator; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import javax.validation.constraints.*; + +/** + * Response object for validateSchema api. + */ +@ApiModel(description = "Response object for validateSchema api.") + +public class Valid { + @JsonProperty("valid") + private Boolean valid = null; + + public Valid valid(Boolean valid) { + this.valid = valid; + return this; + } + + /** + * Whether given schema is valid with respect to existing group schemas against the configured validation rules. + * @return valid + **/ + @JsonProperty("valid") + @ApiModelProperty(required = true, value = "Whether given schema is valid with respect to existing group schemas against the configured validation rules.") + @NotNull + public Boolean isValid() { + return valid; + } + + public void setValid(Boolean valid) { + this.valid = valid; + } + + + @Override + public boolean equals(java.lang.Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Valid valid = (Valid) o; + return Objects.equals(this.valid, valid.valid); + } + + @Override + public int hashCode() { + return Objects.hash(valid); + } + + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class Valid {\n"); + + sb.append(" valid: ").append(toIndentedString(valid)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces + * (except the first line). + */ + private String toIndentedString(java.lang.Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} + diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/ValidateRequest.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/ValidateRequest.java new file mode 100644 index 000000000..5daa183df --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/ValidateRequest.java @@ -0,0 +1,117 @@ +/* + * Pravega Schema Registry APIs + * REST APIs for Pravega Schema Registry. + * + * OpenAPI spec version: 0.0.1 + * + * + * NOTE: This class is auto generated by the swagger code generator program. + * https://github.com/swagger-api/swagger-codegen.git + * Do not edit the class manually. 
+ */ + + +package io.pravega.schemaregistry.contract.generated.rest.model; + +import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonCreator; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaValidationRules; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import javax.validation.constraints.*; + +/** + * ValidateRequest + */ + +public class ValidateRequest { + @JsonProperty("schemaInfo") + private SchemaInfo schemaInfo = null; + + @JsonProperty("validationRules") + private SchemaValidationRules validationRules = null; + + public ValidateRequest schemaInfo(SchemaInfo schemaInfo) { + this.schemaInfo = schemaInfo; + return this; + } + + /** + * Get schemaInfo + * @return schemaInfo + **/ + @JsonProperty("schemaInfo") + @ApiModelProperty(required = true, value = "") + @NotNull + public SchemaInfo getSchemaInfo() { + return schemaInfo; + } + + public void setSchemaInfo(SchemaInfo schemaInfo) { + this.schemaInfo = schemaInfo; + } + + public ValidateRequest validationRules(SchemaValidationRules validationRules) { + this.validationRules = validationRules; + return this; + } + + /** + * Get validationRules + * @return validationRules + **/ + @JsonProperty("validationRules") + @ApiModelProperty(value = "") + public SchemaValidationRules getValidationRules() { + return validationRules; + } + + public void setValidationRules(SchemaValidationRules validationRules) { + this.validationRules = validationRules; + } + + + @Override + public boolean equals(java.lang.Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ValidateRequest validateRequest = (ValidateRequest) o; + return Objects.equals(this.schemaInfo, validateRequest.schemaInfo) && + Objects.equals(this.validationRules, validateRequest.validationRules); + } + + @Override + public int hashCode() { + return Objects.hash(schemaInfo, validationRules); + } + + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ValidateRequest {\n"); + + sb.append(" schemaInfo: ").append(toIndentedString(schemaInfo)).append("\n"); + sb.append(" validationRules: ").append(toIndentedString(validationRules)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces + * (except the first line). + */ + private String toIndentedString(java.lang.Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} + diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/VersionInfo.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/VersionInfo.java new file mode 100644 index 000000000..9b4c2603d --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/VersionInfo.java @@ -0,0 +1,142 @@ +/* + * Pravega Schema Registry APIs + * REST APIs for Pravega Schema Registry. + * + * OpenAPI spec version: 0.0.1 + * + * + * NOTE: This class is auto generated by the swagger code generator program. + * https://github.com/swagger-api/swagger-codegen.git + * Do not edit the class manually. 
+ */ + + +package io.pravega.schemaregistry.contract.generated.rest.model; + +import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonCreator; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import javax.validation.constraints.*; + +/** + * Version information object. + */ +@ApiModel(description = "Version information object.") + +public class VersionInfo { + @JsonProperty("type") + private String type = null; + + @JsonProperty("version") + private Integer version = null; + + @JsonProperty("ordinal") + private Integer ordinal = null; + + public VersionInfo type(String type) { + this.type = type; + return this; + } + + /** + * Type of schema for this version. This is same value used in SchemaInfo#Type for the schema this version identifies. + * @return type + **/ + @JsonProperty("type") + @ApiModelProperty(required = true, value = "Type of schema for this version. This is same value used in SchemaInfo#Type for the schema this version identifies.") + @NotNull + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public VersionInfo version(Integer version) { + this.version = version; + return this; + } + + /** + * Version number that uniquely identifies the schema version among all schemas in the group that share the same Type. + * @return version + **/ + @JsonProperty("version") + @ApiModelProperty(required = true, value = "Version number that uniquely identifies the schema version among all schemas in the group that share the same Type.") + @NotNull + public Integer getVersion() { + return version; + } + + public void setVersion(Integer version) { + this.version = version; + } + + public VersionInfo ordinal(Integer ordinal) { + this.ordinal = ordinal; + return this; + } + + /** + * Version ordinal that uniquely identifies the position of the corresponding schema across all schemas in the group. + * @return ordinal + **/ + @JsonProperty("ordinal") + @ApiModelProperty(required = true, value = "Version ordinal that uniquely identifies the position of the corresponding schema across all schemas in the group.") + @NotNull + public Integer getOrdinal() { + return ordinal; + } + + public void setOrdinal(Integer ordinal) { + this.ordinal = ordinal; + } + + + @Override + public boolean equals(java.lang.Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + VersionInfo versionInfo = (VersionInfo) o; + return Objects.equals(this.type, versionInfo.type) && + Objects.equals(this.version, versionInfo.version) && + Objects.equals(this.ordinal, versionInfo.ordinal); + } + + @Override + public int hashCode() { + return Objects.hash(type, version, ordinal); + } + + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class VersionInfo {\n"); + + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" version: ").append(toIndentedString(version)).append("\n"); + sb.append(" ordinal: ").append(toIndentedString(ordinal)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces + * (except the first line). 
+ */ + private String toIndentedString(java.lang.Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} + diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/ApiException.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/ApiException.java new file mode 100644 index 000000000..096b7c1d1 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/ApiException.java @@ -0,0 +1,10 @@ +package io.pravega.schemaregistry.contract.generated.rest.server.api; + + +public class ApiException extends Exception{ + private int code; + public ApiException (int code, String msg) { + super(msg); + this.code = code; + } +} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/ApiOriginFilter.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/ApiOriginFilter.java new file mode 100644 index 000000000..1ad2cce34 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/ApiOriginFilter.java @@ -0,0 +1,22 @@ +package io.pravega.schemaregistry.contract.generated.rest.server.api; + +import java.io.IOException; + +import javax.servlet.*; +import javax.servlet.http.HttpServletResponse; + + +public class ApiOriginFilter implements javax.servlet.Filter { + public void doFilter(ServletRequest request, ServletResponse response, + FilterChain chain) throws IOException, ServletException { + HttpServletResponse res = (HttpServletResponse) response; + res.addHeader("Access-Control-Allow-Origin", "*"); + res.addHeader("Access-Control-Allow-Methods", "GET, POST, DELETE, PUT"); + res.addHeader("Access-Control-Allow-Headers", "Content-Type"); + chain.doFilter(request, response); + } + + public void destroy() {} + + public void init(FilterConfig filterConfig) throws ServletException {} +} \ No newline at end of file diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/ApiResponseMessage.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/ApiResponseMessage.java new file mode 100644 index 000000000..47e3f5d76 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/ApiResponseMessage.java @@ -0,0 +1,69 @@ +package io.pravega.schemaregistry.contract.generated.rest.server.api; + +import javax.xml.bind.annotation.XmlTransient; + +@javax.xml.bind.annotation.XmlRootElement + +public class ApiResponseMessage { + public static final int ERROR = 1; + public static final int WARNING = 2; + public static final int INFO = 3; + public static final int OK = 4; + public static final int TOO_BUSY = 5; + + int code; + String type; + String message; + + public ApiResponseMessage(){} + + public ApiResponseMessage(int code, String message){ + this.code = code; + switch(code){ + case ERROR: + setType("error"); + break; + case WARNING: + setType("warning"); + break; + case INFO: + setType("info"); + break; + case OK: + setType("ok"); + break; + case TOO_BUSY: + setType("too busy"); + break; + default: + setType("unknown"); + break; + } + this.message = message; + } + + @XmlTransient + public int getCode() { + return code; + } + + public void setCode(int code) { + this.code = code; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public String 
getMessage() { + return message; + } + + public void setMessage(String message) { + this.message = message; + } +} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/Bootstrap.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/Bootstrap.java new file mode 100644 index 000000000..deb52b674 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/Bootstrap.java @@ -0,0 +1,31 @@ +package io.pravega.schemaregistry.contract.generated.rest.server.api; + +import io.swagger.jaxrs.config.SwaggerContextService; +import io.swagger.models.*; + +import io.swagger.models.auth.*; + +import javax.servlet.http.HttpServlet; +import javax.servlet.ServletContext; +import javax.servlet.ServletConfig; +import javax.servlet.ServletException; + +public class Bootstrap extends HttpServlet { + @Override + public void init(ServletConfig config) throws ServletException { + Info info = new Info() + .title("Swagger Server") + .description("REST APIs for Pravega Schema Registry.") + .termsOfService("") + .contact(new Contact() + .email("")) + .license(new License() + .name("Apache 2.0") + .url("http://www.apache.org/licenses/LICENSE-2.0")); + + ServletContext context = config.getServletContext(); + Swagger swagger = new Swagger().info(info); + + new SwaggerContextService().withServletConfig(config).updateSwagger(swagger); + } +} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/GroupsApi.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/GroupsApi.java new file mode 100644 index 000000000..16db9f378 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/GroupsApi.java @@ -0,0 +1,412 @@ +package io.pravega.schemaregistry.contract.generated.rest.server.api; + +import io.pravega.schemaregistry.contract.generated.rest.model.*; +import io.pravega.schemaregistry.contract.generated.rest.server.api.GroupsApiService; +import io.pravega.schemaregistry.contract.generated.rest.server.api.factories.GroupsApiServiceFactory; + +import io.swagger.annotations.ApiParam; +import io.swagger.jaxrs.*; + +import io.pravega.schemaregistry.contract.generated.rest.model.CanRead; +import io.pravega.schemaregistry.contract.generated.rest.model.CodecTypesList; +import io.pravega.schemaregistry.contract.generated.rest.model.CreateGroupRequest; +import io.pravega.schemaregistry.contract.generated.rest.model.EncodingId; +import io.pravega.schemaregistry.contract.generated.rest.model.EncodingInfo; +import io.pravega.schemaregistry.contract.generated.rest.model.GetEncodingIdRequest; +import io.pravega.schemaregistry.contract.generated.rest.model.GroupHistory; +import io.pravega.schemaregistry.contract.generated.rest.model.GroupProperties; +import io.pravega.schemaregistry.contract.generated.rest.model.ListGroupsResponse; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaVersionsList; +import io.pravega.schemaregistry.contract.generated.rest.model.UpdateValidationRulesRequest; +import io.pravega.schemaregistry.contract.generated.rest.model.Valid; +import io.pravega.schemaregistry.contract.generated.rest.model.ValidateRequest; +import io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo; + +import java.util.Map; +import java.util.List; +import 
io.pravega.schemaregistry.contract.generated.rest.server.api.NotFoundException; + +import java.io.InputStream; + +import org.glassfish.jersey.media.multipart.FormDataContentDisposition; +import org.glassfish.jersey.media.multipart.FormDataParam; + +import javax.servlet.ServletConfig; +import javax.ws.rs.core.Context; +import javax.ws.rs.core.Response; +import javax.ws.rs.core.SecurityContext; +import javax.ws.rs.*; +import javax.validation.constraints.*; + +@Path("/groups") + + +@io.swagger.annotations.Api(description = "the groups API") + +public class GroupsApi { + private final GroupsApiService delegate; + + public GroupsApi(@Context ServletConfig servletContext) { + GroupsApiService delegate = null; + + if (servletContext != null) { + String implClass = servletContext.getInitParameter("GroupsApi.implementation"); + if (implClass != null && !"".equals(implClass.trim())) { + try { + delegate = (GroupsApiService) Class.forName(implClass).newInstance(); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + } + + if (delegate == null) { + delegate = GroupsApiServiceFactory.getGroupsApi(); + } + + this.delegate = delegate; + } + + @POST + @Path("/{groupName}/codecTypes") + @Consumes({ "application/json" }) + + @io.swagger.annotations.ApiOperation(value = "", notes = "Adds a new codecType to the group.", response = Void.class, tags={ "Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 201, message = "Successfully added codecType to group", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 404, message = "Group not found", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while registering codectype to a Group", response = Void.class) }) + public Response addCodecType(@ApiParam(value = "Group name",required=true) @PathParam("groupName") String groupName +,@ApiParam(value = "The codecType" ,required=true) String codecType +,@Context SecurityContext securityContext) + throws NotFoundException { + return delegate.addCodecType(groupName,codecType,securityContext); + } + @POST + @Path("/{groupName}/schemas/versions") + @Consumes({ "application/json" }) + @Produces({ "application/json" }) + @io.swagger.annotations.ApiOperation(value = "", notes = "Adds a new schema to the group", response = VersionInfo.class, tags={ "Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 201, message = "Successfully added schema to the group", response = VersionInfo.class), + + @io.swagger.annotations.ApiResponse(code = 404, message = "Group not found", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 409, message = "Incompatible schema", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 417, message = "Invalid serialization format", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while adding schema to group", response = Void.class) }) + public Response addSchema(@ApiParam(value = "Group name",required=true) @PathParam("groupName") String groupName +,@ApiParam(value = "Add new schema to group" ,required=true) SchemaInfo schemaInfo +,@Context SecurityContext securityContext) + throws NotFoundException { + return delegate.addSchema(groupName,schemaInfo,securityContext); + } + @POST + @Path("/{groupName}/schemas/versions/canRead") + @Consumes({ "application/json" }) + @Produces({ "application/json" }) + 
@io.swagger.annotations.ApiOperation(value = "", notes = "Checks if given schema can be used for reads subject to compatibility policy in the schema validation rules.", response = CanRead.class, tags={ "Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Response to tell whether schema can be used to read existing schemas", response = CanRead.class), + + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while checking schema for readability", response = Void.class) }) + public Response canRead(@ApiParam(value = "Group name",required=true) @PathParam("groupName") String groupName +,@ApiParam(value = "Checks if schema can be used to read the data in the stream based on compatibility rules." ,required=true) SchemaInfo schemaInfo +,@Context SecurityContext securityContext) + throws NotFoundException { + return delegate.canRead(groupName,schemaInfo,securityContext); + } + @POST + + @Consumes({ "application/json" }) + + @io.swagger.annotations.ApiOperation(value = "", notes = "Create a new Group", response = Void.class, tags={ "Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 201, message = "Successfully added group", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 409, message = "Group with given name already exists", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while creating a Group", response = Void.class) }) + public Response createGroup(@ApiParam(value = "The Group configuration" ,required=true) CreateGroupRequest createGroupRequest +,@Context SecurityContext securityContext) + throws NotFoundException { + return delegate.createGroup(createGroupRequest,securityContext); + } + @DELETE + @Path("/{groupName}") + + + @io.swagger.annotations.ApiOperation(value = "", notes = "Delete a Group", response = Void.class, tags={ "Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 204, message = "Successfully deleted the Group", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while deleting the Group", response = Void.class) }) + public Response deleteGroup(@ApiParam(value = "Group name",required=true) @PathParam("groupName") String groupName +,@Context SecurityContext securityContext) + throws NotFoundException { + return delegate.deleteGroup(groupName,securityContext); + } + @DELETE + @Path("/{groupName}/schemas/{type}/versions/{version}") + + @Produces({ "application/json" }) + @io.swagger.annotations.ApiOperation(value = "", notes = "Delete schema version from the group.", response = Void.class, tags={ "Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 204, message = "Schema corresponding to the version", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while deleting schema from group", response = Void.class) }) + public Response deleteSchemaVersion(@ApiParam(value = "Group name",required=true) @PathParam("groupName") String groupName +,@ApiParam(value = "Schema type from SchemaInfo#type or 
VersionInfo#type",required=true) @PathParam("type") String type +,@ApiParam(value = "Version number",required=true) @PathParam("version") Integer version +,@Context SecurityContext securityContext) + throws NotFoundException { + return delegate.deleteSchemaVersion(groupName,type,version,securityContext); + } + @DELETE + @Path("/{groupName}/schemas/versions/{versionOrdinal}") + + @Produces({ "application/json" }) + @io.swagger.annotations.ApiOperation(value = "", notes = "Delete schema identified by version from the group.", response = Void.class, tags={ "Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 204, message = "Schema corresponding to the version", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while deleting schema from group", response = Void.class) }) + public Response deleteSchemaVersionOrinal(@ApiParam(value = "Group name",required=true) @PathParam("groupName") String groupName +,@ApiParam(value = "Version ordinal",required=true) @PathParam("versionOrdinal") Integer versionOrdinal +,@Context SecurityContext securityContext) + throws NotFoundException { + return delegate.deleteSchemaVersionOrinal(groupName,versionOrdinal,securityContext); + } + @GET + @Path("/{groupName}/codecTypes") + + @Produces({ "application/json" }) + @io.swagger.annotations.ApiOperation(value = "", notes = "Get codecTypes for the group.", response = CodecTypesList.class, tags={ "Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Found CodecTypes", response = CodecTypesList.class), + + @io.swagger.annotations.ApiResponse(code = 404, message = "Group or encoding id with given name not found", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching codecTypes registered", response = Void.class) }) + public Response getCodecTypesList(@ApiParam(value = "Group name",required=true) @PathParam("groupName") String groupName +,@Context SecurityContext securityContext) + throws NotFoundException { + return delegate.getCodecTypesList(groupName,securityContext); + } + @PUT + @Path("/{groupName}/encodings") + @Consumes({ "application/json" }) + @Produces({ "application/json" }) + @io.swagger.annotations.ApiOperation(value = "", notes = "Get an encoding id that uniquely identifies a schema version and codec type pair.", response = EncodingId.class, tags={ "Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Found Encoding", response = EncodingId.class), + + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name or version not found", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 412, message = "Codec type not registered", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while getting encoding id", response = Void.class) }) + public Response getEncodingId(@ApiParam(value = "Group name",required=true) @PathParam("groupName") String groupName +,@ApiParam(value = "Get schema corresponding to the version" ,required=true) GetEncodingIdRequest getEncodingIdRequest +,@Context SecurityContext securityContext) + throws NotFoundException { + return 
delegate.getEncodingId(groupName,getEncodingIdRequest,securityContext); + } + @GET + @Path("/{groupName}/encodings/{encodingId}") + + @Produces({ "application/json" }) + @io.swagger.annotations.ApiOperation(value = "", notes = "Get the encoding information corresponding to the encoding id.", response = EncodingInfo.class, tags={ "Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Found Encoding", response = EncodingInfo.class), + + @io.swagger.annotations.ApiResponse(code = 404, message = "Group or encoding id with given name not found", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while getting encoding info corresponding to encoding id", response = Void.class) }) + public Response getEncodingInfo(@ApiParam(value = "Group name",required=true) @PathParam("groupName") String groupName +,@ApiParam(value = "Encoding id that identifies a unique combination of schema and codec type",required=true) @PathParam("encodingId") Integer encodingId +,@Context SecurityContext securityContext) + throws NotFoundException { + return delegate.getEncodingInfo(groupName,encodingId,securityContext); + } + @GET + @Path("/{groupName}/history") + + @Produces({ "application/json" }) + @io.swagger.annotations.ApiOperation(value = "", notes = "Fetch the history of schema evolution of a Group", response = GroupHistory.class, tags={ "Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Found Group history", response = GroupHistory.class), + + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching Group history", response = Void.class) }) + public Response getGroupHistory(@ApiParam(value = "Group name",required=true) @PathParam("groupName") String groupName +,@Context SecurityContext securityContext) + throws NotFoundException { + return delegate.getGroupHistory(groupName,securityContext); + } + @GET + @Path("/{groupName}") + + @Produces({ "application/json" }) + @io.swagger.annotations.ApiOperation(value = "", notes = "Fetch the properties of an existing Group", response = GroupProperties.class, tags={ "Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Found Group properties", response = GroupProperties.class), + + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching Group details", response = Void.class) }) + public Response getGroupProperties(@ApiParam(value = "Group name",required=true) @PathParam("groupName") String groupName +,@Context SecurityContext securityContext) + throws NotFoundException { + return delegate.getGroupProperties(groupName,securityContext); + } + @GET + @Path("/{groupName}/schemas/{type}/versions/{version}") + + @Produces({ "application/json" }) + @io.swagger.annotations.ApiOperation(value = "", notes = "Get schema from the version ordinal that uniquely identifies the schema in the group.", response = SchemaInfo.class, tags={ "Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Schema corresponding to the version", response = 
SchemaInfo.class), + + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching schema from version", response = Void.class) }) + public Response getSchemaFromVersion(@ApiParam(value = "Group name",required=true) @PathParam("groupName") String groupName +,@ApiParam(value = "Schema type from SchemaInfo#type or VersionInfo#type",required=true) @PathParam("type") String type +,@ApiParam(value = "Version number",required=true) @PathParam("version") Integer version +,@Context SecurityContext securityContext) + throws NotFoundException { + return delegate.getSchemaFromVersion(groupName,type,version,securityContext); + } + @GET + @Path("/{groupName}/schemas/versions/{versionOrdinal}") + + @Produces({ "application/json" }) + @io.swagger.annotations.ApiOperation(value = "", notes = "Get schema from the version ordinal that uniquely identifies the schema in the group.", response = SchemaInfo.class, tags={ "Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Schema corresponding to the version", response = SchemaInfo.class), + + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching schema from version", response = Void.class) }) + public Response getSchemaFromVersionOrdinal(@ApiParam(value = "Group name",required=true) @PathParam("groupName") String groupName +,@ApiParam(value = "Version ordinal",required=true) @PathParam("versionOrdinal") Integer versionOrdinal +,@Context SecurityContext securityContext) + throws NotFoundException { + return delegate.getSchemaFromVersionOrdinal(groupName,versionOrdinal,securityContext); + } + @POST + @Path("/{groupName}/schemas/versions/find") + @Consumes({ "application/json" }) + @Produces({ "application/json" }) + @io.swagger.annotations.ApiOperation(value = "", notes = "Get the version for the schema if it is registered. It does not automatically register the schema. 
To add new schema use addSchema", response = VersionInfo.class, tags={ "Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Schema version", response = VersionInfo.class), + + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error fetching version for schema", response = Void.class) }) + public Response getSchemaVersion(@ApiParam(value = "Group name",required=true) @PathParam("groupName") String groupName +,@ApiParam(value = "Get schema corresponding to the version" ,required=true) SchemaInfo schemaInfo +,@Context SecurityContext securityContext) + throws NotFoundException { + return delegate.getSchemaVersion(groupName,schemaInfo,securityContext); + } + @GET + @Path("/{groupName}/schemas/versions") + + @Produces({ "application/json" }) + @io.swagger.annotations.ApiOperation(value = "", notes = "Get all schema versions for the group", response = SchemaVersionsList.class, tags={ "Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Versioned history of schemas registered under the group", response = SchemaVersionsList.class), + + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching Group schema versions", response = Void.class) }) + public Response getSchemaVersions(@ApiParam(value = "Group name",required=true) @PathParam("groupName") String groupName +,@ApiParam(value = "Type of object the schema describes.") @QueryParam("type") String type +,@Context SecurityContext securityContext) + throws NotFoundException { + return delegate.getSchemaVersions(groupName,type,securityContext); + } + @GET + @Path("/{groupName}/schemas") + + @Produces({ "application/json" }) + @io.swagger.annotations.ApiOperation(value = "", notes = "Fetch latest schema versions for all objects identified by SchemaInfo#type under a Group. 
If query param type is specified then latest schema for the type is returned.", response = SchemaVersionsList.class, tags={ "Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Latest schemas for all objects identified by SchemaInfo#type under the group", response = SchemaVersionsList.class), + + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching Group's latest schemas", response = Void.class) }) + public Response getSchemas(@ApiParam(value = "Group name",required=true) @PathParam("groupName") String groupName +,@ApiParam(value = "Type of object") @QueryParam("type") String type +,@Context SecurityContext securityContext) + throws NotFoundException { + return delegate.getSchemas(groupName,type,securityContext); + } + @GET + + + @Produces({ "application/json" }) + @io.swagger.annotations.ApiOperation(value = "", notes = "List all groups", response = ListGroupsResponse.class, tags={ "Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "List of all groups", response = ListGroupsResponse.class), + + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching the list of Groups", response = Void.class) }) + public Response listGroups(@ApiParam(value = "Continuation token") @QueryParam("continuationToken") String continuationToken +,@ApiParam(value = "The numbers of items to return") @QueryParam("limit") Integer limit +,@Context SecurityContext securityContext) + throws NotFoundException { + return delegate.listGroups(continuationToken,limit,securityContext); + } + @PUT + @Path("/{groupName}/rules") + @Consumes({ "application/json" }) + + @io.swagger.annotations.ApiOperation(value = "", notes = "update schema validation rules of an existing Group", response = Void.class, tags={ "Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Updated schema validation policy", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 409, message = "Write conflict", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while updating Group's schema validation rules", response = Void.class) }) + public Response updateSchemaValidationRules(@ApiParam(value = "Group name",required=true) @PathParam("groupName") String groupName +,@ApiParam(value = "update group policy" ,required=true) UpdateValidationRulesRequest updateValidationRulesRequest +,@Context SecurityContext securityContext) + throws NotFoundException { + return delegate.updateSchemaValidationRules(groupName,updateValidationRulesRequest,securityContext); + } + @POST + @Path("/{groupName}/schemas/versions/validate") + @Consumes({ "application/json" }) + @Produces({ "application/json" }) + @io.swagger.annotations.ApiOperation(value = "", notes = "Checks if given schema is compatible with schemas in the registry for current policy setting.", response = Valid.class, tags={ "Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Schema validation response", response = Valid.class), + + 
@io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while trying to validate schema", response = Void.class) }) + public Response validate(@ApiParam(value = "Group name",required=true) @PathParam("groupName") String groupName +,@ApiParam(value = "Checks if schema is valid with respect to supplied validation rules" ,required=true) ValidateRequest validateRequest +,@Context SecurityContext securityContext) + throws NotFoundException { + return delegate.validate(groupName,validateRequest,securityContext); + } +} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/GroupsApiService.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/GroupsApiService.java new file mode 100644 index 000000000..dd8d9ef40 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/GroupsApiService.java @@ -0,0 +1,54 @@ +package io.pravega.schemaregistry.contract.generated.rest.server.api; + +import io.pravega.schemaregistry.contract.generated.rest.server.api.*; +import io.pravega.schemaregistry.contract.generated.rest.model.*; + +import org.glassfish.jersey.media.multipart.FormDataContentDisposition; + +import io.pravega.schemaregistry.contract.generated.rest.model.CanRead; +import io.pravega.schemaregistry.contract.generated.rest.model.CodecTypesList; +import io.pravega.schemaregistry.contract.generated.rest.model.CreateGroupRequest; +import io.pravega.schemaregistry.contract.generated.rest.model.EncodingId; +import io.pravega.schemaregistry.contract.generated.rest.model.EncodingInfo; +import io.pravega.schemaregistry.contract.generated.rest.model.GetEncodingIdRequest; +import io.pravega.schemaregistry.contract.generated.rest.model.GroupHistory; +import io.pravega.schemaregistry.contract.generated.rest.model.GroupProperties; +import io.pravega.schemaregistry.contract.generated.rest.model.ListGroupsResponse; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaVersionsList; +import io.pravega.schemaregistry.contract.generated.rest.model.UpdateValidationRulesRequest; +import io.pravega.schemaregistry.contract.generated.rest.model.Valid; +import io.pravega.schemaregistry.contract.generated.rest.model.ValidateRequest; +import io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo; + +import java.util.List; +import io.pravega.schemaregistry.contract.generated.rest.server.api.NotFoundException; + +import java.io.InputStream; + +import javax.ws.rs.core.Response; +import javax.ws.rs.core.SecurityContext; +import javax.validation.constraints.*; + +public abstract class GroupsApiService { + public abstract Response addCodecType(String groupName,String codecType,SecurityContext securityContext) throws NotFoundException; + public abstract Response addSchema(String groupName,SchemaInfo schemaInfo,SecurityContext securityContext) throws NotFoundException; + public abstract Response canRead(String groupName,SchemaInfo schemaInfo,SecurityContext securityContext) throws NotFoundException; + public abstract Response createGroup(CreateGroupRequest createGroupRequest,SecurityContext securityContext) throws NotFoundException; + public abstract Response deleteGroup(String groupName,SecurityContext securityContext) throws NotFoundException; + public abstract 
Response deleteSchemaVersion(String groupName,String type,Integer version,SecurityContext securityContext) throws NotFoundException; + public abstract Response deleteSchemaVersionOrinal(String groupName,Integer versionOrdinal,SecurityContext securityContext) throws NotFoundException; + public abstract Response getCodecTypesList(String groupName,SecurityContext securityContext) throws NotFoundException; + public abstract Response getEncodingId(String groupName,GetEncodingIdRequest getEncodingIdRequest,SecurityContext securityContext) throws NotFoundException; + public abstract Response getEncodingInfo(String groupName,Integer encodingId,SecurityContext securityContext) throws NotFoundException; + public abstract Response getGroupHistory(String groupName,SecurityContext securityContext) throws NotFoundException; + public abstract Response getGroupProperties(String groupName,SecurityContext securityContext) throws NotFoundException; + public abstract Response getSchemaFromVersion(String groupName,String type,Integer version,SecurityContext securityContext) throws NotFoundException; + public abstract Response getSchemaFromVersionOrdinal(String groupName,Integer versionOrdinal,SecurityContext securityContext) throws NotFoundException; + public abstract Response getSchemaVersion(String groupName,SchemaInfo schemaInfo,SecurityContext securityContext) throws NotFoundException; + public abstract Response getSchemaVersions(String groupName, String type,SecurityContext securityContext) throws NotFoundException; + public abstract Response getSchemas(String groupName, String type,SecurityContext securityContext) throws NotFoundException; + public abstract Response listGroups( String continuationToken, Integer limit,SecurityContext securityContext) throws NotFoundException; + public abstract Response updateSchemaValidationRules(String groupName,UpdateValidationRulesRequest updateValidationRulesRequest,SecurityContext securityContext) throws NotFoundException; + public abstract Response validate(String groupName,ValidateRequest validateRequest,SecurityContext securityContext) throws NotFoundException; +} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/JacksonJsonProvider.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/JacksonJsonProvider.java new file mode 100644 index 000000000..e6179d25f --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/JacksonJsonProvider.java @@ -0,0 +1,18 @@ +package io.pravega.schemaregistry.contract.generated.rest.server.api; + +import com.fasterxml.jackson.databind.ObjectMapper; +import io.swagger.util.Json; +import javax.ws.rs.Produces; +import javax.ws.rs.core.MediaType; +import javax.ws.rs.ext.Provider; +import org.glassfish.jersey.jackson.internal.jackson.jaxrs.json.JacksonJaxbJsonProvider; + +@Provider +@Produces({MediaType.APPLICATION_JSON}) +public class JacksonJsonProvider extends JacksonJaxbJsonProvider { + private static ObjectMapper commonMapper = Json.mapper(); + + public JacksonJsonProvider() { + super.setMapper(commonMapper); + } +} \ No newline at end of file diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/NotFoundException.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/NotFoundException.java new file mode 100644 index 000000000..e9d99721b --- /dev/null +++ 
b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/NotFoundException.java @@ -0,0 +1,10 @@ +package io.pravega.schemaregistry.contract.generated.rest.server.api; + + +public class NotFoundException extends ApiException { + private int code; + public NotFoundException (int code, String msg) { + super(code, msg); + this.code = code; + } +} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/SchemasApi.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/SchemasApi.java new file mode 100644 index 000000000..295bd1d86 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/SchemasApi.java @@ -0,0 +1,74 @@ +package io.pravega.schemaregistry.contract.generated.rest.server.api; + +import io.pravega.schemaregistry.contract.generated.rest.model.*; +import io.pravega.schemaregistry.contract.generated.rest.server.api.SchemasApiService; +import io.pravega.schemaregistry.contract.generated.rest.server.api.factories.SchemasApiServiceFactory; + +import io.swagger.annotations.ApiParam; +import io.swagger.jaxrs.*; + +import io.pravega.schemaregistry.contract.generated.rest.model.AddedTo; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo; + +import java.util.Map; +import java.util.List; +import io.pravega.schemaregistry.contract.generated.rest.server.api.NotFoundException; + +import java.io.InputStream; + +import org.glassfish.jersey.media.multipart.FormDataContentDisposition; +import org.glassfish.jersey.media.multipart.FormDataParam; + +import javax.servlet.ServletConfig; +import javax.ws.rs.core.Context; +import javax.ws.rs.core.Response; +import javax.ws.rs.core.SecurityContext; +import javax.ws.rs.*; +import javax.validation.constraints.*; + +@Path("/schemas") + + +@io.swagger.annotations.Api(description = "the schemas API") + +public class SchemasApi { + private final SchemasApiService delegate; + + public SchemasApi(@Context ServletConfig servletContext) { + SchemasApiService delegate = null; + + if (servletContext != null) { + String implClass = servletContext.getInitParameter("SchemasApi.implementation"); + if (implClass != null && !"".equals(implClass.trim())) { + try { + delegate = (SchemasApiService) Class.forName(implClass).newInstance(); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + } + + if (delegate == null) { + delegate = SchemasApiServiceFactory.getSchemasApi(); + } + + this.delegate = delegate; + } + + @POST + @Path("/addedTo") + @Consumes({ "application/json" }) + @Produces({ "application/json" }) + @io.swagger.annotations.ApiOperation(value = "", notes = "Gets a map of groups to version info where the schema if it is registered. 
SchemaInfo#properties is ignored while comparing the schema.", response = AddedTo.class, tags={ "Schema", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Schema version", response = AddedTo.class), + + @io.swagger.annotations.ApiResponse(code = 404, message = "Schema not found", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching Schema references", response = Void.class) }) + public Response getSchemaReferences(@ApiParam(value = "Get schema references for the supplied schema" ,required=true) SchemaInfo schemaInfo +,@Context SecurityContext securityContext) + throws NotFoundException { + return delegate.getSchemaReferences(schemaInfo,securityContext); + } +} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/SchemasApiService.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/SchemasApiService.java new file mode 100644 index 000000000..bcc19dd03 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/SchemasApiService.java @@ -0,0 +1,22 @@ +package io.pravega.schemaregistry.contract.generated.rest.server.api; + +import io.pravega.schemaregistry.contract.generated.rest.server.api.*; +import io.pravega.schemaregistry.contract.generated.rest.model.*; + +import org.glassfish.jersey.media.multipart.FormDataContentDisposition; + +import io.pravega.schemaregistry.contract.generated.rest.model.AddedTo; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo; + +import java.util.List; +import io.pravega.schemaregistry.contract.generated.rest.server.api.NotFoundException; + +import java.io.InputStream; + +import javax.ws.rs.core.Response; +import javax.ws.rs.core.SecurityContext; +import javax.validation.constraints.*; + +public abstract class SchemasApiService { + public abstract Response getSchemaReferences(SchemaInfo schemaInfo,SecurityContext securityContext) throws NotFoundException; +} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/StringUtil.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/StringUtil.java new file mode 100644 index 000000000..5d19e5e5f --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/StringUtil.java @@ -0,0 +1,42 @@ +package io.pravega.schemaregistry.contract.generated.rest.server.api; + + +public class StringUtil { + /** + * Check if the given array contains the given value (with case-insensitive comparison). + * + * @param array The array + * @param value The value to search + * @return true if the array contains the value + */ + public static boolean containsIgnoreCase(String[] array, String value) { + for (String str : array) { + if (value == null && str == null) return true; + if (value != null && value.equalsIgnoreCase(str)) return true; + } + return false; + } + + /** + * Join an array of strings with the given separator. + * + * Note: This might be replaced by utility method from commons-lang or guava someday + * if one of those libraries is added as dependency. + *

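     * For example, join(new String[]{"a", "b", "c"}, ", ") returns "a, b, c".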
+ * + * @param array The array of strings + * @param separator The separator + * @return the resulting string + */ + public static String join(String[] array, String separator) { + int len = array.length; + if (len == 0) return ""; + + StringBuilder out = new StringBuilder(); + out.append(array[0]); + for (int i = 1; i < len; i++) { + out.append(separator).append(array[i]); + } + return out.toString(); + } +} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/factories/GroupsApiServiceFactory.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/factories/GroupsApiServiceFactory.java new file mode 100644 index 000000000..3145181ad --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/factories/GroupsApiServiceFactory.java @@ -0,0 +1,13 @@ +package io.pravega.schemaregistry.contract.generated.rest.server.api.factories; + +import io.pravega.schemaregistry.contract.generated.rest.server.api.GroupsApiService; +import io.pravega.schemaregistry.contract.generated.rest.server.api.impl.GroupsApiServiceImpl; + + +public class GroupsApiServiceFactory { + private final static GroupsApiService service = new GroupsApiServiceImpl(); + + public static GroupsApiService getGroupsApi() { + return service; + } +} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/factories/SchemasApiServiceFactory.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/factories/SchemasApiServiceFactory.java new file mode 100644 index 000000000..8587b6fef --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/factories/SchemasApiServiceFactory.java @@ -0,0 +1,13 @@ +package io.pravega.schemaregistry.contract.generated.rest.server.api.factories; + +import io.pravega.schemaregistry.contract.generated.rest.server.api.SchemasApiService; +import io.pravega.schemaregistry.contract.generated.rest.server.api.impl.SchemasApiServiceImpl; + + +public class SchemasApiServiceFactory { + private final static SchemasApiService service = new SchemasApiServiceImpl(); + + public static SchemasApiService getSchemasApi() { + return service; + } +} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/impl/GroupsApiServiceImpl.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/impl/GroupsApiServiceImpl.java new file mode 100644 index 000000000..97aea99a9 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/impl/GroupsApiServiceImpl.java @@ -0,0 +1,134 @@ +package io.pravega.schemaregistry.contract.generated.rest.server.api.impl; + +import io.pravega.schemaregistry.contract.generated.rest.server.api.*; +import io.pravega.schemaregistry.contract.generated.rest.model.*; + +import io.pravega.schemaregistry.contract.generated.rest.model.CanRead; +import io.pravega.schemaregistry.contract.generated.rest.model.CodecTypesList; +import io.pravega.schemaregistry.contract.generated.rest.model.CreateGroupRequest; +import io.pravega.schemaregistry.contract.generated.rest.model.EncodingId; +import io.pravega.schemaregistry.contract.generated.rest.model.EncodingInfo; +import io.pravega.schemaregistry.contract.generated.rest.model.GetEncodingIdRequest; +import io.pravega.schemaregistry.contract.generated.rest.model.GroupHistory; +import 
io.pravega.schemaregistry.contract.generated.rest.model.GroupProperties; +import io.pravega.schemaregistry.contract.generated.rest.model.ListGroupsResponse; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaVersionsList; +import io.pravega.schemaregistry.contract.generated.rest.model.UpdateValidationRulesRequest; +import io.pravega.schemaregistry.contract.generated.rest.model.Valid; +import io.pravega.schemaregistry.contract.generated.rest.model.ValidateRequest; +import io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo; + +import java.util.List; +import io.pravega.schemaregistry.contract.generated.rest.server.api.NotFoundException; + +import java.io.InputStream; + +import org.glassfish.jersey.media.multipart.FormDataContentDisposition; + +import javax.ws.rs.core.Response; +import javax.ws.rs.core.SecurityContext; +import javax.validation.constraints.*; + +public class GroupsApiServiceImpl extends GroupsApiService { + @Override + public Response addCodecType(String groupName, String codecType, SecurityContext securityContext) throws NotFoundException { + // do some magic! + return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); + } + @Override + public Response addSchema(String groupName, SchemaInfo schemaInfo, SecurityContext securityContext) throws NotFoundException { + // do some magic! + return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); + } + @Override + public Response canRead(String groupName, SchemaInfo schemaInfo, SecurityContext securityContext) throws NotFoundException { + // do some magic! + return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); + } + @Override + public Response createGroup(CreateGroupRequest createGroupRequest, SecurityContext securityContext) throws NotFoundException { + // do some magic! + return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); + } + @Override + public Response deleteGroup(String groupName, SecurityContext securityContext) throws NotFoundException { + // do some magic! + return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); + } + @Override + public Response deleteSchemaVersion(String groupName, String type, Integer version, SecurityContext securityContext) throws NotFoundException { + // do some magic! + return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); + } + @Override + public Response deleteSchemaVersionOrinal(String groupName, Integer versionOrdinal, SecurityContext securityContext) throws NotFoundException { + // do some magic! + return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); + } + @Override + public Response getCodecTypesList(String groupName, SecurityContext securityContext) throws NotFoundException { + // do some magic! + return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); + } + @Override + public Response getEncodingId(String groupName, GetEncodingIdRequest getEncodingIdRequest, SecurityContext securityContext) throws NotFoundException { + // do some magic! 
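        // A concrete (non-generated) implementation would typically decode the request via
        // ModelHelper and delegate to the registry backend, roughly along these lines. The
        // 'registryService' collaborator and the request accessors shown here are illustrative
        // assumptions, not part of this generated stub:
        //
        //     io.pravega.schemaregistry.contract.data.VersionInfo version =
        //             ModelHelper.decode(getEncodingIdRequest.getVersionInfo());
        //     io.pravega.schemaregistry.contract.data.EncodingId id =
        //             registryService.getEncodingId(groupName, version, getEncodingIdRequest.getCodecType());
        //     return Response.status(Response.Status.OK).entity(ModelHelper.encode(id)).build();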
+ return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); + } + @Override + public Response getEncodingInfo(String groupName, Integer encodingId, SecurityContext securityContext) throws NotFoundException { + // do some magic! + return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); + } + @Override + public Response getGroupHistory(String groupName, SecurityContext securityContext) throws NotFoundException { + // do some magic! + return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); + } + @Override + public Response getGroupProperties(String groupName, SecurityContext securityContext) throws NotFoundException { + // do some magic! + return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); + } + @Override + public Response getSchemaFromVersion(String groupName, String type, Integer version, SecurityContext securityContext) throws NotFoundException { + // do some magic! + return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); + } + @Override + public Response getSchemaFromVersionOrdinal(String groupName, Integer versionOrdinal, SecurityContext securityContext) throws NotFoundException { + // do some magic! + return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); + } + @Override + public Response getSchemaVersion(String groupName, SchemaInfo schemaInfo, SecurityContext securityContext) throws NotFoundException { + // do some magic! + return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); + } + @Override + public Response getSchemaVersions(String groupName, String type, SecurityContext securityContext) throws NotFoundException { + // do some magic! + return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); + } + @Override + public Response getSchemas(String groupName, String type, SecurityContext securityContext) throws NotFoundException { + // do some magic! + return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); + } + @Override + public Response listGroups( String continuationToken, Integer limit, SecurityContext securityContext) throws NotFoundException { + // do some magic! + return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); + } + @Override + public Response updateSchemaValidationRules(String groupName, UpdateValidationRulesRequest updateValidationRulesRequest, SecurityContext securityContext) throws NotFoundException { + // do some magic! + return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); + } + @Override + public Response validate(String groupName, ValidateRequest validateRequest, SecurityContext securityContext) throws NotFoundException { + // do some magic! 
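        // Similarly, a real implementation of validate would decode the supplied schema and
        // rules with ModelHelper and ask the registry whether the schema passes the group's
        // current policy; the 'registryService' call, the request accessors and the fluent
        // Valid setter are assumptions shown for illustration only:
        //
        //     io.pravega.schemaregistry.contract.data.SchemaInfo schema =
        //             ModelHelper.decode(validateRequest.getSchemaInfo());
        //     boolean isValid = registryService.validateSchema(groupName, schema,
        //             ModelHelper.decode(validateRequest.getValidationRules()));
        //     return Response.status(Response.Status.OK).entity(new Valid().valid(isValid)).build();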
+ return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); + } +} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/impl/SchemasApiServiceImpl.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/impl/SchemasApiServiceImpl.java new file mode 100644 index 000000000..565f0f2fb --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/impl/SchemasApiServiceImpl.java @@ -0,0 +1,26 @@ +package io.pravega.schemaregistry.contract.generated.rest.server.api.impl; + +import io.pravega.schemaregistry.contract.generated.rest.server.api.*; +import io.pravega.schemaregistry.contract.generated.rest.model.*; + +import io.pravega.schemaregistry.contract.generated.rest.model.AddedTo; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo; + +import java.util.List; +import io.pravega.schemaregistry.contract.generated.rest.server.api.NotFoundException; + +import java.io.InputStream; + +import org.glassfish.jersey.media.multipart.FormDataContentDisposition; + +import javax.ws.rs.core.Response; +import javax.ws.rs.core.SecurityContext; +import javax.validation.constraints.*; + +public class SchemasApiServiceImpl extends SchemasApiService { + @Override + public Response getSchemaReferences(SchemaInfo schemaInfo, SecurityContext securityContext) throws NotFoundException { + // do some magic! + return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); + } +} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/transform/ModelHelper.java b/contract/src/main/java/io/pravega/schemaregistry/contract/transform/ModelHelper.java new file mode 100644 index 000000000..fa261ab42 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/transform/ModelHelper.java @@ -0,0 +1,243 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.contract.transform; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.base.Preconditions; +import com.google.common.collect.ImmutableMap; +import io.pravega.schemaregistry.contract.generated.rest.model.Compatibility; +import io.pravega.schemaregistry.contract.generated.rest.model.EncodingId; +import io.pravega.schemaregistry.contract.generated.rest.model.EncodingInfo; +import io.pravega.schemaregistry.contract.generated.rest.model.GroupHistoryRecord; +import io.pravega.schemaregistry.contract.generated.rest.model.GroupProperties; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaValidationRule; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaValidationRules; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaWithVersion; +import io.pravega.schemaregistry.contract.generated.rest.model.SerializationFormat; +import io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo; +import org.apache.commons.lang3.NotImplementedException; + +import java.nio.ByteBuffer; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +/** + * Provides translation (encode/decode) between the Model classes and its REST representation. + */ +public class ModelHelper { + private static final ObjectMapper MAPPER = new ObjectMapper(); + + // region decode + public static io.pravega.schemaregistry.contract.data.SchemaInfo decode(SchemaInfo schemaInfo) { + Preconditions.checkArgument(schemaInfo != null); + Preconditions.checkArgument(schemaInfo.getType() != null); + Preconditions.checkArgument(schemaInfo.getSerializationFormat() != null); + Preconditions.checkArgument(schemaInfo.getProperties() != null); + Preconditions.checkArgument(schemaInfo.getSchemaData() != null); + io.pravega.schemaregistry.contract.data.SerializationFormat serializationFormat = decode(schemaInfo.getSerializationFormat()); + return new io.pravega.schemaregistry.contract.data.SchemaInfo(schemaInfo.getType(), + serializationFormat, ByteBuffer.wrap(schemaInfo.getSchemaData()), ImmutableMap.copyOf(schemaInfo.getProperties())); + } + + public static io.pravega.schemaregistry.contract.data.SerializationFormat decode(SerializationFormat serializationFormat) { + Preconditions.checkArgument(serializationFormat != null); + switch (serializationFormat.getSerializationFormat()) { + case CUSTOM: + Preconditions.checkArgument(serializationFormat.getCustomTypeName() != null); + return io.pravega.schemaregistry.contract.data.SerializationFormat.custom(serializationFormat.getCustomTypeName()); + default: + return searchEnum(io.pravega.schemaregistry.contract.data.SerializationFormat.class, serializationFormat.getSerializationFormat().name()); + } + } + + public static io.pravega.schemaregistry.contract.data.SchemaValidationRules decode(SchemaValidationRules rules) { + Preconditions.checkArgument(rules != null); + Preconditions.checkArgument(rules.getRules() != null); + List list = rules.getRules().entrySet().stream().map(rule -> { + if (rule.getValue().getRule() instanceof Map) { + String name = (String) ((Map) rule.getValue().getRule()).get("name"); + Preconditions.checkArgument(name.equals(Compatibility.class.getSimpleName())); + + return decode(MAPPER.convertValue(rule.getValue().getRule(), Compatibility.class)); + } else 
if (rule.getValue().getRule() instanceof Compatibility) { + return decode((Compatibility) rule.getValue().getRule()); + } else { + throw new IllegalArgumentException("Rule not supported"); + } + }).collect(Collectors.toList()); + return io.pravega.schemaregistry.contract.data.SchemaValidationRules.of(list); + } + + public static io.pravega.schemaregistry.contract.data.Compatibility decode(Compatibility compatibility) { + Preconditions.checkArgument(compatibility.getName() != null); + Preconditions.checkArgument(compatibility.getPolicy() != null); + if (compatibility.getPolicy().equals(Compatibility.PolicyEnum.BACKWARDTILL)) { + Preconditions.checkArgument(compatibility.getBackwardTill() != null); + } + if (compatibility.getPolicy().equals(Compatibility.PolicyEnum.FORWARDTILL)) { + Preconditions.checkArgument(compatibility.getForwardTill() != null); + } + if (compatibility.getPolicy().equals(Compatibility.PolicyEnum.BACKWARDANDFORWARDTILL)) { + Preconditions.checkArgument(compatibility.getBackwardTill() != null); + Preconditions.checkArgument(compatibility.getForwardTill() != null); + } + + io.pravega.schemaregistry.contract.data.VersionInfo backwardTill = compatibility.getBackwardTill() == null ? null : decode(compatibility.getBackwardTill()); + io.pravega.schemaregistry.contract.data.VersionInfo forwardTill = compatibility.getForwardTill() == null ? null : decode(compatibility.getForwardTill()); + + return new io.pravega.schemaregistry.contract.data.Compatibility( + searchEnum(io.pravega.schemaregistry.contract.data.Compatibility.Type.class, compatibility.getPolicy().name()), + backwardTill, forwardTill); + } + + public static io.pravega.schemaregistry.contract.data.VersionInfo decode(VersionInfo versionInfo) { + Preconditions.checkArgument(versionInfo != null); + Preconditions.checkArgument(versionInfo.getType() != null); + Preconditions.checkArgument(versionInfo.getVersion() != null); + Preconditions.checkArgument(versionInfo.getOrdinal() != null); + return new io.pravega.schemaregistry.contract.data.VersionInfo(versionInfo.getType(), versionInfo.getVersion(), versionInfo.getOrdinal()); + } + + public static io.pravega.schemaregistry.contract.data.EncodingInfo decode(EncodingInfo encodingInfo) { + Preconditions.checkArgument(encodingInfo != null); + return new io.pravega.schemaregistry.contract.data.EncodingInfo(decode(encodingInfo.getVersionInfo()), + decode(encodingInfo.getSchemaInfo()), encodingInfo.getCodecType()); + } + + public static io.pravega.schemaregistry.contract.data.SchemaWithVersion decode(SchemaWithVersion schemaWithVersion) { + Preconditions.checkArgument(schemaWithVersion != null); + return new io.pravega.schemaregistry.contract.data.SchemaWithVersion(decode(schemaWithVersion.getSchemaInfo()), + decode(schemaWithVersion.getVersion())); + } + + public static io.pravega.schemaregistry.contract.data.GroupHistoryRecord decode(GroupHistoryRecord schemaEvolution) { + Preconditions.checkArgument(schemaEvolution != null); + + return new io.pravega.schemaregistry.contract.data.GroupHistoryRecord(decode(schemaEvolution.getSchemaInfo()), + decode(schemaEvolution.getVersion()), decode(schemaEvolution.getValidationRules()), schemaEvolution.getTimestamp(), + schemaEvolution.getSchemaString()); + } + + public static io.pravega.schemaregistry.contract.data.EncodingId decode(EncodingId encodingId) { + Preconditions.checkArgument(encodingId != null); + Preconditions.checkArgument(encodingId.getEncodingId() != null); + + return new 
io.pravega.schemaregistry.contract.data.EncodingId(encodingId.getEncodingId()); + } + + public static io.pravega.schemaregistry.contract.data.GroupProperties decode(GroupProperties groupProperties) { + Preconditions.checkArgument(groupProperties != null); + Preconditions.checkArgument(groupProperties.isAllowMultipleTypes() != null); + + return io.pravega.schemaregistry.contract.data.GroupProperties.builder().serializationFormat(decode(groupProperties.getSerializationFormat())) + .schemaValidationRules(decode(groupProperties.getSchemaValidationRules())).allowMultipleTypes(groupProperties.isAllowMultipleTypes()) + .properties(ImmutableMap.copyOf(groupProperties.getProperties())).build(); + } + // endregion + + // region encode + public static GroupHistoryRecord encode(io.pravega.schemaregistry.contract.data.GroupHistoryRecord groupHistoryRecord) { + return new GroupHistoryRecord().schemaInfo(encode(groupHistoryRecord.getSchema())) + .version(encode(groupHistoryRecord.getVersion())) + .validationRules(encode(groupHistoryRecord.getRules())) + .timestamp(groupHistoryRecord.getTimestamp()) + .schemaString(groupHistoryRecord.getSchemaString()); + } + + public static SchemaValidationRules encode(io.pravega.schemaregistry.contract.data.SchemaValidationRules rules) { + Map map = rules.getRules().entrySet().stream().collect(Collectors.toMap(rule -> { + if (rule.getValue() instanceof io.pravega.schemaregistry.contract.data.Compatibility) { + return io.pravega.schemaregistry.contract.generated.rest.model.Compatibility.class.getSimpleName(); + } else { + throw new NotImplementedException("Rule not implemented"); + } + }, rule -> { + SchemaValidationRule schemaValidationRule; + if (rule.getValue() instanceof io.pravega.schemaregistry.contract.data.Compatibility) { + schemaValidationRule = new SchemaValidationRule().rule(encode((io.pravega.schemaregistry.contract.data.Compatibility) rule.getValue())); + } else { + throw new NotImplementedException("Rule not implemented"); + } + return schemaValidationRule; + })); + return new SchemaValidationRules().rules(map); + } + + public static Compatibility encode(io.pravega.schemaregistry.contract.data.Compatibility compatibility) { + Compatibility policy = new io.pravega.schemaregistry.contract.generated.rest.model.Compatibility() + .name(compatibility.getName()) + .policy(searchEnum(Compatibility.PolicyEnum.class, compatibility.getCompatibility().name())); + if (compatibility.getBackwardTill() != null) { + VersionInfo backwardTill = encode(compatibility.getBackwardTill()); + policy = policy.backwardTill(backwardTill); + } + if (compatibility.getForwardTill() != null) { + VersionInfo forwardTill = encode(compatibility.getForwardTill()); + policy = policy.forwardTill(forwardTill); + } + return policy; + } + + public static SchemaWithVersion encode(io.pravega.schemaregistry.contract.data.SchemaWithVersion schemaWithVersion) { + return new SchemaWithVersion().schemaInfo(encode(schemaWithVersion.getSchemaInfo())) + .version(encode(schemaWithVersion.getVersionInfo())); + } + + public static GroupProperties encode(io.pravega.schemaregistry.contract.data.GroupProperties groupProperties) { + return new GroupProperties() + .serializationFormat(encode(groupProperties.getSerializationFormat())) + .properties(groupProperties.getProperties()) + .allowMultipleTypes(groupProperties.isAllowMultipleTypes()) + .schemaValidationRules(encode(groupProperties.getSchemaValidationRules())); + } + + public static VersionInfo encode(io.pravega.schemaregistry.contract.data.VersionInfo 
versionInfo) { + return new VersionInfo().type(versionInfo.getType()).version(versionInfo.getVersion()).ordinal(versionInfo.getOrdinal()); + } + + public static SchemaInfo encode(io.pravega.schemaregistry.contract.data.SchemaInfo schemaInfo) { + return new SchemaInfo().properties(schemaInfo.getProperties()).schemaData(schemaInfo.getSchemaData().array()) + .type(schemaInfo.getType()).serializationFormat(encode(schemaInfo.getSerializationFormat())); + } + + public static SerializationFormat encode(io.pravega.schemaregistry.contract.data.SerializationFormat serializationFormat) { + if (serializationFormat.equals(io.pravega.schemaregistry.contract.data.SerializationFormat.Custom)) { + Preconditions.checkArgument(serializationFormat.getCustomTypeName() != null); + SerializationFormat serializationFormatModel = new SerializationFormat().serializationFormat(SerializationFormat.SerializationFormatEnum.CUSTOM); + return serializationFormatModel.customTypeName(serializationFormat.getCustomTypeName()); + } else { + return new SerializationFormat().serializationFormat( + searchEnum(SerializationFormat.SerializationFormatEnum.class, serializationFormat.name())); + } + } + + public static EncodingId encode(io.pravega.schemaregistry.contract.data.EncodingId encodingId) { + return new EncodingId().encodingId(encodingId.getId()); + } + + public static EncodingInfo encode(io.pravega.schemaregistry.contract.data.EncodingInfo encodingInfo) { + return new EncodingInfo().codecType(encodingInfo.getCodecType()) + .versionInfo(encode(encodingInfo.getVersionInfo())) + .schemaInfo(encode(encodingInfo.getSchemaInfo())); + } + + // endregion + + private static <T extends Enum<T>> T searchEnum(Class<T> enumeration, String search) { + for (T each : enumeration.getEnumConstants()) { + if (each.name().compareToIgnoreCase(search) == 0) { + return each; + } + } + throw new IllegalArgumentException(); + } +} \ No newline at end of file diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/v1/ApiV1.java b/contract/src/main/java/io/pravega/schemaregistry/contract/v1/ApiV1.java new file mode 100644 index 000000000..7ce526198 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/v1/ApiV1.java @@ -0,0 +1,554 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + *
+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + *
+ * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.contract.v1; + +import io.pravega.schemaregistry.contract.generated.rest.model.AddedTo; +import io.pravega.schemaregistry.contract.generated.rest.model.CanRead; +import io.pravega.schemaregistry.contract.generated.rest.model.CodecTypesList; +import io.pravega.schemaregistry.contract.generated.rest.model.CreateGroupRequest; +import io.pravega.schemaregistry.contract.generated.rest.model.EncodingId; +import io.pravega.schemaregistry.contract.generated.rest.model.EncodingInfo; +import io.pravega.schemaregistry.contract.generated.rest.model.GetEncodingIdRequest; +import io.pravega.schemaregistry.contract.generated.rest.model.GroupHistory; +import io.pravega.schemaregistry.contract.generated.rest.model.GroupProperties; +import io.pravega.schemaregistry.contract.generated.rest.model.ListGroupsResponse; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaVersionsList; +import io.pravega.schemaregistry.contract.generated.rest.model.UpdateValidationRulesRequest; +import io.pravega.schemaregistry.contract.generated.rest.model.Valid; +import io.pravega.schemaregistry.contract.generated.rest.model.ValidateRequest; +import io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo; +import io.pravega.schemaregistry.contract.generated.rest.server.api.NotFoundException; +import io.swagger.annotations.ApiParam; + +import javax.ws.rs.Consumes; +import javax.ws.rs.DELETE; +import javax.ws.rs.GET; +import javax.ws.rs.POST; +import javax.ws.rs.PUT; +import javax.ws.rs.Path; +import javax.ws.rs.PathParam; +import javax.ws.rs.Produces; +import javax.ws.rs.QueryParam; +import javax.ws.rs.container.AsyncResponse; +import javax.ws.rs.container.Suspended; +import javax.ws.rs.core.Response; + +public class ApiV1 { + @Path("/ping") + public interface Ping { + @GET + Response ping(); + } + + /** + * Sync Group apis. Identical to {@link GroupsApiAsync}. All methods in this interface are synchronous and return {@link Response} object. + * The purposes of this interface is to be used by proxy-client. 
+ */ + @Path("/v1/groups") + @io.swagger.annotations.Api(description = "the groups API") + public interface GroupsApi { + @POST + @Path("/{groupName}/codecTypes") + @Consumes({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Adds a new codecType to the group.", response = Void.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 201, message = "Successfully added codecType to group", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while creating a Group", response = Void.class)}) + Response addCodecType(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "The codec type", required = true) String codecType); + + @POST + @Path("/{groupName}/schemas/versions") + @Consumes({"application/json"}) + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Adds a new schema to the group", response = VersionInfo.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 201, message = "Successfully added schema to the group", response = VersionInfo.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 409, message = "Incompatible schema", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 417, message = "Invalid serialization format", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while creating a Group", response = Void.class)}) + Response addSchema(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "Add new schema to group", required = true) SchemaInfo schemaInfo); + + @POST + @Path("/{groupName}/schemas/versions/canRead") + @Consumes({"application/json"}) + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Checks if given schema can be used for reads subject to compatibility policy in the schema validation rules.", response = CanRead.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Response to tell whether schema can be used to read existing schemas", response = CanRead.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while checking schema for readability", response = Void.class)}) + Response canRead(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "Checks if schema can be used to read the data in the stream based on compatibility rules.", required = true) SchemaInfo schemaInfo); + + @POST + @Consumes({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Create a new Group", response = Void.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 201, message = "Successfully added group", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 409, message = "Group with given name already exists", 
response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while creating a Group", response = Void.class)}) + Response createGroup(@ApiParam(value = "The Group configuration", required = true) CreateGroupRequest createGroupRequest); + + @DELETE + @Path("/{groupName}") + @io.swagger.annotations.ApiOperation(value = "", notes = "Delete a Group", response = Void.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 204, message = "Successfully deleted the Group", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while deleting the Group", response = Void.class)}) + Response deleteGroup(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName); + + @GET + @Path("/{groupName}/codecTypes") + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Get codecTypes for the group.", response = CodecTypesList.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Found CodecTypes", response = CodecTypesList.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group or encoding id with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching codecTypes registered", response = Void.class)}) + Response getCodecTypesList(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName); + + @GET + @Path("/{groupName}/encodings/{encodingId}") + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Get the encoding information corresponding to the encoding id.", response = EncodingInfo.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Found Encoding", response = EncodingInfo.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group or encoding id with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while getting encoding info corresponding to encoding id", response = Void.class)}) + Response getEncodingInfo(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "Encoding id that identifies a unique combination of schema and codecType", required = true) @PathParam("encodingId") Integer encodingId); + + @GET + @Path("/{groupName}") + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Fetch the properties of an existing Group", response = GroupProperties.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Found Group properties", response = GroupProperties.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching Group details", response = Void.class)}) + Response getGroupProperties(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName); + + @GET + @Path("/{groupName}/history") + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = 
"Fetch the history of schema evolution of a Group", response = GroupHistory.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Found Group history", response = GroupHistory.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching Group history", response = Void.class)}) + Response getGroupHistory(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName); + + @GET + @Path("/{groupName}/schemas/versions") + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Get all schema versions for the group", response = SchemaVersionsList.class, tags = {"Schema", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Versioned history of schemas registered under the group", response = SchemaVersionsList.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching Group details", response = Void.class)}) + Response getSchemaVersions(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "Type") @QueryParam("type") String type); + + @GET + @Path("/{groupName}/schemas") + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Fetch latest schema versions for all objects identified by SchemaInfo#type under a Group. If query param type is specified then latest schema for the type is returned.", response = SchemaVersionsList.class, tags = {"Schema", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Latest schemas for all objects identified by SchemaInfo#type under the group", response = SchemaVersionsList.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching Group details", response = Void.class)}) + Response getSchemas(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "Type of object") @QueryParam("type") String type); + + @PUT + @Path("/{groupName}/encodings") + @Consumes({"application/json"}) + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Get an encoding id that uniquely identifies a schema version and codec type pair.", response = EncodingId.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Found Encoding", response = EncodingId.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name or version not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 412, message = "Codec type not registered", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while getting encoding id", response = Void.class)}) + Response getEncodingId(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "Get schema 
corresponding to the version", required = true) GetEncodingIdRequest getEncodingIdRequest); + + @DELETE + @Path("/{groupName}/schemas/versions/{versionOrdinal}") + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Delete schema version from the group.", response = Void.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 204, message = "Schema corresponding to the version deleted", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while deleting schema from group", response = Void.class)}) + Response deleteSchemaFromVersionOrdinal(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "version ordinal", required = true) @PathParam("versionOrdinal") Integer version); + + @GET + @Path("/{groupName}/schemas/versions/{versionOrdinal}") + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Get schema from the version ordinal that uniquely identifies the schema in the group.", response = SchemaInfo.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Schema corresponding to the version", response = SchemaInfo.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching schema from version", response = Void.class)}) + Response getSchemaFromVersionOrdinal(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "version ordinal", required = true) @PathParam("versionOrdinal") Integer version); + + @GET + @Path("/{groupName}/schemas/{type}/versions/{version}") + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Get schema from the version ordinal that uniquely identifies the schema in the group.", response = SchemaInfo.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Schema corresponding to the version", response = SchemaInfo.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching schema from version", response = Void.class)}) + public Response getSchemaFromVersion(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "Schema type from SchemaInfo#type or VersionInfo#type", required = true) @PathParam("type") String type, + @ApiParam(value = "Version number", required = true) @PathParam("version") Integer version); + + @DELETE + @Path("/{groupName}/schemas/{type}/versions/{version}") + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Delete schema version from the group.", response = Void.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 204, message = "Schema corresponding to the version", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 404, 
message = "Group with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while deleting schema from group", response = Void.class)}) + Response deleteSchemaVersion(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "Schema type from SchemaInfo#type or VersionInfo#type", required = true) @PathParam("type") String type, + @ApiParam(value = "Version number", required = true) @PathParam("version") Integer version); + + @POST + @Path("/{groupName}/schemas/versions/find") + @Consumes({"application/json"}) + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Get the version for the schema if it is registered.", response = VersionInfo.class, tags = {"Schema", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Schema version", response = VersionInfo.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching Group details", response = Void.class)}) + Response getSchemaVersion(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "Get schema corresponding to the version", required = true) SchemaInfo schemaInfo); + + @GET + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "List all groups", response = ListGroupsResponse.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "List of all groups", response = ListGroupsResponse.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching the list of Groups", response = Void.class)}) + Response listGroups(@ApiParam(value = "Continuation token") @QueryParam("continuationToken") String continuationToken, + @ApiParam(value = "The numbers of items to return") @QueryParam("limit") Integer limit); + + @PUT + @Path("/{groupName}/rules") + @Consumes({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "update schema validation rules of an existing Group", response = Void.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Updated schema validation policy", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 409, message = "Write conflict", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while updating Group's schema validation rules", response = Void.class)}) + Response updateSchemaValidationRules(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "update group policy", required = true) UpdateValidationRulesRequest updateValidationRulesRequest); + + @POST + @Path("/{groupName}/schemas/versions/validate") + @Consumes({"application/json"}) + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Checks if given schema is compatible with schemas in the registry for current policy setting.", response = Valid.class, tags = {"Group", }) + 
@io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Schema validation response", response = Valid.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while trying to validate schema", response = Void.class)}) + Response validate(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "Checks if schema is valid with respect to supplied validation rules", required = true) ValidateRequest validateRequest); + } + + /** + * Async Group APIs. Identical to {@link GroupsApi}. All methods in this interface are asynchronous and use + * {@link AsyncResponse}. This is used on the service side so that all API implementations are asynchronous. + */ + @Path("/v1/groups") + @io.swagger.annotations.Api(description = "the groups API") + public interface GroupsApiAsync { + @POST + @Path("/{groupName}/codecTypes") + @Consumes({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Adds a new codecType to the group.", response = Void.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 201, message = "Successfully added codecType to group", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while registering codecType to a Group", response = Void.class)}) + void addCodecType(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "Add codec type", required = true) String codecType, @Suspended AsyncResponse asyncResponse) throws NotFoundException; + + @POST + @Path("/{groupName}/schemas/versions") + @Consumes({"application/json"}) + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Adds a new schema to the group", response = VersionInfo.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 201, message = "Successfully added schema to the group", response = VersionInfo.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 409, message = "Incompatible schema", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 417, message = "Invalid serialization format", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while adding a schema", response = Void.class)}) + void addSchema(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "Add new schema to group", required = true) SchemaInfo schemaInfo, @Suspended AsyncResponse asyncResponse) throws NotFoundException; + + @POST + @Path("/{groupName}/schemas/versions/canRead") + @Consumes({"application/json"}) + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Checks if given schema can be used for reads subject to compatibility policy in the schema validation rules.", response = CanRead.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Response to tell 
whether schema can be used to read existing schemas", response = CanRead.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while checking schema for readability", response = Void.class)}) + void canRead(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "Checks if schema can be used to read the data in the stream based on compatibility rules.", required = true) SchemaInfo schemaInfo, @Suspended AsyncResponse asyncResponse) throws NotFoundException; + + @POST + @Consumes({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Create a new Group", response = Void.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 201, message = "Successfully added group", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 409, message = "Group with given name already exists", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while creating a Group", response = Void.class)}) + void createGroup(@ApiParam(value = "The Group configuration", required = true) CreateGroupRequest createGroupRequest, @Suspended AsyncResponse asyncResponse) throws NotFoundException; + + @DELETE + @Path("/{groupName}") + @io.swagger.annotations.ApiOperation(value = "", notes = "Delete a Group", response = Void.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 204, message = "Successfully deleted the Group", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while deleting the Group", response = Void.class)}) + void deleteGroup(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, @Suspended AsyncResponse asyncResponse) throws NotFoundException; + + @GET + @Path("/{groupName}/codecTypes") + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Get codecTypes for the group.", response = CodecTypesList.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Found CodecTypes", response = CodecTypesList.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group or encoding id with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching codecTypes registered", response = Void.class)}) + void getCodecTypesList(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, @Suspended AsyncResponse asyncResponse) throws NotFoundException; + + @GET + @Path("/{groupName}/encodings/{encodingId}") + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Get the encoding information corresponding to the encoding id.", response = EncodingInfo.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Found Encoding", response = EncodingInfo.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group or encoding id with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = 
"Internal server error while getting encoding info corresponding to encoding id", response = Void.class)}) + void getEncodingInfo(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "Encoding id that identifies a unique combination of schema and codecType", required = true) @PathParam("encodingId") Integer encodingId, @Suspended AsyncResponse asyncResponse) throws NotFoundException; + + @GET + @Path("/{groupName}") + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Fetch the properties of an existing Group", response = GroupProperties.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Found Group properties", response = GroupProperties.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching Group details", response = Void.class)}) + void getGroupProperties(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, @Suspended AsyncResponse asyncResponse) throws NotFoundException; + + @GET + @Path("/{groupName}/history") + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Fetch the history of schema evolution of a Group", response = GroupHistory.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Found Group history", response = GroupHistory.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching Group history", response = Void.class)}) + void getGroupHistory(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, @Suspended AsyncResponse asyncResponse) throws NotFoundException; + + @GET + @Path("/{groupName}/schemas/versions") + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Get all schema versions for the group.", response = SchemaVersionsList.class, tags = {"Schema", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Versioned history of schemas registered under the group", response = SchemaVersionsList.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching Group details", response = Void.class)}) + void getSchemaVersions(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "Type") @QueryParam("type") String type, + @Suspended AsyncResponse asyncResponse) throws NotFoundException; + + @GET + @Path("/{groupName}/schemas") + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Fetch latest schema versions for all objects identified by SchemaInfo#type under a Group. 
If query param type is specified then latest schema for the type is returned.", response = SchemaVersionsList.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Latest schemas for all objects identified by SchemaInfo#type under the group", response = SchemaVersionsList.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching Group's latest schemas", response = Void.class)}) + void getSchemas(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "Type of object") @QueryParam("type") String type, + @Suspended AsyncResponse asyncResponse) throws NotFoundException; + + @PUT + @Path("/{groupName}/encodings") + @Consumes({"application/json"}) + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Get an encoding id that uniquely identifies a schema version and codec type pair.", response = EncodingId.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Found Encoding", response = EncodingId.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name or version not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 412, message = "Codec type not registered", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while getting encoding id", response = Void.class)}) + void getEncodingId(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "Get schema corresponding to the version", required = true) GetEncodingIdRequest getEncodingIdRequest, @Suspended AsyncResponse asyncResponse) throws NotFoundException; + + @GET + @Path("/{groupName}/schemas/versions/{versionOrdinal}") + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Get schema from the version ordinal that uniquely identifies the schema in the group.", response = SchemaInfo.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Schema corresponding to the version", response = SchemaInfo.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching schema from version", response = Void.class)}) + void getSchemaFromVersionOrdinal(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "version ordinal", required = true) @PathParam("versionOrdinal") Integer version, @Suspended AsyncResponse asyncResponse) throws NotFoundException; + + @DELETE + @Path("/{groupName}/schemas/versions/{versionOrdinal}") + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Delete schema version from the group.", response = Void.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 204, message = "Schema corresponding to the version", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given 
name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while deleting schema from group", response = Void.class)}) + void deleteSchemaFromVersionOrdinal(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "version ordinal", required = true) @PathParam("versionOrdinal") Integer version, @Suspended AsyncResponse asyncResponse) throws NotFoundException; + + @GET + @Path("/{groupName}/schemas/{type}/versions/{version}") + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Get schema from the version ordinal that uniquely identifies the schema in the group.", response = SchemaInfo.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Schema corresponding to the version", response = SchemaInfo.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching schema from version", response = Void.class)}) + void getSchemaFromVersion(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "Schema type from SchemaInfo#type or VersionInfo#type", required = true) @PathParam("type") String type, + @ApiParam(value = "Version number", required = true) @PathParam("version") Integer version, + @Suspended AsyncResponse asyncResponse); + + @DELETE + @Path("/{groupName}/schemas/{type}/versions/{version}") + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Delete schema version from the group.", response = Void.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 204, message = "Schema corresponding to the version", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while deleting schema from group", response = Void.class)}) + void deleteSchemaVersion(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "Schema type from SchemaInfo#type or VersionInfo#type", required = true) @PathParam("type") String type, + @ApiParam(value = "Version number", required = true) @PathParam("version") Integer version, + @Suspended AsyncResponse asyncResponse); + + @POST + @Path("/{groupName}/schemas/versions/find") + @Consumes({"application/json"}) + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Get the version for the schema if it is registered.", response = VersionInfo.class, tags = {"Schema", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Schema version", response = VersionInfo.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching Group details", response = Void.class)}) + void getSchemaVersion(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "Get schema corresponding to the version", required 
= true) SchemaInfo schemaInfo, + @Suspended AsyncResponse asyncResponse) throws NotFoundException; + + @GET + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "List all groups", response = ListGroupsResponse.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "List of all groups", response = ListGroupsResponse.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching the list of Groups", response = Void.class)}) + void listGroups(@ApiParam(value = "Continuation token") @QueryParam("continuationToken") String continuationToken, + @ApiParam(value = "The number of items to return") @QueryParam("limit") Integer limit, @Suspended AsyncResponse asyncResponse) throws NotFoundException; + + @PUT + @Path("/{groupName}/rules") + @Consumes({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "update schema validation rules of an existing Group", response = Void.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Updated schema validation policy", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 409, message = "Write conflict", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while updating Group's schema validation rules", response = Void.class)}) + void updateSchemaValidationRules(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "update group policy", required = true) UpdateValidationRulesRequest updateValidationRulesRequest, @Suspended AsyncResponse asyncResponse) throws NotFoundException; + + @POST + @Path("/{groupName}/schemas/versions/validate") + @Consumes({"application/json"}) + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Checks if given schema is compatible with schemas in the registry for current policy setting.", response = Valid.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Schema validation response", response = Valid.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while trying to validate schema", response = Void.class)}) + void validate(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "Checks if schema is valid with respect to supplied validation rules", required = true) ValidateRequest validateRequest, @Suspended AsyncResponse asyncResponse) throws NotFoundException; + } + + + /** + * Sync Schemas APIs. Identical to {@link SchemasApiAsync}. All methods in this interface are synchronous and return a {@link Response} object. + * This interface is intended to be used by the proxy client.
+ */ + @Path("/v1/schemas") + @io.swagger.annotations.Api(description = "the schemas API") + public interface SchemasApi { + @POST + @Path("/addedTo") + @Consumes({"application/json"}) + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Gets a map of groups to version info for all the groups where the schema is registered. SchemaInfo#properties is ignored while comparing the schema.", response = AddedTo.class, tags = {"Schema", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Schema version", response = AddedTo.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Schema not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching Schema references", response = Void.class)}) + Response getSchemaReferences(@ApiParam(value = "Get schema references for the supplied schema", required = true) SchemaInfo schemaInfo); + + } + + /** + * Async Schemas APIs. Identical to {@link SchemasApi}. All methods in this interface are asynchronous. + */ + @Path("/v1/schemas") + @io.swagger.annotations.Api(description = "the schemas API") + public interface SchemasApiAsync { + @POST + @Path("/addedTo") + @Consumes({"application/json"}) + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Gets a map of groups to version info for all the groups where the schema is registered. SchemaInfo#properties is ignored while comparing the schema.", response = AddedTo.class, tags = {"Schema", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Schema version", response = AddedTo.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Schema not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching Schema references", response = Void.class)}) + void getSchemaReferences(@ApiParam(value = "Get schema references for the supplied schema", required = true) SchemaInfo schemaInfo, + @Suspended AsyncResponse asyncResponse); + } + +} diff --git a/contract/src/main/swagger/README.md b/contract/src/main/swagger/README.md new file mode 100644 index 000000000..bc196acf3 --- /dev/null +++ b/contract/src/main/swagger/README.md @@ -0,0 +1,44 @@ + +Instructions to generate Server REST API stubs + +## Delete previously generated directory +``` +rm -Rf contract/src/main/java/io/pravega/schemaregistry/contract/generated +``` + +## Update SchemaRegistry.yaml +All REST API modifications should be done by updating the contract/src/main/swagger/SchemaRegistry.yaml specification file. +This can be done manually or by using the online editor at http://editor.swagger.io. + +## Download Swagger codegen +Download swagger-codegen-cli from maven - http://repo1.maven.org/maven2/io/swagger/swagger-codegen-cli/2.2.3/swagger-codegen-cli-2.2.3.jar + +## Generate the API stubs using Swagger Codegen +``` +java -jar swagger-codegen-cli.jar generate -i /contract/src/main/swagger/SchemaRegistry.yaml -l jaxrs -c /contract/src/main/swagger/server.config.json -o /contract/ +``` + +## Remove extra files created by codegen +All files that get generated outside of the contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest folder should be deleted and not committed to git.
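+
+A quick way to double-check what codegen left outside of the committed generated/rest package before deleting it is a dry run over untracked files (an illustrative sketch only; it assumes the repository layout described above):
+```
+# List untracked files produced by codegen that are NOT under the generated/rest package.
+git status --porcelain contract/ | grep '^??' | grep -v 'contract/generated/rest'
+```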
+ +## Update ApiV1.java +The JAXRS API stubs decorated with swagger annotations are generated under contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api. +Copy these API descriptions into the interfaces in contract/src/main/java/io/pravega/schemaregistry/contract/v1/ApiV1.java. +Also make an asynchronous version of the APIs in the same ApiV1.java so that the service side uses only jersey async interfaces. + +## Generate documentation +### Download Swagger2Markup CLI +https://jcenter.bintray.com/io/github/swagger2markup/swagger2markup-cli/1.3.3/swagger2markup-cli-1.3.3.jar + +### Generate and save the markup documentation +``` +java -Dswagger2markup.markupLanguage=MARKDOWN -Dswagger2markup.generatedExamplesEnabled=true -jar swagger2markup-cli-1.3.3.jar convert -i /contract/src/main/swagger/SchemaRegistry.yaml -f /documentation/src/docs/rest/restapis +``` diff --git a/contract/src/main/swagger/SchemaRegistry.yaml b/contract/src/main/swagger/SchemaRegistry.yaml new file mode 100644 index 000000000..92dba5fd1 --- /dev/null +++ b/contract/src/main/swagger/SchemaRegistry.yaml @@ -0,0 +1,867 @@ +# +# Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Description of the Pravega Schema Registry APIs. + +swagger: "2.0" +info: + description: "REST APIs for Pravega Schema Registry." + version: "0.0.1" + title: Pravega Schema Registry APIs + license: + name: "Apache 2.0" + url: "http://www.apache.org/licenses/LICENSE-2.0" +basePath: "/v1" +tags: +- name: "Group" + description: "Group related APIs" +- name: "Schemas" + description: "Schema related APIs" +schemes: + - http +paths: + /groups: + get: + tags: + - "Group" + operationId: listGroups + description: List all groups + produces: + - application/json + parameters: + - in: query + name: continuationToken + type: string + description: Continuation token + - in: query + name: limit + type: integer + description: The number of items to return + required: + - limit + responses: + 200: + description: List of all groups + schema: + $ref: "#/definitions/ListGroupsResponse" + 500: + description: Internal server error while fetching the list of Groups + post: + tags: + - "Group" + operationId: createGroup + description: Create a new Group + consumes: + - application/json + parameters: + - in: body + name: CreateGroupRequest + description: The Group configuration + required: true + schema: + type: object + properties: + groupName: + type: string + groupProperties: + $ref: "#/definitions/GroupProperties" + required: + - groupName + - groupProperties + responses: + 201: + description: Successfully added group + 409: + description: Group with given name already exists + 500: + description: Internal server error while creating a Group + /groups/{groupName}: + parameters: + - in: path + name: groupName + description: Group name + required: true + type: string + get: + tags: + - "Group" + operationId: getGroupProperties + description: Fetch the properties of an existing Group + produces: + - application/json + responses: + 200: + description: Found Group properties + schema: + $ref: "#/definitions/GroupProperties" + 404: + description: Group with given name not found + 500: + description: Internal server error while fetching Group details + delete: + tags: + - "Group" + operationId: 
deleteGroup + description: Delete a Group + responses: + 204: + description: Successfully deleted the Group + 500: + description: Internal server error while deleting the Group + /groups/{groupName}/history: + parameters: + - in: path + name: groupName + description: Group name + required: true + type: string + get: + tags: + - "Group" + operationId: getGroupHistory + description: Fetch the history of schema evolution of a Group + produces: + - application/json + responses: + 200: + description: Found Group history + schema: + $ref: "#/definitions/GroupHistory" + 404: + description: Group with given name not found + 500: + description: Internal server error while fetching Group history + /groups/{groupName}/rules: + parameters: + - in: path + name: groupName + description: Group name + required: true + type: string + put: + tags: + - "Group" + operationId: updateSchemaValidationRules + description: update schema validation rules of an existing Group + consumes: + - application/json + parameters: + - in: body + name: UpdateValidationRulesRequest + description: update group policy + required: true + schema: + type: object + properties: + validationRules: + $ref: "#/definitions/SchemaValidationRules" + previousRules: + $ref: "#/definitions/SchemaValidationRules" + nullable: true + required: + - validationRules + responses: + 200: + description: Updated schema validation policy + 404: + description: Group with given name not found + 409: + description: Write conflict + 500: + description: Internal server error while updating Group's schema validation rules + /groups/{groupName}/schemas: + parameters: + - in: path + name: groupName + description: Group name + required: true + type: string + - in: query + name: type + type: string + description: Type of object + get: + tags: + - "Group" + operationId: getSchemas + description: Fetch latest schema versions for all objects identified by SchemaInfo#type under a Group. If query param type is specified then latest schema for the type is returned. + produces: + - application/json + responses: + 200: + description: Latest schemas for all objects identified by SchemaInfo#type under the group + schema: + $ref: "#/definitions/SchemaVersionsList" + 404: + description: Group with given name not found + 500: + description: Internal server error while fetching Group's latest schemas + /groups/{groupName}/schemas/versions: + parameters: + - in: path + name: groupName + description: Group name + required: true + type: string + get: + tags: + - "Group" + operationId: getSchemaVersions + description: Get all schema versions for the group + parameters: + - in: query + name: type + type: string + description: Type of object the schema describes. 
+ produces: + - application/json + responses: + 200: + description: Versioned history of schemas registered under the group + schema: + $ref: "#/definitions/SchemaVersionsList" + 404: + description: Group with given name not found + 500: + description: Internal server error while fetching Group schema versions + post: + tags: + - "Group" + operationId: addSchema + description: Adds a new schema to the group + consumes: + - application/json + parameters: + - in: body + name: schemaInfo + description: Add new schema to group + required: true + schema: + $ref: "#/definitions/SchemaInfo" + produces: + - application/json + responses: + 201: + description: Successfully added schema to the group + schema: + $ref: "#/definitions/VersionInfo" + 404: + description: Group not found + 409: + description: Incompatible schema + 417: + description: Invalid serialization format + 500: + description: Internal server error while adding schema to group + /groups/{groupName}/schemas/versions/find: + parameters: + - in: path + name: groupName + description: Group name + required: true + type: string + post: + tags: + - "Group" + operationId: getSchemaVersion + description: Get the version for the schema if it is registered. It does not automatically register the schema. To add new schema use addSchema + consumes: + - application/json + parameters: + - in: body + name: schemaInfo + description: Get schema corresponding to the version + required: true + schema: + $ref: "#/definitions/SchemaInfo" + produces: + - application/json + responses: + 200: + description: Schema version + schema: + $ref: "#/definitions/VersionInfo" + 404: + description: Group with given name not found + 500: + description: Internal server error fetching version for schema + /groups/{groupName}/schemas/versions/{versionOrdinal}: + parameters: + - in: path + name: groupName + description: Group name + required: true + type: string + - in: path + name: versionOrdinal + description: Version ordinal + required: true + type: integer + format: int32 + get: + tags: + - "Group" + operationId: getSchemaFromVersionOrdinal + description: Get schema from the version ordinal that uniquely identifies the schema in the group. + produces: + - application/json + responses: + 200: + description: Schema corresponding to the version + schema: + $ref: "#/definitions/SchemaInfo" + 404: + description: Group with given name not found + 500: + description: Internal server error while fetching schema from version + delete: + tags: + - "Group" + operationId: deleteSchemaVersionOrdinal + description: Delete schema identified by version from the group. + produces: + - application/json + responses: + 204: + description: Schema corresponding to the version + 404: + description: Group with given name not found + 500: + description: Internal server error while deleting schema from group + /groups/{groupName}/schemas/{type}/versions/{version}: + parameters: + - in: path + name: groupName + description: Group name + required: true + type: string + - in: path + name: type + description: Schema type from SchemaInfo#type or VersionInfo#type + required: true + type: string + - in: path + name: version + description: Version number + required: true + type: integer + format: int32 + get: + tags: + - "Group" + operationId: getSchemaFromVersion + description: Get schema from the version ordinal that uniquely identifies the schema in the group. 
+ produces: + - application/json + responses: + 200: + description: Schema corresponding to the version + schema: + $ref: "#/definitions/SchemaInfo" + 404: + description: Group with given name not found + 500: + description: Internal server error while fetching schema from version + delete: + tags: + - "Group" + operationId: deleteSchemaVersion + description: Delete schema version from the group. + produces: + - application/json + responses: + 204: + description: Schema corresponding to the version + 404: + description: Group with given name not found + 500: + description: Internal server error while deleting schema from group + /groups/{groupName}/schemas/versions/validate: + parameters: + - in: path + name: groupName + description: Group name + required: true + type: string + post: + tags: + - "Group" + operationId: validate + description: Checks if given schema is compatible with schemas in the registry for current policy setting. + consumes: + - application/json + parameters: + - in: body + name: ValidateRequest + description: Checks if schema is valid with respect to supplied validation rules + required: true + schema: + type: object + properties: + schemaInfo: + $ref: "#/definitions/SchemaInfo" + validationRules: + $ref: "#/definitions/SchemaValidationRules" + required: + - schemaInfo + produces: + - application/json + responses: + 200: + description: Schema validation response + schema: + $ref: "#/definitions/Valid" + 404: + description: Group with given name not found + 500: + description: Internal server error while trying to validate schema + /groups/{groupName}/schemas/versions/canRead: + parameters: + - in: path + name: groupName + description: Group name + required: true + type: string + post: + tags: + - "Group" + operationId: canRead + description: Checks if given schema can be used for reads subject to compatibility policy in the schema validation rules. + consumes: + - application/json + parameters: + - in: body + name: schemaInfo + description: Checks if schema can be used to read the data in the stream based on compatibility rules. + required: true + schema: + $ref: "#/definitions/SchemaInfo" + produces: + - application/json + responses: + 200: + description: Response to tell whether schema can be used to read existing schemas + schema: + $ref: "#/definitions/CanRead" + 404: + description: Group with given name not found + 500: + description: Internal server error while checking schema for readability + /groups/{groupName}/encodings: + parameters: + - in: path + name: groupName + description: Group name + required: true + type: string + put: + tags: + - "Group" + operationId: getEncodingId + description: Get an encoding id that uniquely identifies a schema version and codec type pair. 
+ consumes: + - application/json + parameters: + - in: body + name: GetEncodingIdRequest + description: Get schema corresponding to the version + required: true + schema: + type: object + properties: + versionInfo: + $ref: "#/definitions/VersionInfo" + codecType: + type: string + required: + - versionInfo + - codecType + produces: + - application/json + responses: + 200: + description: Found Encoding + schema: + $ref: "#/definitions/EncodingId" + 404: + description: Group with given name or version not found + 412: + description: Codec type not registered + 500: + description: Internal server error while getting encoding id + /groups/{groupName}/encodings/{encodingId}: + parameters: + - in: path + name: groupName + description: Group name + required: true + type: string + - in: path + name: encodingId + description: Encoding id that identifies a unique combination of schema and codec type + required: true + type: integer + format: int32 + get: + tags: + - "Group" + operationId: getEncodingInfo + description: Get the encoding information corresponding to the encoding id. + produces: + - application/json + responses: + 200: + description: Found Encoding + schema: + $ref: "#/definitions/EncodingInfo" + 404: + description: Group or encoding id with given name not found + 500: + description: Internal server error while getting encoding info corresponding to encoding id + /groups/{groupName}/codecTypes: + parameters: + - in: path + name: groupName + description: Group name + required: true + type: string + get: + tags: + - "Group" + operationId: getCodecTypesList + description: Get codecTypes for the group. + produces: + - application/json + responses: + 200: + description: Found CodecTypes + schema: + $ref: "#/definitions/CodecTypesList" + 404: + description: Group or encoding id with given name not found + 500: + description: Internal server error while fetching codecTypes registered + post: + tags: + - "Group" + operationId: addCodecType + description: Adds a new codecType to the group. + consumes: + - application/json + parameters: + - in: body + name: codecType + description: The codecType + required: true + schema: + type: string + responses: + 201: + description: Successfully added codecType to group + 404: + description: Group not found + 500: + description: Internal server error while registering codecType to a Group + /schemas/addedTo: + post: + tags: + - "Schema" + operationId: getSchemaReferences + description: Gets a map of groups to version info for all the groups where the schema is registered. SchemaInfo#properties is ignored while comparing the schema. + consumes: + - application/json + parameters: + - in: body + name: schemaInfo + description: Get schema references for the supplied schema + required: true + schema: + $ref: "#/definitions/SchemaInfo" + produces: + - application/json + responses: + 200: + description: Schema version + schema: + $ref: "#/definitions/AddedTo" + 404: + description: Schema not found + 500: + description: Internal server error while fetching Schema references +definitions: + ListGroupsResponse: + type: object + description: Map of Group names to group properties. For partially created groups, the group properties may be null. + properties: + groups: + type: object + additionalProperties: + $ref: "#/definitions/GroupProperties" + continuationToken: + description: Continuation token to identify the position of last group in the response. 
+ type: string + required: + - continuationToken + GroupProperties: + type: object + description: Metadata for a group. + properties: + serializationFormat: + description: serialization format for the group. + $ref: "#/definitions/SerializationFormat" + schemaValidationRules: + description: Validation rules to apply while registering new schema. + $ref: "#/definitions/SchemaValidationRules" + allowMultipleTypes: + description: Flag to indicate whether to allow multiple schemas representing distinct objects to be registered in the group. + type: boolean + properties: + description: User defined Key value strings. + type: object + additionalProperties: + type: string + minLength: 0 + maxLength: 40 + required: + - serializationFormat + - allowMultipleTypes + - schemaValidationRules + SerializationFormat: + type: object + description: Serialization format enum that lists different serialization formats supported by the service. To use additional formats, use serializationFormat.Custom and supply customTypeName. + properties: + serializationFormat: + type: string + enum: + - Avro + - Protobuf + - Json + - Any + - Custom + customTypeName: + type: string + required: + - serializationFormat + SchemaInfo: + type: object + description: Schema information object that encapsulates various properties of a schema. + properties: + type: + description: Name of the schema. This identifies the type of object the schema payload represents. + type: string + serializationFormat: + description: Type of schema. + $ref: "#/definitions/SerializationFormat" + schemaData: + description: Base64 encoded string for binary data for schema. + type: string + format: binary + properties: + description: User defined key value strings. + type: object + additionalProperties: + type: string + minLength: 0 + maxLength: 40 + required: + - type + - serializationFormat + - schemaData + VersionInfo: + description: Version information object. + type: object + properties: + type: + description: Type of schema for this version. This is same value used in SchemaInfo#Type for the schema this version identifies. + type: string + version: + description: Version number that uniquely identifies the schema version among all schemas in the group that share the same Type. + type: integer + format: int32 + ordinal: + description: Version ordinal that uniquely identifies the position of the corresponding schema across all schemas in the group. + type: integer + format: int32 + required: + - type + - version + - ordinal + SchemaWithVersion: + type: object + description: Object that encapsulates SchemaInfo and its corresponding VersionInfo objects. + properties: + schemaInfo: + description: Schema information. + $ref: "#/definitions/SchemaInfo" + version: + description: Version information. + $ref: "#/definitions/VersionInfo" + required: + - schemaInfo + - version + SchemaVersionsList: + type: object + description: List of schemas with their versions. + properties: + schemas: + description: List of schemas with their versions. + type: array + items: + $ref: "#/definitions/SchemaWithVersion" + EncodingId: + type: object + description: Encoding id that uniquely identifies a schema version and codec type pair. + properties: + encodingId: + type: integer + format: int32 + description: encoding id generated by service. + required: + - encodingId + EncodingInfo: + type: object + description: Encoding information object that resolves the schema version and codec type used for corresponding encoding id. 
+ properties: + schemaInfo: + description: Schema information object. + $ref: "#/definitions/SchemaInfo" + versionInfo: + description: Version information object. + $ref: "#/definitions/VersionInfo" + codecType: + description: Codec type. + type: string + required: + - schemaInfo + - versionInfo + - codecType + Compatibility: + type: object + description: Schema Compatibility validation rule. + required: + - name + - policy + properties: + name: + type: string + description: Name is used to identify the type of SchemaValidationRule. For Compatibility rule the name should be "Compatibility". + policy: + description: Compatibility policy enum. + type: string + enum: + - AllowAny + - DenyAll + - Backward + - Forward + - ForwardTransitive + - BackwardTransitive + - BackwardTill + - ForwardTill + - BackwardAndForwardTill + - Full + - FullTransitive + backwardTill: + description: Version for backward till if policy is BackwardTill or BackwardAndForwardTill. + $ref: "#/definitions/VersionInfo" + forwardTill: + description: Version for forward till if policy is ForwardTill or BackwardAndForwardTill. + $ref: "#/definitions/VersionInfo" + SchemaValidationRules: + type: object + description: Schema validation rules to be applied for new schema addition. Currently only one rule is supported - Compatibility. + properties: + rules: + type: object + additionalProperties: + $ref: "#/definitions/SchemaValidationRule" + SchemaValidationRule: + type: object + description: Schema validation rule base class. + required: + - rule + properties: + rule: + description: Specific schema validation rule. The only rule we have presently is Compatibility. The "name" is used to identify specific Rule type. The only rule supported in this is Compatibility. + oneOf: + - $ref: '#/definitions/Compatibility' + discriminator: + propertyName: name + mapping: + Compatibility: '#/definitions/Compatibility' + CodecTypesList: + type: object + description: Response object for listCodecTypes. + properties: + codecTypes: + type: array + description: List of codecTypes. + items: + type: string + Valid: + type: object + description: Response object for validateSchema api. + properties: + valid: + description: Whether given schema is valid with respect to existing group schemas against the configured validation rules. + type: boolean + required: + - valid + CanRead: + type: object + description: Response object for canRead api. + properties: + compatible: + description: Whether given schema is compatible and can be used for reads. Compatibility is checked against existing group schemas subject to group's configured compatibility policy. + type: boolean + required: + - compatible + GroupHistoryRecord: + type: object + description: Group History Record that describes each schema evolution - schema information, version generated for the schema, time and rules used for schema validation. + properties: + schemaInfo: + description: Schema information object. + $ref: "#/definitions/SchemaInfo" + version: + description: Schema version information object. + $ref: "#/definitions/VersionInfo" + validationRules: + description: Schema validation rules applied. + $ref: "#/definitions/SchemaValidationRules" + timestamp: + description: Timestamp when the schema was added. + type: integer + format: int64 + schemaString: + description: Schema as json string for serialization formats that registry service understands. 
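For illustration, a minimal Java sketch (not part of this patch) of how the Compatibility rule and SchemaValidationRules described by the definitions above can be constructed with the contract data classes and translated to the generated REST models; the type name "a" and the version numbers are hypothetical, and only factory methods exercised by ModelHelperTest further below are used.

import io.pravega.schemaregistry.contract.data.Compatibility;
import io.pravega.schemaregistry.contract.data.SchemaValidationRules;
import io.pravega.schemaregistry.contract.data.VersionInfo;
import io.pravega.schemaregistry.contract.transform.ModelHelper;

public class CompatibilityRuleSketch {
    public static void main(String[] args) {
        // Hypothetical versions already registered for type "a"; VersionInfo takes (type, version, ordinal).
        VersionInfo backwardTill = new VersionInfo("a", 0, 0);
        VersionInfo forwardTill = new VersionInfo("a", 1, 1);

        // Compatibility rule corresponding to the BackwardAndForwardTill policy in the definition above.
        Compatibility rule = Compatibility.backwardTillAndForwardTill(backwardTill, forwardTill);
        SchemaValidationRules rules = SchemaValidationRules.of(rule);

        // ModelHelper.encode converts the contract data object into the generated REST model
        // that matches the SchemaValidationRules definition in the swagger above.
        io.pravega.schemaregistry.contract.generated.rest.model.SchemaValidationRules restRules = ModelHelper.encode(rules);
        System.out.println(restRules.getRules().size());
    }
}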
+ type: string + required: + - schemaInfo + - version + - validationRules + - timestamp + GroupHistory: + type: object + properties: + history: + type: array + description: Chronological list of Group History records. + items: + $ref: "#/definitions/GroupHistoryRecord" + AddedTo: + type: object + description: Map of Group names to versionInfos in the group. This is for all the groups where the schema is registered. + properties: + groups: + type: object + additionalProperties: + $ref: "#/definitions/VersionInfo" + required: + - groups diff --git a/contract/src/main/swagger/server.config.json b/contract/src/main/swagger/server.config.json new file mode 100644 index 000000000..f1ac7594b --- /dev/null +++ b/contract/src/main/swagger/server.config.json @@ -0,0 +1,8 @@ +{ +"sourceFolder" : "src/main/java", +"implFolder" : "src/main/java", +"modelPackage" : "io.pravega.schemaregistry.contract.generated.rest.model", +"apiPackage" : "io.pravega.schemaregistry.contract.generated.rest.server.api", +"library" : "jersey2", +"hideGenerationTimestamp" : true +} diff --git a/contract/src/test/java/io/pravega/schemaregistry/contract/transform/ModelHelperTest.java b/contract/src/test/java/io/pravega/schemaregistry/contract/transform/ModelHelperTest.java new file mode 100644 index 000000000..c724715db --- /dev/null +++ b/contract/src/test/java/io/pravega/schemaregistry/contract/transform/ModelHelperTest.java @@ -0,0 +1,138 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.contract.transform; + +import com.google.common.collect.ImmutableMap; +import io.pravega.schemaregistry.contract.data.GroupHistoryRecord; +import io.pravega.schemaregistry.contract.generated.rest.model.Compatibility; +import io.pravega.schemaregistry.contract.generated.rest.model.EncodingId; +import io.pravega.schemaregistry.contract.generated.rest.model.EncodingInfo; +import io.pravega.schemaregistry.contract.generated.rest.model.GroupProperties; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaValidationRules; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaWithVersion; +import io.pravega.schemaregistry.contract.generated.rest.model.SerializationFormat; +import io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo; +import org.junit.Test; + +import java.nio.ByteBuffer; +import java.util.Arrays; +import java.util.Collections; + +import static org.junit.Assert.*; + +public class ModelHelperTest { + @Test + public void testDecode() { + SerializationFormat type = new SerializationFormat().serializationFormat(SerializationFormat.SerializationFormatEnum.CUSTOM).customTypeName("a"); + SchemaValidationRules rules = new SchemaValidationRules().rules(Collections.emptyMap()); + SchemaInfo schema = new SchemaInfo() + .type("a").serializationFormat(type).schemaData(new byte[0]).properties(Collections.emptyMap()); + VersionInfo version = new VersionInfo().type("a").version(1).ordinal(1); + Compatibility compatibility = new Compatibility().name(Compatibility.class.getSimpleName()) + .policy(Compatibility.PolicyEnum.BACKWARDANDFORWARDTILL).backwardTill(version).forwardTill(version); + String codecType = "custom"; + + // 
decodes + io.pravega.schemaregistry.contract.data.SerializationFormat serializationFormat = ModelHelper.decode(type); + assertEquals(serializationFormat, io.pravega.schemaregistry.contract.data.SerializationFormat.Custom); + assertEquals(serializationFormat.getCustomTypeName(), "a"); + + io.pravega.schemaregistry.contract.data.SchemaInfo schemaInfo = ModelHelper.decode(schema); + assertEquals(schemaInfo.getType(), "a"); + assertEquals(schemaInfo.getSerializationFormat(), serializationFormat); + assertNotNull(schemaInfo.getSchemaData()); + assertNotNull(schemaInfo.getProperties()); + + io.pravega.schemaregistry.contract.data.Compatibility compatibilityDecoded = ModelHelper.decode(compatibility); + assertEquals(compatibilityDecoded.getCompatibility(), io.pravega.schemaregistry.contract.data.Compatibility.Type.BackwardAndForwardTill); + + io.pravega.schemaregistry.contract.data.SchemaValidationRules rulesDecoded = ModelHelper.decode(rules); + assertEquals(rulesDecoded.getRules().size(), 0); + + io.pravega.schemaregistry.contract.data.VersionInfo versionInfo = ModelHelper.decode(version); + assertEquals(versionInfo.getType(), version.getType()); + assertEquals(versionInfo.getVersion(), version.getVersion().intValue()); + + io.pravega.schemaregistry.contract.data.EncodingInfo encodingInfo = ModelHelper.decode(new EncodingInfo().schemaInfo(schema).versionInfo(version).codecType(codecType)); + assertEquals(encodingInfo.getCodecType(), "custom"); + assertEquals(encodingInfo.getVersionInfo(), versionInfo); + assertEquals(encodingInfo.getSchemaInfo(), schemaInfo); + io.pravega.schemaregistry.contract.data.SchemaWithVersion schemaWithVersion = ModelHelper.decode(new SchemaWithVersion().schemaInfo(schema).version(version)); + assertEquals(schemaWithVersion.getVersionInfo(), versionInfo); + assertEquals(schemaWithVersion.getSchemaInfo(), schemaInfo); + + io.pravega.schemaregistry.contract.data.EncodingId encodingId = ModelHelper.decode(new EncodingId().encodingId(1)); + assertEquals(encodingId.getId(), 1); + } + + @Test + public void testEncode() { + io.pravega.schemaregistry.contract.data.SerializationFormat serializationFormat = io.pravega.schemaregistry.contract.data.SerializationFormat.custom("custom"); + io.pravega.schemaregistry.contract.data.SchemaInfo schemaInfo = new io.pravega.schemaregistry.contract.data.SchemaInfo( + "name", serializationFormat, ByteBuffer.wrap(new byte[0]), ImmutableMap.of()); + io.pravega.schemaregistry.contract.data.VersionInfo versionInfo = new io.pravega.schemaregistry.contract.data.VersionInfo("a", 0, 1); + io.pravega.schemaregistry.contract.data.Compatibility rule = io.pravega.schemaregistry.contract.data.Compatibility.backwardTillAndForwardTill( + new io.pravega.schemaregistry.contract.data.VersionInfo("a", 0, 0), + new io.pravega.schemaregistry.contract.data.VersionInfo("a", 1, 1)); + io.pravega.schemaregistry.contract.data.SchemaValidationRules schemaValidationRules = io.pravega.schemaregistry.contract.data.SchemaValidationRules.of(rule); + io.pravega.schemaregistry.contract.data.GroupProperties prop = io.pravega.schemaregistry.contract.data.GroupProperties + .builder().serializationFormat(serializationFormat).schemaValidationRules(schemaValidationRules) + .allowMultipleTypes(true).properties(ImmutableMap.of()).build(); + String codecType = "codecType"; + + // encode test + VersionInfo version = ModelHelper.encode(versionInfo); + assertEquals(version.getVersion().intValue(), versionInfo.getVersion()); + assertEquals(version.getType(), versionInfo.getType()); + + 
SerializationFormat type = ModelHelper.encode(serializationFormat); + assertEquals(type.getSerializationFormat(), SerializationFormat.SerializationFormatEnum.CUSTOM); + + SchemaInfo schema = ModelHelper.encode(schemaInfo); + assertEquals(schema.getType(), schemaInfo.getType()); + assertEquals(schema.getProperties(), schemaInfo.getProperties()); + assertTrue(Arrays.equals(schema.getSchemaData(), schemaInfo.getSchemaData().array())); + assertEquals(schema.getSerializationFormat(), type); + + EncodingId encodingId = ModelHelper.encode(new io.pravega.schemaregistry.contract.data.EncodingId(0)); + assertEquals(encodingId.getEncodingId().intValue(), 0); + + EncodingInfo encodingInfo = ModelHelper.encode(new io.pravega.schemaregistry.contract.data.EncodingInfo(versionInfo, schemaInfo, codecType)); + assertEquals(encodingInfo.getCodecType(), codecType); + assertEquals(encodingInfo.getVersionInfo(), version); + assertEquals(encodingInfo.getSchemaInfo(), schema); + + SchemaValidationRules rules = ModelHelper.encode(schemaValidationRules); + assertEquals(rules.getRules().size(), 1); + + io.pravega.schemaregistry.contract.generated.rest.model.GroupHistoryRecord schemaEvolution = ModelHelper.encode(new GroupHistoryRecord( + schemaInfo, versionInfo, schemaValidationRules, 100L, "")); + assertEquals(schemaEvolution.getSchemaInfo(), schema); + assertEquals(schemaEvolution.getValidationRules(), rules); + assertEquals(schemaEvolution.getVersion(), version); + assertEquals(schemaEvolution.getTimestamp().longValue(), 100L); + assertEquals(schemaEvolution.getSchemaString(), ""); + + Compatibility compatibility = ModelHelper.encode(rule); + assertEquals(compatibility.getPolicy(), Compatibility.PolicyEnum.BACKWARDANDFORWARDTILL); + + SchemaWithVersion schemaWithVersion = ModelHelper.encode(new io.pravega.schemaregistry.contract.data.SchemaWithVersion(schemaInfo, versionInfo)); + assertEquals(schemaWithVersion.getSchemaInfo(), schema); + assertEquals(schemaWithVersion.getVersion(), version); + + GroupProperties groupProperties = ModelHelper.encode(prop); + assertEquals(groupProperties.getSerializationFormat(), type); + assertEquals(groupProperties.getSchemaValidationRules(), rules); + assertEquals(groupProperties.isAllowMultipleTypes(), prop.isAllowMultipleTypes()); + assertEquals(groupProperties.getProperties(), prop.getProperties()); + } + +} From b6348679283c3ea47a41649ba976c34faf2f7bc0 Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Sun, 7 Jun 2020 19:43:00 -0700 Subject: [PATCH 02/70] copying over to new fork Signed-off-by: Shivesh Ranjan --- build.gradle | 66 +- .../client/SchemaRegistryClient.java | 358 ++++++++ .../client/SchemaRegistryClientConfig.java | 31 + .../client/SchemaRegistryClientFactory.java | 25 + .../client/SchemaRegistryClientImpl.java | 461 ++++++++++ .../client/exceptions/RegistryExceptions.java | 187 ++++ .../client/TestSchemaRegistryClient.java | 612 +++++++++++++ .../common/ContinuationTokenIterator.java | 93 ++ .../pravega/schemaregistry/common/Either.java | 51 ++ .../schemaregistry/common/HashUtil.java | 21 + .../common/ContinuationTokenIteratorTest.java | 68 ++ .../contract/data/Compatibility.java | 203 ++++ .../contract/data/EncodingId.java | 35 + .../contract/data/EncodingInfo.java | 33 + .../contract/data/GroupHistoryRecord.java | 47 + .../contract/data/GroupProperties.java | 74 ++ .../contract/data/SchemaInfo.java | 62 ++ .../contract/data/SchemaValidationRule.java | 23 + .../contract/data/SchemaValidationRules.java | 66 ++ .../contract/data/SchemaWithVersion.java | 31 + 
.../contract/data/SerializationFormat.java | 45 + .../contract/data/VersionInfo.java | 47 + .../generated/rest/model/AddedTo.java | 101 ++ .../generated/rest/model/CanRead.java | 92 ++ .../generated/rest/model/CodecTypesList.java | 101 ++ .../generated/rest/model/Compatibility.java | 216 +++++ .../rest/model/CreateGroupRequest.java | 117 +++ .../generated/rest/model/EncodingId.java | 92 ++ .../generated/rest/model/EncodingInfo.java | 144 +++ .../rest/model/GetEncodingIdRequest.java | 117 +++ .../generated/rest/model/GroupHistory.java | 101 ++ .../rest/model/GroupHistoryRecord.java | 194 ++++ .../generated/rest/model/GroupProperties.java | 179 ++++ .../rest/model/ListGroupsResponse.java | 128 +++ .../generated/rest/model/SchemaInfo.java | 179 ++++ .../rest/model/SchemaValidationRule.java | 92 ++ .../rest/model/SchemaValidationRules.java | 103 +++ .../rest/model/SchemaVersionsList.java | 102 +++ .../rest/model/SchemaWithVersion.java | 119 +++ .../rest/model/SerializationFormat.java | 154 ++++ .../model/UpdateValidationRulesRequest.java | 116 +++ .../contract/generated/rest/model/Valid.java | 92 ++ .../generated/rest/model/ValidateRequest.java | 117 +++ .../generated/rest/model/VersionInfo.java | 142 +++ .../rest/server/api/ApiException.java | 10 + .../rest/server/api/ApiOriginFilter.java | 22 + .../rest/server/api/ApiResponseMessage.java | 69 ++ .../generated/rest/server/api/Bootstrap.java | 31 + .../generated/rest/server/api/GroupsApi.java | 412 +++++++++ .../rest/server/api/GroupsApiService.java | 54 ++ .../rest/server/api/JacksonJsonProvider.java | 18 + .../rest/server/api/NotFoundException.java | 10 + .../generated/rest/server/api/SchemasApi.java | 74 ++ .../rest/server/api/SchemasApiService.java | 22 + .../generated/rest/server/api/StringUtil.java | 42 + .../factories/GroupsApiServiceFactory.java | 13 + .../factories/SchemasApiServiceFactory.java | 13 + .../server/api/impl/GroupsApiServiceImpl.java | 134 +++ .../api/impl/SchemasApiServiceImpl.java | 26 + .../contract/transform/ModelHelper.java | 243 +++++ .../schemaregistry/contract/v1/ApiV1.java | 554 +++++++++++ contract/src/main/swagger/README.md | 44 + contract/src/main/swagger/SchemaRegistry.yaml | 867 ++++++++++++++++++ contract/src/main/swagger/server.config.json | 8 + .../contract/transform/ModelHelperTest.java | 138 +++ settings.gradle | 4 + 66 files changed, 8244 insertions(+), 1 deletion(-) create mode 100644 client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java create mode 100644 client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientConfig.java create mode 100644 client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientFactory.java create mode 100644 client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientImpl.java create mode 100644 client/src/main/java/io/pravega/schemaregistry/client/exceptions/RegistryExceptions.java create mode 100644 client/src/test/java/io/pravega/schemaregistry/client/TestSchemaRegistryClient.java create mode 100644 common/src/main/java/io/pravega/schemaregistry/common/ContinuationTokenIterator.java create mode 100644 common/src/main/java/io/pravega/schemaregistry/common/Either.java create mode 100644 common/src/main/java/io/pravega/schemaregistry/common/HashUtil.java create mode 100644 common/src/test/java/io/pravega/schemaregistry/common/ContinuationTokenIteratorTest.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/data/Compatibility.java create mode 100644 
contract/src/main/java/io/pravega/schemaregistry/contract/data/EncodingId.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/data/EncodingInfo.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/data/GroupHistoryRecord.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/data/GroupProperties.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaInfo.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaValidationRule.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaValidationRules.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaWithVersion.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/data/SerializationFormat.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/data/VersionInfo.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/AddedTo.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/CanRead.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/CodecTypesList.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/Compatibility.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/CreateGroupRequest.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/EncodingId.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/EncodingInfo.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/GetEncodingIdRequest.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/GroupHistory.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/GroupHistoryRecord.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/GroupProperties.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/ListGroupsResponse.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaInfo.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaValidationRule.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaValidationRules.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaVersionsList.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaWithVersion.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SerializationFormat.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/UpdateValidationRulesRequest.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/Valid.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/ValidateRequest.java create mode 100644 
contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/VersionInfo.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/ApiException.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/ApiOriginFilter.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/ApiResponseMessage.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/Bootstrap.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/GroupsApi.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/GroupsApiService.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/JacksonJsonProvider.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/NotFoundException.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/SchemasApi.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/SchemasApiService.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/StringUtil.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/factories/GroupsApiServiceFactory.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/factories/SchemasApiServiceFactory.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/impl/GroupsApiServiceImpl.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/impl/SchemasApiServiceImpl.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/transform/ModelHelper.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/v1/ApiV1.java create mode 100644 contract/src/main/swagger/README.md create mode 100644 contract/src/main/swagger/SchemaRegistry.yaml create mode 100644 contract/src/main/swagger/server.config.json create mode 100644 contract/src/test/java/io/pravega/schemaregistry/contract/transform/ModelHelperTest.java diff --git a/build.gradle b/build.gradle index a1f1bb5d6..52125793d 100644 --- a/build.gradle +++ b/build.gradle @@ -34,7 +34,6 @@ buildscript { } } dependencies { - classpath group: 'com.google.protobuf', name:'protobuf-gradle-plugin', version: protobufGradlePlugin classpath "gradle.plugin.org.nosphere.apache:creadur-rat-gradle:0.3.0" classpath group: 'org.hidetake', name: 'gradle-ssh-plugin', version: gradleSshPluginVersion classpath group: 'gradle.plugin.com.github.spotbugs', name: 'spotbugs-gradle-plugin', version: spotbugsPluginVersion @@ -122,6 +121,71 @@ allprojects { } } +project('common') { + dependencies { + compile group: 'commons-io', name: 'commons-io', version: commonsioVersion + compile group: 'com.google.guava', name: 'guava', version: guavaVersion + compile group: 'io.pravega', name: 'pravega-common', version: pravegaVersion + //Do NOT add any additional dependencies here. 
+ } + + javadoc { + title = "Common Libraries" + dependsOn delombok + source = delombok.outputDir + failOnError = false + options.addBooleanOption("Xdoclint:none", true) + } +} + +project('client') { + dependencies { + compile project(':common') + compile project(':contract') + compile group: 'org.glassfish.jersey.ext', name: 'jersey-proxy-client', version: jerseyVersion + compile group: 'org.glassfish.jersey.core', name: 'jersey-client', version: jerseyVersion + testCompile group: 'org.slf4j', name: 'log4j-over-slf4j', version: slf4jApiVersion + testCompile group: 'ch.qos.logback', name: 'logback-classic', version: qosLogbackVersion + testCompile group: 'io.pravega', name: 'pravega-test-testcommon', version: pravegaVersion + } + + javadoc { + title = "Registry Client" + dependsOn delombok + source = delombok.outputDir + failOnError = false + exclude "**/impl/**"; + options.addBooleanOption("Xdoclint:all,-reference", true) + } +} + +project('contract') { + dependencies { + compile project(':common') + testCompile group: 'org.slf4j', name: 'log4j-over-slf4j', version: slf4jApiVersion + testCompile group: 'ch.qos.logback', name: 'logback-classic', version: qosLogbackVersion + compile group: 'javax.servlet', name: 'javax.servlet-api', version: javaxServletApiVersion + compile(group: 'io.swagger', name : 'swagger-jersey2-jaxrs', version :swaggerJersey2JaxrsVersion) { + exclude group: 'com.google.guava', module: 'guava' + } + compile group: 'org.glassfish.jersey.containers', name: 'jersey-container-grizzly2-http', version: jerseyVersion + compile group: 'org.glassfish.jersey.inject', name: 'jersey-hk2', version: jerseyVersion + compile group: 'org.glassfish.jersey.media', name: 'jersey-media-json-jackson', version: jerseyVersion + compile group: 'javax.xml.bind', name: 'jaxb-api', version: jaxbVersion + compile group: 'org.glassfish.jaxb', name: 'jaxb-runtime', version: jaxbVersion + + } + + javadoc { + title = "Registry Contract" + dependsOn delombok + source = delombok.outputDir + failOnError = false + exclude "**/impl/**"; + options.addBooleanOption("Xdoclint:all,-reference", true) + } +} + def getProjectVersion() { String ver = schemaregistryVersion if (grgit && ver.contains("-SNAPSHOT")) { diff --git a/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java new file mode 100644 index 000000000..5ed87a3aa --- /dev/null +++ b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java @@ -0,0 +1,358 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.client; + +import io.pravega.schemaregistry.contract.data.EncodingId; +import io.pravega.schemaregistry.contract.data.EncodingInfo; +import io.pravega.schemaregistry.contract.data.GroupHistoryRecord; +import io.pravega.schemaregistry.contract.data.GroupProperties; +import io.pravega.schemaregistry.contract.data.SchemaInfo; +import io.pravega.schemaregistry.contract.data.SchemaValidationRules; +import io.pravega.schemaregistry.contract.data.SchemaWithVersion; +import io.pravega.schemaregistry.contract.data.VersionInfo; + +import javax.annotation.Nullable; +import java.util.Iterator; +import java.util.List; +import java.util.Map; + +import static io.pravega.schemaregistry.client.exceptions.RegistryExceptions.*; + +/** + * Defines a registry client for interacting with schema registry service. + * The implementation of this interface should provide atomicity and read-after-write consistency guarantees for all the methods. + */ +public interface SchemaRegistryClient { + /** + * Adds a new group. A group refers to the name under which the schemas are registered. A group is identified by a + * unique name and has an associated set of group metadata {@link GroupProperties} and a list of codec types and a + * versioned history of schemas that were registered under the group. + * Add group is idempotent. If a group with the same name already exists, the api will return false. + * + * @param groupId Id for the group that uniquely identifies the group. + * @param groupProperties Group properties for the group. These include serialization format, validation rules, + * and a flag to declare whether multiple schemas representing distinct object types can be + * registered with the group. Object types are identified by {@link SchemaInfo#type}. Schema compatibility checks + * are always performed for schemas that share the same {@link SchemaInfo#type}. + * Additionally, a user defined map of properties can be supplied. + * @return True if the group was added successfully, false if a group with the same name already exists. + * @throws BadArgumentException if the group properties are rejected by the service. + * @throws UnauthorizedException if the user is unauthorized. + */ + boolean addGroup(String groupId, GroupProperties groupProperties) throws BadArgumentException, UnauthorizedException; + + /** + * Removes a group identified by the groupId. This will remove all the codec types and schemas registered under the group. + * Remove group is idempotent. + * + * @param groupId Id for the group that uniquely identifies the group. + * @throws UnauthorizedException if the user is unauthorized. + */ + void removeGroup(String groupId) throws UnauthorizedException; + + /** + * List all groups that the user is authorized on. This returns an iterator where each element is a pair of group + * name and group properties. + * This iterator can be used to iterate over each element until all elements are exhausted. + * The implementation should guarantee that all groups added before and until the iterator returns + * {@link Iterator#hasNext()} = true can be iterated over. + * + * @return Iterator over pairs of group name and corresponding group properties for all groups. + * @throws UnauthorizedException if the user is unauthorized. + */ + Iterator<Map.Entry<String, GroupProperties>> listGroups() throws UnauthorizedException; + + /** + * Get group properties for the group identified by the group id. 
+ * + * {@link GroupProperties#serializationFormat} identifies the serialization format used to describe the schema. + * {@link GroupProperties#schemaValidationRules} sets the schema validation policy that needs to be enforced for evolving schemas. + * {@link GroupProperties#allowMultipleTypes} specifies whether multiple schemas are allowed to be registered in the group. + * Schemas are validated against existing schema versions that have the same {@link SchemaInfo#type}. + * {@link GroupProperties#properties} describes generic properties for a group. + * + * @param groupId Id for the group. + * @return Group properties, which include properties like serialization format and compatibility policy. + * @throws ResourceNotFoundException if group is not found. + * @throws UnauthorizedException if the user is unauthorized. + */ + GroupProperties getGroupProperties(String groupId) throws ResourceNotFoundException, UnauthorizedException; + + /** + * Update group's schema validation policy. If previous rules are not supplied, then the update to the rules will be + * performed unconditionally. However, if previous rules are supplied, then the update will be performed if and only if + * existing {@link GroupProperties#schemaValidationRules} match previous rules. + * + * @param groupId Id for the group. + * @param validationRules New Schema validation rules for the group. + * @param previousRules Previous schema validation rules. + * @return true if the update was accepted by the service, false if it was rejected because of precondition failure. + * Precondition failure can occur if previous rules were specified and they do not match the rules set on the group. + * @throws ResourceNotFoundException if group is not found. + * @throws UnauthorizedException if the user is unauthorized. + */ + boolean updateSchemaValidationRules(String groupId, SchemaValidationRules validationRules, @Nullable SchemaValidationRules previousRules) + throws ResourceNotFoundException, UnauthorizedException; + + /** + * Gets the list of latest schemas for each object type registered under the group. Objects are identified by {@link SchemaInfo#type}. + * Schemas are retrieved atomically, so all schemas added before this call will be returned by this call. + * + * @param groupId Id for the group. + * @return List of latest schemas, one for each object type within the group. + * @throws ResourceNotFoundException if group is not found. + * @throws UnauthorizedException if the user is unauthorized. + */ + List<SchemaWithVersion> getSchemas(String groupId) throws ResourceNotFoundException, UnauthorizedException; + + /** + * Registers schema to the group. Schemas are validated against existing schemas in the group that share the same + * {@link SchemaInfo#type}. + * If the group is configured with {@link GroupProperties#allowMultipleTypes}, then multiple schemas with distinct + * {@link SchemaInfo#type} can be registered. + * All schemas with the same type are assigned monotonically increasing version numbers. + * Add schema api is idempotent. If a schema is already registered, its version info is returned by the service. + * + * @param groupId Id for the group. + * @param schemaInfo Schema to add. + * @return versionInfo which uniquely identifies where the schema is added in the group. If the schema is already registered, + * then the existing version info is returned. 
+ * @throws SchemaValidationFailedException if the schema is deemed invalid by applying schema validation rules which may + * include comparing schema with existing schemas for compatibility in the desired direction. + * @throws SerializationMismatchException if serialization format does not match the group's configured serialization format. + * @throws MalformedSchemaException for known serialization formats, if the service is unable to parse the schema binary or + * for avro and protobuf if the {@link SchemaInfo#type} does not match the name of record/message in the binary. + * @throws ResourceNotFoundException if group is not found. + * @throws UnauthorizedException if the user is unauthorized. + */ + VersionInfo addSchema(String groupId, SchemaInfo schemaInfo) throws SchemaValidationFailedException, SerializationMismatchException, + MalformedSchemaException, ResourceNotFoundException, UnauthorizedException; + + /** + * Api to delete schema corresponding to the version. Users should be very careful while using this API in production, + * especially if the schema has already been used to write the data. + * Delete schema api is idempotent. + * This does a soft delete of the schema. So getSchemaVersion with the version info will still return the schema. + * However, the schema will not participate in any compatibility checks once deleted. + * It will not be included in listing schema versions for the group using apis like {@link SchemaRegistryClient#getSchemaVersions} + * or {@link SchemaRegistryClient#getGroupHistory} or {@link SchemaRegistryClient#getSchemas} or + * {@link SchemaRegistryClient#getLatestSchemaVersion} + * If add schema is called again with this deleted schema, a new version will be assigned to it upon registration. + * + * @param groupId Id for the group. + * @param versionInfo Version which uniquely identifies schema within a group. + * @throws ResourceNotFoundException if group is not found. + * @throws UnauthorizedException if the user is unauthorized. + */ + void deleteSchemaVersion(String groupId, VersionInfo versionInfo) throws ResourceNotFoundException, UnauthorizedException; + + /** + * Api to delete schema corresponding to the schemaType and version. + * Users should be very careful while using this API in production, especially if the schema has already been used to write the data. + * Delete schema api is idempotent. + * This does a soft delete of the schema. So getSchemaVersion with the version info will still return the schema. + * However, the schema will not participate in any compatibility checks once deleted. + * It will not be included in listing schema versions for the group using apis like {@link SchemaRegistryClient#getSchemaVersions} + * or {@link SchemaRegistryClient#getGroupHistory} or {@link SchemaRegistryClient#getSchemas} or + * {@link SchemaRegistryClient#getLatestSchemaVersion} + * If add schema is called again with this deleted schema, a new version will be assigned to it upon registration. + * + * @param groupId Id for the group. + * @param schemaType schemaType that identifies the type of object the schema represents. This should be the same as the + * value specified in {@link SchemaInfo#type}. + * @param version Version number which uniquely identifies schema for the schemaType within a group. + * @throws ResourceNotFoundException if group is not found. + * @throws UnauthorizedException if the user is unauthorized. 
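As a rough usage sketch of the add and delete calls documented above (not part of the patch): the client instance, the group name "mygroup" and the type name "mytype" are hypothetical, the group is assumed to already exist with a custom serialization format, and only constructors shown elsewhere in this patch are used.

import com.google.common.collect.ImmutableMap;
import io.pravega.schemaregistry.client.SchemaRegistryClient;
import io.pravega.schemaregistry.contract.data.SchemaInfo;
import io.pravega.schemaregistry.contract.data.SerializationFormat;
import io.pravega.schemaregistry.contract.data.VersionInfo;
import java.nio.ByteBuffer;

public class AddSchemaSketch {
    static void registerSchema(SchemaRegistryClient client) {
        SerializationFormat format = SerializationFormat.custom("custom");
        SchemaInfo schemaInfo = new SchemaInfo("mytype", format, ByteBuffer.wrap(new byte[0]), ImmutableMap.of());

        // addSchema is idempotent: registering the same schema again returns the existing version.
        VersionInfo version = client.addSchema("mygroup", schemaInfo);

        // Soft-delete the version; getSchemaForVersion would still return it, but the schema no
        // longer participates in compatibility checks (see the javadoc above).
        client.deleteSchemaVersion("mygroup", version);
    }
}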
+ */ + void deleteSchemaVersion(String groupId, String schemaType, int version) throws ResourceNotFoundException, UnauthorizedException; + + /** + * Gets schema corresponding to the version. + * + * @param groupId Id for the group. + * @param versionInfo Version which uniquely identifies schema within a group. + * @return Schema info corresponding to the version info. + * @throws ResourceNotFoundException if group or version is not found. + * @throws UnauthorizedException if the user is unauthorized. + */ + SchemaInfo getSchemaForVersion(String groupId, VersionInfo versionInfo) throws ResourceNotFoundException, UnauthorizedException; + + /** + * Gets schema corresponding to the version. + * + * @param groupId Id for the group. + * @param schemaType schemaType as specified in the {@link SchemaInfo#type} while registering the schema. + * @param version Version which uniquely identifies schema of schemaType within a group. + * @return Schema info corresponding to the version info. + * @throws ResourceNotFoundException if group or version is not found. + * @throws UnauthorizedException if the user is unauthorized. + */ + SchemaInfo getSchemaForVersion(String groupId, String schemaType, int version) throws ResourceNotFoundException, UnauthorizedException; + + /** + * Gets encoding info against the requested encoding Id. The purpose of encoding info is to uniquely identify the encoding + * used on the data at rest. The encoding covers two parts - + * 1. Schema that defines the structure of the data and is used for serialization. A specific schema version registered with + * registry service is uniquely identified by the corresponding VersionInfo object. + * 2. CodecType that is used to encode the serialized data. This would typically be some compression. The codecType + * and schema should both be registered with the service and service will generate a unique identifier for each such + * pair. + * Encoding Info uniquely identifies a combination of a versionInfo and codecType. + * EncodingInfo also includes the {@link SchemaInfo} identified by the {@link VersionInfo}. + * + * @param groupId Id for the group. + * @param encodingId Encoding id that uniquely identifies a schema within a group. + * @return Encoding info corresponding to the encoding id. + * @throws ResourceNotFoundException if group or encoding id is not found. + * @throws UnauthorizedException if the user is unauthorized. + */ + EncodingInfo getEncodingInfo(String groupId, EncodingId encodingId) throws ResourceNotFoundException, UnauthorizedException; + + /** + * Gets an encoding id that uniquely identifies a combination of Schema version and codec type. + * This encoding id is a 4 byte integer and it can be used to tag the data which is serialized and encoded using the + * schema version and codecType identified by this encoding id. + * This api is idempotent. And if an encoding id is generated for a version and codec pair, subsequent requests to this + * api will return the generated encoding id. + * If the schema identified by the version is deleted using {@link SchemaRegistryClient#deleteSchemaVersion} api, + * then if the encoding id was already generated for the pair of schema version and codec, then it will be returned. + * However, if no encoding id for the versioninfo and codec pair was generated and the schema version was deleted, + * then any call to getEncodingId using the deleted versionInfo will throw ResourceNotFoundException. + * + * @param groupId Id for the group. 
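For the encoding APIs documented here, a small sketch (not part of the patch) of the round trip from a schema version and codec type to an encoding id and back; the group name "mygroup" and the codec type name "gzip" are hypothetical, and the codec type is registered first because getEncodingId requires it.

import io.pravega.schemaregistry.client.SchemaRegistryClient;
import io.pravega.schemaregistry.contract.data.EncodingId;
import io.pravega.schemaregistry.contract.data.EncodingInfo;
import io.pravega.schemaregistry.contract.data.VersionInfo;

public class EncodingSketch {
    static void resolveEncoding(SchemaRegistryClient client, VersionInfo version) {
        // The codec type must be registered with the group, otherwise getEncodingId
        // throws CodecTypeNotRegisteredException.
        client.addCodecType("mygroup", "gzip");

        // getEncodingId is idempotent for a given (version, codecType) pair.
        EncodingId id = client.getEncodingId("mygroup", version, "gzip");

        // The generated id resolves back to the schema version, schema info and codec type.
        EncodingInfo info = client.getEncodingInfo("mygroup", id);
        System.out.println(info.getCodecType());
    }
}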
+ * @param versionInfo Version of the schema. + * @param codecType Codec type. + * @return Encoding id for the pair of version and codec type. + * @throws CodecTypeNotRegisteredException if codec type is not registered with the group. Use {@link SchemaRegistryClient#addCodecType} + * @throws ResourceNotFoundException if group or version info is not found. + * @throws UnauthorizedException if the user is unauthorized. + */ + EncodingId getEncodingId(String groupId, VersionInfo versionInfo, String codecType) + throws CodecTypeNotRegisteredException, ResourceNotFoundException, UnauthorizedException; + + /** + * Gets latest schema and version for the group (or type, if specified). + * To get the latest schema version for a specific type identified by {@link SchemaInfo#type}, provide the type. + * Otherwise, if the group is configured to allow multiple schemas ({@link GroupProperties#allowMultipleTypes}) and + * type is not specified, then the last schema added to the group across all types will be returned. + * + * @param groupId Id for the group. + * @param schemaType Type of object identified by {@link SchemaInfo#type}. + * + * @return Schema with version for the last schema that was added to the group (or type). + * @throws ResourceNotFoundException if group is not found. + * @throws UnauthorizedException if the user is unauthorized. + */ + SchemaWithVersion getLatestSchemaVersion(String groupId, @Nullable String schemaType) + throws ResourceNotFoundException, UnauthorizedException; + + /** + * Gets version corresponding to the schema. + * For each schema type {@link SchemaInfo#type} and {@link SchemaInfo#serializationFormat} a versionInfo object uniquely + * identifies each distinct {@link SchemaInfo#schemaData}. + * + * @param groupId Id for the group. + * @param schemaInfo SchemaInfo that describes format and structure. + * @return VersionInfo corresponding to schema. + * @throws ResourceNotFoundException if group is not found or if schema is not registered. + * @throws UnauthorizedException if the user is unauthorized. + */ + VersionInfo getVersionForSchema(String groupId, SchemaInfo schemaInfo) throws ResourceNotFoundException, UnauthorizedException; + + /** + * Gets all schemas with corresponding versions for the group (or type, if specified). + * For groups configured with {@link GroupProperties#allowMultipleTypes}, the type {@link SchemaInfo#type} should be + * supplied to view schemas specific to a type. If type is null, all schemas in the group are returned. + * The order in the list matches the order in which schemas were evolved within the group. + * + * @param groupId Id for the group. + * @param schemaType type of object identified by {@link SchemaInfo#type}. + * @return Ordered list of schemas with versions and validation rules for all schemas in the group. + * @throws ResourceNotFoundException if group is not found. + * @throws UnauthorizedException if the user is unauthorized. + */ + List<SchemaWithVersion> getSchemaVersions(String groupId, @Nullable String schemaType) throws ResourceNotFoundException, UnauthorizedException; + + /** + * Checks whether given schema is valid by applying validation rules against previous schemas in the group + * subject to current {@link GroupProperties#schemaValidationRules} policy. + * This api performs exactly the same validations as {@link SchemaRegistryClient#addSchema(String, SchemaInfo)} + * but without registering the schema. 
This is primarily intended to be used during the schema development phase to validate that + * the changes to the schema are in compliance with validation rules for the group. + * + * @param groupId Id for the group. + * @param schemaInfo Schema to check for validity. + * @return A schema is valid if it passes all the {@link GroupProperties#schemaValidationRules}. The only rule supported + * presently is Compatibility. If the desired compatibility is satisfied by the schema, then this api returns true, false otherwise. + * @throws ResourceNotFoundException if group is not found. + * @throws UnauthorizedException if the user is unauthorized. + */ + boolean validateSchema(String groupId, SchemaInfo schemaInfo) throws ResourceNotFoundException, UnauthorizedException; + + /** + * Checks whether the given schema can be used to read data by validating it for reads against one or more existing schemas in the group + * subject to the current {@link GroupProperties#schemaValidationRules} policy. + * + * @param groupId Id for the group. + * @param schemaInfo Schema to check to be used for reads. + * @return True if it can be used to read, false otherwise. + * @throws ResourceNotFoundException if group is not found. + * @throws UnauthorizedException if the user is unauthorized. + */ + boolean canReadUsing(String groupId, SchemaInfo schemaInfo) throws ResourceNotFoundException, UnauthorizedException; + + /** + * Gets the list of codec types used for encoding in the group. + * + * @param groupId Id for the group. + * @return List of codec types used for encoding in the group. + * @throws ResourceNotFoundException if group is not found. + * @throws UnauthorizedException if the user is unauthorized. + */ + List<String> getCodecTypes(String groupId) throws ResourceNotFoundException, UnauthorizedException; + + /** + * Adds a new codec type to be used for encoding in the group. + * + * @param groupId Id for the group. + * @param codecType codec type. + * @throws ResourceNotFoundException if group is not found. + * @throws UnauthorizedException if the user is unauthorized. + */ + void addCodecType(String groupId, String codecType) throws ResourceNotFoundException, UnauthorizedException; + + /** + * Gets the complete schema evolution history of the group with schemas, versions, rules and time. + * The order in the list matches the order in which schemas were evolved within the group. + * This call is atomic and will get a consistent view at the time when the request is processed on the service. + * So all schemas that were added before this call are returned and all schemas that were deleted before this call + * are excluded. + * + * @param groupId Id for the group. + * @return Ordered list of schemas with versions and validation rules for all schemas in the group. + * @throws ResourceNotFoundException if group is not found. + * @throws UnauthorizedException if the user is unauthorized. + */ + List<GroupHistoryRecord> getGroupHistory(String groupId) throws ResourceNotFoundException, UnauthorizedException; + + /** + * Finds all groups and corresponding version info for the groups where the supplied schema has been registered. + * It is important to note that the same schema type could be part of multiple groups, however in each group it + * may have gone through a separate evolution. This api simply identifies all groups where the specific schema + * (type, format and binary) is used. + * The user defined {@link SchemaInfo#properties} is not used for comparison. + * + * @param schemaInfo Schema info to find references for. 
+ * @return Map of group Id to versionInfo identifier for the schema in that group. + * @throws ResourceNotFoundException if schema is not found. + * @throws UnauthorizedException if the user is unauthorized. + */ + Map getSchemaReferences(SchemaInfo schemaInfo) throws ResourceNotFoundException, UnauthorizedException; +} diff --git a/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientConfig.java b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientConfig.java new file mode 100644 index 000000000..66bb39921 --- /dev/null +++ b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientConfig.java @@ -0,0 +1,31 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.client; + +import lombok.Builder; +import lombok.Data; + +import java.net.URI; + +/** + * Registry client configuration used to create registry client. + */ +@Data +@Builder +public class SchemaRegistryClientConfig { + /** + * URI for connecting with registry client. + */ + private final URI schemaRegistryUri; + + private SchemaRegistryClientConfig(URI schemaRegistryUri) { + this.schemaRegistryUri = schemaRegistryUri; + } +} diff --git a/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientFactory.java b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientFactory.java new file mode 100644 index 000000000..caba3d815 --- /dev/null +++ b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientFactory.java @@ -0,0 +1,25 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.client; + +/** + * Factory class for creating Schema Registry client. + */ +public class SchemaRegistryClientFactory { + /** + * Factory method to create Schema Registry Client. + * + * @param config Configuration for creating registry client. + * @return SchemaRegistry client implementation + */ + public static SchemaRegistryClient createRegistryClient(SchemaRegistryClientConfig config) { + return new SchemaRegistryClientImpl(config.getSchemaRegistryUri()); + } +} diff --git a/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientImpl.java b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientImpl.java new file mode 100644 index 000000000..5e46b69c1 --- /dev/null +++ b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientImpl.java @@ -0,0 +1,461 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
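Putting the config and factory classes above together with the interface, a minimal end-to-end sketch (not part of the patch): the registry URI, the group name "mygroup" and the assumption that the group already exists are hypothetical, and the exception handling reflects the HTTP status mapping done by the implementation that follows (409 Conflict and 404 Not Found for addSchema).

import com.google.common.collect.ImmutableMap;
import io.pravega.schemaregistry.client.SchemaRegistryClient;
import io.pravega.schemaregistry.client.SchemaRegistryClientConfig;
import io.pravega.schemaregistry.client.SchemaRegistryClientFactory;
import io.pravega.schemaregistry.client.exceptions.RegistryExceptions;
import io.pravega.schemaregistry.contract.data.GroupProperties;
import io.pravega.schemaregistry.contract.data.SchemaInfo;
import io.pravega.schemaregistry.contract.data.SerializationFormat;
import java.net.URI;
import java.nio.ByteBuffer;
import java.util.Iterator;
import java.util.Map;

public class ClientBootstrapSketch {
    public static void main(String[] args) {
        // The registry endpoint is hypothetical; SchemaRegistryClientConfig is a Lombok @Builder class.
        SchemaRegistryClientConfig config = SchemaRegistryClientConfig.builder()
                .schemaRegistryUri(URI.create("http://localhost:9092"))
                .build();
        SchemaRegistryClient client = SchemaRegistryClientFactory.createRegistryClient(config);

        // List all groups the caller is authorized on; the iterator pages through the service internally.
        Iterator<Map.Entry<String, GroupProperties>> groups = client.listGroups();
        while (groups.hasNext()) {
            System.out.println(groups.next().getKey());
        }

        SchemaInfo schema = new SchemaInfo("mytype", SerializationFormat.custom("custom"),
                ByteBuffer.wrap(new byte[0]), ImmutableMap.of());
        try {
            client.addSchema("mygroup", schema);
        } catch (RegistryExceptions.SchemaValidationFailedException e) {
            // The service returned 409 Conflict: the schema breaks the group's validation rules.
        } catch (RegistryExceptions.ResourceNotFoundException e) {
            // The service returned 404: the group does not exist.
        }
    }
}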
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.client; + +import com.google.common.annotations.VisibleForTesting; +import io.pravega.common.Exceptions; +import io.pravega.common.util.Retry; +import io.pravega.schemaregistry.common.ContinuationTokenIterator; +import io.pravega.schemaregistry.contract.data.EncodingId; +import io.pravega.schemaregistry.contract.data.EncodingInfo; +import io.pravega.schemaregistry.contract.data.GroupHistoryRecord; +import io.pravega.schemaregistry.contract.data.GroupProperties; +import io.pravega.schemaregistry.contract.data.SchemaInfo; +import io.pravega.schemaregistry.contract.data.SchemaValidationRules; +import io.pravega.schemaregistry.contract.data.SchemaWithVersion; +import io.pravega.schemaregistry.contract.data.VersionInfo; +import io.pravega.schemaregistry.contract.generated.rest.model.CanRead; +import io.pravega.schemaregistry.contract.generated.rest.model.CodecTypesList; +import io.pravega.schemaregistry.contract.generated.rest.model.CreateGroupRequest; +import io.pravega.schemaregistry.contract.generated.rest.model.GetEncodingIdRequest; +import io.pravega.schemaregistry.contract.generated.rest.model.ListGroupsResponse; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaVersionsList; +import io.pravega.schemaregistry.contract.generated.rest.model.UpdateValidationRulesRequest; +import io.pravega.schemaregistry.contract.generated.rest.model.Valid; +import io.pravega.schemaregistry.contract.generated.rest.model.ValidateRequest; +import io.pravega.schemaregistry.contract.transform.ModelHelper; +import io.pravega.schemaregistry.contract.v1.ApiV1; +import org.glassfish.jersey.client.ClientConfig; +import org.glassfish.jersey.client.proxy.WebResourceFactory; + +import javax.annotation.Nullable; +import javax.ws.rs.client.Client; +import javax.ws.rs.client.ClientBuilder; +import javax.ws.rs.core.Response; +import java.net.URI; +import java.util.AbstractMap; +import java.util.Collection; +import java.util.Comparator; +import java.util.Iterator; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.function.Function; +import java.util.function.Supplier; +import java.util.stream.Collectors; + +import static io.pravega.schemaregistry.client.exceptions.RegistryExceptions.*; + +public class SchemaRegistryClientImpl implements SchemaRegistryClient { + private static final Retry.RetryAndThrowConditionally RETRY = Retry + .withExpBackoff(100, 2, 10, 1000) + .retryWhen(x -> Exceptions.unwrap(x) instanceof ConnectionException); + private static final int GROUP_LIMIT = 100; + private static final int SCHEMA_LIMIT = 10; + + private final ApiV1.GroupsApi groupProxy; + private final ApiV1.SchemasApi schemaProxy; + + SchemaRegistryClientImpl(URI uri) { + Client client = ClientBuilder.newClient(new ClientConfig()); + this.groupProxy = WebResourceFactory.newResource(ApiV1.GroupsApi.class, client.target(uri)); + this.schemaProxy = WebResourceFactory.newResource(ApiV1.SchemasApi.class, client.target(uri)); + } + + @VisibleForTesting + SchemaRegistryClientImpl(ApiV1.GroupsApi groupProxy) { + this(groupProxy, null); + } + + @VisibleForTesting + SchemaRegistryClientImpl(ApiV1.GroupsApi groupProxy, ApiV1.SchemasApi schemaProxy) { + this.groupProxy = groupProxy; + this.schemaProxy = schemaProxy; + } + + @Override + public boolean addGroup(String groupId, GroupProperties groupProperties) { + return withRetry(() -> { + CreateGroupRequest 
request = new CreateGroupRequest().groupName(groupId).groupProperties(ModelHelper.encode(groupProperties)); + Response response = groupProxy.createGroup(request); + switch (Response.Status.fromStatusCode(response.getStatus())) { + case CREATED: + return true; + case CONFLICT: + return false; + case BAD_REQUEST: + throw new BadArgumentException("Group properties invalid. Verify that schema validation rules include compatibility."); + default: + throw new InternalServerError("Internal Service error. Failed to add the group."); + } + }); + } + + @Override + public void removeGroup(String groupId) { + withRetry(() -> { + Response response = groupProxy.deleteGroup(groupId); + switch (Response.Status.fromStatusCode(response.getStatus())) { + case NO_CONTENT: + return; + default: + throw new InternalServerError("Internal Service error. Failed to remove the group."); + } + }); + } + + @Override + public Iterator> listGroups() { + final Function>>> function = + continuationToken -> { + ListGroupsResponse entity = getListGroupsResponse(continuationToken); + List> map = new LinkedList<>(); + for (Map.Entry entry : entity.getGroups().entrySet()) { + ModelHelper.decode(entry.getValue().getSerializationFormat()); + map.add(new AbstractMap.SimpleEntry<>(entry.getKey(), ModelHelper.decode(entry.getValue()))); + } + return new AbstractMap.SimpleEntry<>(entity.getContinuationToken(), map); + }; + + return new ContinuationTokenIterator<>(function, null); + } + + private ListGroupsResponse getListGroupsResponse(String continuationToken) { + return withRetry(() -> { + Response response = groupProxy.listGroups(continuationToken, GROUP_LIMIT); + switch (Response.Status.fromStatusCode(response.getStatus())) { + case OK: + return response.readEntity(ListGroupsResponse.class); + default: + throw new InternalServerError("Internal Service error. Failed to list groups."); + } + }); + } + + @Override + public GroupProperties getGroupProperties(String groupId) { + return withRetry(() -> { + Response response = groupProxy.getGroupProperties(groupId); + switch (Response.Status.fromStatusCode(response.getStatus())) { + case OK: + return ModelHelper.decode(response.readEntity(io.pravega.schemaregistry.contract.generated.rest.model.GroupProperties.class)); + case NOT_FOUND: + throw new ResourceNotFoundException("Group not found."); + default: + throw new InternalServerError("Internal Service error. Failed to list groups."); + } + }); + } + + @Override + public boolean updateSchemaValidationRules(String groupId, SchemaValidationRules validationRules, @Nullable SchemaValidationRules previousRules) { + return withRetry(() -> { + UpdateValidationRulesRequest request = new UpdateValidationRulesRequest() + .validationRules(ModelHelper.encode(validationRules)); + if (previousRules != null) { + request.setPreviousRules(ModelHelper.encode(previousRules)); + } + + Response response = groupProxy.updateSchemaValidationRules(groupId, request); + switch (Response.Status.fromStatusCode(response.getStatus())) { + case CONFLICT: + return false; + case NOT_FOUND: + throw new ResourceNotFoundException("Group not found."); + case OK: + return true; + default: + throw new InternalServerError("Internal Service error. 
Failed to update schema validation rules."); + } + }); + } + + @Override + public List getSchemas(String groupId) { + return latestSchemas(groupId, null); + } + + private List latestSchemas(String groupId, String type) { + return withRetry(() -> { + Response response = groupProxy.getSchemas(groupId, type); + SchemaVersionsList objectsList = response.readEntity(SchemaVersionsList.class); + switch (Response.Status.fromStatusCode(response.getStatus())) { + case OK: + return objectsList.getSchemas().stream().map(ModelHelper::decode).collect(Collectors.toList()); + case NOT_FOUND: + throw new ResourceNotFoundException("Group not found."); + default: + throw new InternalServerError("Internal Service error. Failed to get object types."); + } + }); + } + + @Override + public VersionInfo addSchema(String groupId, SchemaInfo schemaInfo) { + return withRetry(() -> { + Response response = groupProxy.addSchema(groupId, ModelHelper.encode(schemaInfo)); + switch (Response.Status.fromStatusCode(response.getStatus())) { + case CREATED: + return ModelHelper.decode(response.readEntity(io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo.class)); + case NOT_FOUND: + throw new ResourceNotFoundException("Group not found."); + case CONFLICT: + throw new SchemaValidationFailedException("Schema is incompatible."); + case EXPECTATION_FAILED: + throw new SerializationMismatchException("Serialization format disallowed."); + case BAD_REQUEST: + throw new MalformedSchemaException("Schema is malformed. Verify the schema data and type"); + default: + throw new InternalServerError("Internal Service error. Failed to addSchema."); + } + }); + } + + @Override + public void deleteSchemaVersion(String groupId, VersionInfo versionInfo) { + withRetry(() -> { + Response response = groupProxy.deleteSchemaFromVersionOrdinal(groupId, versionInfo.getOrdinal()); + if (response.getStatus() == Response.Status.NOT_FOUND.getStatusCode()) { + throw new ResourceNotFoundException("Group not found."); + } else if (response.getStatus() != Response.Status.NO_CONTENT.getStatusCode()) { + throw new InternalServerError("Internal Service error. Failed to get schema."); + } + }); + } + + @Override + public void deleteSchemaVersion(String groupId, String schemaType, int version) { + withRetry(() -> { + Response response = groupProxy.deleteSchemaVersion(groupId, schemaType, version); + if (response.getStatus() == Response.Status.NOT_FOUND.getStatusCode()) { + throw new ResourceNotFoundException("Group not found."); + } else if (response.getStatus() != Response.Status.NO_CONTENT.getStatusCode()) { + throw new InternalServerError("Internal Service error. Failed to get schema."); + } + }); + } + + @Override + public SchemaInfo getSchemaForVersion(String groupId, VersionInfo versionInfo) { + return withRetry(() -> { + Response response = groupProxy.getSchemaFromVersionOrdinal(groupId, versionInfo.getOrdinal()); + switch (Response.Status.fromStatusCode(response.getStatus())) { + case OK: + return ModelHelper.decode(response.readEntity(io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo.class)); + case NOT_FOUND: + throw new ResourceNotFoundException("Schema not found."); + default: + throw new InternalServerError("Internal Service error. 
Failed to get schema."); + } + }); + } + + @Override + public SchemaInfo getSchemaForVersion(String groupId, String schemaType, int version) { + return withRetry(() -> { + Response response = groupProxy.getSchemaFromVersion(groupId, schemaType, version); + switch (Response.Status.fromStatusCode(response.getStatus())) { + case OK: + return ModelHelper.decode(response.readEntity(io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo.class)); + case NOT_FOUND: + throw new ResourceNotFoundException("Schema not found."); + default: + throw new InternalServerError("Internal Service error. Failed to get schema."); + } + }); + } + + @Override + public EncodingInfo getEncodingInfo(String groupId, EncodingId encodingId) { + return withRetry(() -> { + Response response = groupProxy.getEncodingInfo(groupId, encodingId.getId()); + switch (Response.Status.fromStatusCode(response.getStatus())) { + case OK: + return ModelHelper.decode(response.readEntity(io.pravega.schemaregistry.contract.generated.rest.model.EncodingInfo.class)); + case NOT_FOUND: + throw new ResourceNotFoundException("Encoding not found."); + default: + throw new InternalServerError("Internal Service error. Failed to get encoding info."); + } + }); + } + + @Override + public EncodingId getEncodingId(String groupId, VersionInfo versionInfo, String codecType) { + return withRetry(() -> { + GetEncodingIdRequest getEncodingIdRequest = new GetEncodingIdRequest(); + getEncodingIdRequest.codecType(codecType) + .versionInfo(ModelHelper.encode(versionInfo)); + Response response = groupProxy.getEncodingId(groupId, getEncodingIdRequest); + switch (Response.Status.fromStatusCode(response.getStatus())) { + case OK: + return ModelHelper.decode(response.readEntity(io.pravega.schemaregistry.contract.generated.rest.model.EncodingId.class)); + case NOT_FOUND: + throw new ResourceNotFoundException("getEncodingId failed. Either Group or Version does not exist."); + case PRECONDITION_FAILED: + throw new CodecTypeNotRegisteredException(String.format("Codec type %s not registered.", codecType)); + default: + throw new InternalServerError("Internal Service error. Failed to get encoding info."); + } + }); + } + + @Override + public SchemaWithVersion getLatestSchemaVersion(String groupId, @Nullable String schemaType) { + List list = latestSchemas(groupId, schemaType); + if (schemaType == null) { + return list.stream().max(Comparator.comparingInt(x -> x.getVersionInfo().getOrdinal())).orElse(null); + } else { + return list.get(0); + } + } + + @Override + public List getSchemaVersions(String groupId, @Nullable String schemaType) { + return withRetry(() -> { + Response response = groupProxy.getSchemaVersions(groupId, schemaType); + switch (Response.Status.fromStatusCode(response.getStatus())) { + case OK: + SchemaVersionsList schemaList = response.readEntity(SchemaVersionsList.class); + return schemaList.getSchemas().stream().map(ModelHelper::decode).collect(Collectors.toList()); + case NOT_FOUND: + throw new ResourceNotFoundException("getSchemaVersions failed. Group does not exist."); + default: + throw new InternalServerError("Internal Service error. 
Failed to get schema versions for group."); + } + }); + } + + @Override + public List getGroupHistory(String groupId) { + return withRetry(() -> { + Response response = groupProxy.getGroupHistory(groupId); + switch (Response.Status.fromStatusCode(response.getStatus())) { + case OK: + io.pravega.schemaregistry.contract.generated.rest.model.GroupHistory history = response.readEntity(io.pravega.schemaregistry.contract.generated.rest.model.GroupHistory.class); + return history.getHistory().stream().map(ModelHelper::decode).collect(Collectors.toList()); + case NOT_FOUND: + throw new ResourceNotFoundException("getGroupHistory failed. Either Group or Version does not exist."); + default: + throw new InternalServerError("Internal Service error. Failed to get schema evolution history for group."); + } + }); + } + + @Override + public Map getSchemaReferences(SchemaInfo schemaInfo) throws ResourceNotFoundException, UnauthorizedException { + return withRetry(() -> { + Response response = schemaProxy.getSchemaReferences(ModelHelper.encode(schemaInfo)); + switch (Response.Status.fromStatusCode(response.getStatus())) { + case OK: + io.pravega.schemaregistry.contract.generated.rest.model.AddedTo addedTo = response + .readEntity(io.pravega.schemaregistry.contract.generated.rest.model.AddedTo.class); + return addedTo.getGroups().entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, x -> ModelHelper.decode(x.getValue()))); + case NOT_FOUND: + throw new ResourceNotFoundException("getSchemaReferences failed. Either Group or Version does not exist."); + default: + throw new InternalServerError("Internal Service error. Failed to get schema evolution history for group."); + } + }); + } + + @Override + public VersionInfo getVersionForSchema(String groupId, SchemaInfo schema) { + return withRetry(() -> { + io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo schemaInfo = ModelHelper.encode(schema); + + Response response = groupProxy.getSchemaVersion(groupId, schemaInfo); + switch (Response.Status.fromStatusCode(response.getStatus())) { + case OK: + return ModelHelper.decode(response.readEntity(io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo.class)); + case NOT_FOUND: + throw new ResourceNotFoundException("Schema not found."); + default: + throw new InternalServerError("Internal Service error. 
Failed to get schema version."); + } + }); + } + + @Override + public boolean validateSchema(String groupId, SchemaInfo schemaInfo) { + return withRetry(() -> { + ValidateRequest validateRequest = new ValidateRequest() + .schemaInfo(ModelHelper.encode(schemaInfo)); + Response response = groupProxy.validate(groupId, validateRequest); + switch (Response.Status.fromStatusCode(response.getStatus())) { + case OK: + return response.readEntity(Valid.class).isValid(); + case NOT_FOUND: + throw new ResourceNotFoundException("Group not found."); + default: + throw new InternalServerError("Internal Service error."); + } + }); + } + + @Override + public boolean canReadUsing(String groupId, SchemaInfo schemaInfo) { + return withRetry(() -> { + io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo request = ModelHelper.encode(schemaInfo); + Response response = groupProxy.canRead(groupId, request); + switch (Response.Status.fromStatusCode(response.getStatus())) { + case OK: + return response.readEntity(CanRead.class).isCompatible(); + case NOT_FOUND: + throw new ResourceNotFoundException("Schema not found."); + default: + throw new InternalServerError("Internal Service error."); + } + }); + } + + @Override + public List getCodecTypes(String groupId) { + return withRetry(() -> { + Response response = groupProxy.getCodecTypesList(groupId); + CodecTypesList list = response.readEntity(CodecTypesList.class); + switch (Response.Status.fromStatusCode(response.getStatus())) { + case OK: + return list.getCodecTypes(); + case NOT_FOUND: + throw new ResourceNotFoundException("Group not found."); + default: + throw new InternalServerError("Failed to get codecTypes. Internal server error."); + } + }); + } + + @Override + public void addCodecType(String groupId, String codecType) { + withRetry(() -> { + Response response = groupProxy.addCodecType(groupId, codecType); + + switch (Response.Status.fromStatusCode(response.getStatus())) { + case CREATED: + return; + case NOT_FOUND: + throw new ResourceNotFoundException("Group not found."); + default: + throw new InternalServerError("Failed to add codec type. Internal server error."); + } + }); + } + + private T withRetry(Supplier supplier) { + return RETRY.run(supplier::get); + } + + private void withRetry(Runnable runnable) { + RETRY.run(() -> { + runnable.run(); + return null; + }); + } +} diff --git a/client/src/main/java/io/pravega/schemaregistry/client/exceptions/RegistryExceptions.java b/client/src/main/java/io/pravega/schemaregistry/client/exceptions/RegistryExceptions.java new file mode 100644 index 000000000..7bbb28966 --- /dev/null +++ b/client/src/main/java/io/pravega/schemaregistry/client/exceptions/RegistryExceptions.java @@ -0,0 +1,187 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.client.exceptions; + +import com.google.common.base.Preconditions; +import io.pravega.schemaregistry.contract.data.GroupProperties; +import io.pravega.schemaregistry.contract.data.SchemaInfo; +import lombok.Getter; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +@Getter +public class RegistryExceptions extends RuntimeException { + /** + * Enum to describe the type of exception. 
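     *
     * For illustration only (a sketch that is not part of this patch): callers of the registry client can use the
     * {@link RetryableException} marker defined below to separate transient failures (connection and internal
     * server errors) from terminal ones, e.g. given a SchemaRegistryClient named client:
     * <pre>{@code
     *     GroupProperties properties = null;
     *     for (int attempt = 0; attempt < 3 && properties == null; attempt++) {
     *         try {
     *             properties = client.getGroupProperties("mygroup");
     *         } catch (RegistryExceptions e) {
     *             if (!(e instanceof RetryableException)) {
     *                 throw e;   // bad argument, resource not found, etc. are terminal
     *             }
     *             // connection or internal server error: retry (backoff omitted in this sketch)
     *         }
     *     }
     * }</pre>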
+ */ + public enum Type { + UNAUTHORIZED, + BAD_ARGUMENT, + PRECONDITION_FAILED, + CODEC_NOT_FOUND, + MALFORMED_SCHEMA, + INCOMPATIBLE_SCHEMA, + RESOURCE_NOT_FOUND, + SERIALIZATION_FORMAT_MISMATCH, + CONNECTION_ERROR, + INTERNAL_SERVER_ERROR + } + + /** + * Trait to identify whether an exception is retryable or not. + */ + public interface RetryableException { + } + + /** + * Construct a StoreException. + * + * @param errorMessage The detailed error message. + */ + public RegistryExceptions(final String errorMessage) { + super(errorMessage); + } + + /** + * Factory method to construct Store exceptions. + * + * @param type Type of Exception. + * @param errorMessage The detailed error message. + * @return Instance of type of StoreException. + */ + public static RegistryExceptions create(final Type type, final String errorMessage) { + Preconditions.checkArgument(errorMessage != null && !errorMessage.isEmpty(), + "Either cause or errorMessage should be non-empty"); + RegistryExceptions exception; + switch (type) { + case UNAUTHORIZED: + exception = new UnauthorizedException(errorMessage); + break; + case BAD_ARGUMENT: + exception = new BadArgumentException(errorMessage); + break; + case PRECONDITION_FAILED: + exception = new PreconditionFailedException(errorMessage); + break; + case CODEC_NOT_FOUND: + exception = new CodecTypeNotRegisteredException(errorMessage); + break; + case INCOMPATIBLE_SCHEMA: + exception = new SchemaValidationFailedException(errorMessage); + break; + case RESOURCE_NOT_FOUND: + exception = new ResourceNotFoundException(errorMessage); + break; + case SERIALIZATION_FORMAT_MISMATCH: + exception = new SerializationMismatchException(errorMessage); + break; + case CONNECTION_ERROR: + exception = new ConnectionException(errorMessage); + break; + case INTERNAL_SERVER_ERROR: + exception = new InternalServerError(errorMessage); + break; + default: + throw new IllegalArgumentException("Invalid exception type"); + } + return exception; + } + + /** + * User is unauthorized to perform requested action. + */ + public static class UnauthorizedException extends RegistryExceptions { + public UnauthorizedException(String errorMessage) { + super(errorMessage); + } + } + + /** + * Service rejected the supplied arguments with bad argument exception. + */ + public static class BadArgumentException extends RegistryExceptions { + public BadArgumentException(String errorMessage) { + super(errorMessage); + } + } + + /** + * Service rejected the request because the expected precondition for the requested action was not satisfied. + */ + public static class PreconditionFailedException extends RegistryExceptions { + public PreconditionFailedException(String errorMessage) { + super(errorMessage); + } + } + + /** + * The requested codecType is not added to the group. + */ + public static class CodecTypeNotRegisteredException extends RegistryExceptions { + public CodecTypeNotRegisteredException(String errorMessage) { + super(errorMessage); + } + } + + /** + * Schema is malformed. Verify the schema data and type. + */ + public static class MalformedSchemaException extends RegistryExceptions { + public MalformedSchemaException(String errorMessage) { + super(errorMessage); + } + } + + /** + * The schema validation failed as it was validated against the ValidationRules set for the group. 
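     *
     * For illustration only (a sketch that is not part of this patch; newSchema is a placeholder SchemaInfo):
     * this is the exception a writer sees when a registered schema breaks the group's compatibility setting:
     * <pre>{@code
     *     try {
     *         client.addSchema("mygroup", newSchema);
     *     } catch (SchemaValidationFailedException e) {
     *         // evolve newSchema to satisfy the group's validation rules, or update the rules, then retry
     *     }
     * }</pre>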
+ */ + public static class SchemaValidationFailedException extends RegistryExceptions { + public SchemaValidationFailedException(String errorMessage) { + super(errorMessage); + } + } + + /** + * Requested resource not found. + */ + public static class ResourceNotFoundException extends RegistryExceptions { + public ResourceNotFoundException(String errorMessage) { + super(errorMessage); + } + } + + /** + * Serialization format is not allowed for the group. Check {@link SchemaInfo#serializationFormat} with + * {@link GroupProperties#serializationFormat}. + */ + public static class SerializationMismatchException extends RegistryExceptions { + public SerializationMismatchException(String errorMessage) { + super(errorMessage); + } + } + + /** + * Exception type due to failure in connecting to the service. + */ + public static class ConnectionException extends RegistryExceptions implements RetryableException { + public ConnectionException(String errorMessage) { + super(errorMessage); + } + } + + /** + * The request processing failed on the service. + */ + public static class InternalServerError extends RegistryExceptions implements RetryableException { + public InternalServerError(String errorMessage) { + super(errorMessage); + } + } +} diff --git a/client/src/test/java/io/pravega/schemaregistry/client/TestSchemaRegistryClient.java b/client/src/test/java/io/pravega/schemaregistry/client/TestSchemaRegistryClient.java new file mode 100644 index 000000000..f4427b183 --- /dev/null +++ b/client/src/test/java/io/pravega/schemaregistry/client/TestSchemaRegistryClient.java @@ -0,0 +1,612 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.client; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Lists; +import io.pravega.schemaregistry.contract.data.Compatibility; +import io.pravega.schemaregistry.contract.data.EncodingId; +import io.pravega.schemaregistry.contract.data.EncodingInfo; +import io.pravega.schemaregistry.contract.data.SchemaInfo; +import io.pravega.schemaregistry.contract.data.SchemaValidationRules; +import io.pravega.schemaregistry.contract.data.SchemaWithVersion; +import io.pravega.schemaregistry.contract.data.SerializationFormat; +import io.pravega.schemaregistry.contract.data.VersionInfo; +import io.pravega.schemaregistry.contract.generated.rest.model.CanRead; +import io.pravega.schemaregistry.contract.generated.rest.model.CodecTypesList; +import io.pravega.schemaregistry.contract.generated.rest.model.GroupHistory; +import io.pravega.schemaregistry.contract.generated.rest.model.GroupHistoryRecord; +import io.pravega.schemaregistry.contract.generated.rest.model.GroupProperties; +import io.pravega.schemaregistry.contract.generated.rest.model.ListGroupsResponse; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaVersionsList; +import io.pravega.schemaregistry.contract.generated.rest.model.Valid; +import io.pravega.schemaregistry.contract.transform.ModelHelper; +import io.pravega.schemaregistry.contract.v1.ApiV1; +import io.pravega.test.common.AssertExtensions; +import lombok.val; +import org.junit.Test; + +import javax.ws.rs.core.Response; +import java.nio.ByteBuffer; +import java.util.Collections; +import java.util.List; +import java.util.Map; + +import static io.pravega.schemaregistry.client.exceptions.RegistryExceptions.*; +import static org.junit.Assert.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.*; + +public class TestSchemaRegistryClient { + @Test + public void testGroup() { + ApiV1.GroupsApi proxy = mock(ApiV1.GroupsApi.class); + SchemaRegistryClientImpl client = new SchemaRegistryClientImpl(proxy); + Response response = mock(Response.class); + + // add group + // 1. 
success response code + io.pravega.schemaregistry.contract.data.GroupProperties groupProperties = new io.pravega.schemaregistry.contract.data.GroupProperties( + SerializationFormat.Avro, SchemaValidationRules.of(Compatibility.backward()), true); + doReturn(response).when(proxy).createGroup(any()); + doReturn(Response.Status.CREATED.getStatusCode()).when(response).getStatus(); + boolean addGroup = client.addGroup("grp1", groupProperties); + assertTrue(addGroup); + + doReturn(Response.Status.CONFLICT.getStatusCode()).when(response).getStatus(); + addGroup = client.addGroup("grp1", groupProperties); + assertFalse(addGroup); + + doReturn(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("Exception should have been thrown", + () -> client.addGroup("grp1", groupProperties), + e -> e instanceof InternalServerError); + reset(response); + + // list groups + doReturn(response).when(proxy).listGroups(null, 100); + Response response2 = mock(Response.class); + doReturn(response2).when(proxy).listGroups("token", 100); + doReturn(Response.Status.OK.getStatusCode()).when(response).getStatus(); + doReturn(Response.Status.OK.getStatusCode()).when(response2).getStatus(); + GroupProperties mygroup = new GroupProperties().properties(Collections.emptyMap()) + .serializationFormat(new io.pravega.schemaregistry.contract.generated.rest.model.SerializationFormat() + .serializationFormat(io.pravega.schemaregistry.contract.generated.rest.model.SerializationFormat.SerializationFormatEnum.ANY)) + .schemaValidationRules(ModelHelper.encode(SchemaValidationRules.of(Compatibility.backward()))) + .allowMultipleTypes(false); + String groupName = "mygroup"; + ListGroupsResponse groupList = new ListGroupsResponse().groups(Collections.singletonMap(groupName, mygroup)).continuationToken("token"); + doReturn(groupList).when(response).readEntity(eq(ListGroupsResponse.class)); + doReturn(new ListGroupsResponse().groups(Collections.emptyMap()).continuationToken("token")).when(response2).readEntity(eq(ListGroupsResponse.class)); + + val groups = Lists.newArrayList(client.listGroups()); + assertEquals(1, groups.size()); + assertTrue(groups.stream().anyMatch(x -> x.getKey().equals(groupName))); + Map.Entry group = + groups.stream().filter(x -> x.getKey().equals(groupName)).findAny().orElseThrow(RuntimeException::new); + assertEquals(group.getValue().getSerializationFormat(), SerializationFormat.Any); + assertEquals(group.getValue().getSchemaValidationRules().getRules().get(Compatibility.class.getSimpleName()), Compatibility.backward()); + + reset(response); + } + + @Test + public void testListGroup() { + ApiV1.GroupsApi proxy = mock(ApiV1.GroupsApi.class); + SchemaRegistryClientImpl client = new SchemaRegistryClientImpl(proxy); + Response response = mock(Response.class); + + doReturn(Response.Status.OK.getStatusCode()).when(response).getStatus(); + GroupProperties mygroup = new GroupProperties().properties(Collections.emptyMap()) + .serializationFormat(new io.pravega.schemaregistry.contract.generated.rest.model.SerializationFormat() + .serializationFormat(io.pravega.schemaregistry.contract.generated.rest.model.SerializationFormat.SerializationFormatEnum.ANY)) + .schemaValidationRules(ModelHelper.encode(SchemaValidationRules.of(Compatibility.backward()))) + .allowMultipleTypes(false); + String groupId = "mygroup"; + ListGroupsResponse groupList = new ListGroupsResponse().groups(Collections.singletonMap(groupId, mygroup)).continuationToken("token"); + ListGroupsResponse 
groupList2 = new ListGroupsResponse().groups(Collections.emptyMap()).continuationToken("token"); + doReturn(response).when(proxy).listGroups(null, 100); + Response response2 = mock(Response.class); + doReturn(response2).when(proxy).listGroups("token", 100); + doReturn(Response.Status.OK.getStatusCode()).when(response2).getStatus(); + + doReturn(groupList).when(response).readEntity(eq(ListGroupsResponse.class)); + doReturn(groupList2).when(response2).readEntity(eq(ListGroupsResponse.class)); + val groups = Lists.newArrayList(client.listGroups()); + assertEquals(1, groups.size()); + assertTrue(groups.stream().anyMatch(x -> x.getKey().equals(groupId))); + Map.Entry group = + groups.stream().filter(x -> x.getKey().equals(groupId)).findAny().orElseThrow(RuntimeException::new); + assertEquals(group.getValue().getSerializationFormat(), SerializationFormat.Any); + assertEquals(group.getValue().getSchemaValidationRules().getRules().get(Compatibility.class.getSimpleName()), Compatibility.backward()); + + // Runtime Exception + doReturn(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("Exception should have been thrown", () -> Lists.newArrayList(client.listGroups()), e -> e instanceof InternalServerError); + } + + @Test + public void testRemoveGroup() { + ApiV1.GroupsApi proxy = mock(ApiV1.GroupsApi.class); + SchemaRegistryClientImpl client = new SchemaRegistryClientImpl(proxy); + Response response = mock(Response.class); + doReturn(response).when(proxy).deleteGroup(anyString()); + doReturn(Response.Status.NO_CONTENT.getStatusCode()).when(response).getStatus(); + + client.removeGroup("mygroup"); + + // not OK response + doReturn(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", () -> client.removeGroup("mygroup"), + e -> e instanceof InternalServerError); + } + + @Test + public void testGetGroupProperties() { + ApiV1.GroupsApi proxy = mock(ApiV1.GroupsApi.class); + SchemaRegistryClientImpl client = new SchemaRegistryClientImpl(proxy); + Response response = mock(Response.class); + doReturn(response).when(proxy).getGroupProperties(anyString()); + + doReturn(Response.Status.OK.getStatusCode()).when(response).getStatus(); + GroupProperties mygroup + = new GroupProperties().properties(Collections.emptyMap()) + .serializationFormat(new io.pravega.schemaregistry.contract.generated.rest.model.SerializationFormat() + .serializationFormat( + io.pravega.schemaregistry.contract.generated.rest.model.SerializationFormat.SerializationFormatEnum.ANY)) + .schemaValidationRules(ModelHelper.encode(SchemaValidationRules.of(Compatibility.backward()))) + .allowMultipleTypes(false); + doReturn(mygroup).when(response).readEntity(eq(GroupProperties.class)); + io.pravega.schemaregistry.contract.data.GroupProperties groupProperties = client.getGroupProperties("mygroup"); + assertEquals(groupProperties.getSerializationFormat(), SerializationFormat.Any); + assertEquals(groupProperties.getSchemaValidationRules().getRules().get(Compatibility.class.getSimpleName()), + Compatibility.backward()); + // ResourceNotFoundException + doReturn(Response.Status.NOT_FOUND.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", () -> client.getGroupProperties( + "mygroup"), e -> e instanceof ResourceNotFoundException); + //Runtime Exception + 
doReturn(Response.Status.CONFLICT.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", () -> client.getGroupProperties( + "mygroup"), e -> e instanceof InternalServerError); + } + + @Test + public void testUpdateSchemaValidationRules() { + ApiV1.GroupsApi proxy = mock(ApiV1.GroupsApi.class); + SchemaRegistryClientImpl client = new SchemaRegistryClientImpl(proxy); + Response response = mock(Response.class); + doReturn(response).when(proxy).updateSchemaValidationRules(anyString(), any()); + + doReturn(Response.Status.OK.getStatusCode()).when(response).getStatus(); + SchemaValidationRules schemaValidationRules = SchemaValidationRules.of(Compatibility.backward()); + client.updateSchemaValidationRules("mygroup", schemaValidationRules, null); + assertEquals(response.getStatus(), Response.Status.OK.getStatusCode()); + // Precondition Failed + doReturn(Response.Status.CONFLICT.getStatusCode()).when(response).getStatus(); + assertFalse(client.updateSchemaValidationRules("mygroup", schemaValidationRules, null)); + // NotFound exception + doReturn(Response.Status.NOT_FOUND.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.updateSchemaValidationRules("mygroup", schemaValidationRules, null), + e -> e instanceof ResourceNotFoundException); + // Runtime Exception + doReturn(Response.Status.EXPECTATION_FAILED.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.updateSchemaValidationRules("mygroup", schemaValidationRules, null), + e -> e instanceof InternalServerError); + } + + @Test + public void testSchemasApi() { + ApiV1.GroupsApi proxy = mock(ApiV1.GroupsApi.class); + SchemaRegistryClientImpl client = new SchemaRegistryClientImpl(proxy); + Response response = mock(Response.class); + doReturn(response).when(proxy).getSchemas(anyString(), any()); + + doReturn(Response.Status.OK.getStatusCode()).when(response).getStatus(); + doReturn(Response.Status.OK.getStatusCode()).when(response).getStatus(); + SerializationFormat serializationFormat = SerializationFormat.custom("custom"); + ByteBuffer schemaData = ByteBuffer.wrap(new byte[0]); + SchemaInfo schemaInfo = new SchemaInfo("schema1", serializationFormat, schemaData, ImmutableMap.of()); + VersionInfo versionInfo = new VersionInfo("schema1", 5, 5); + io.pravega.schemaregistry.contract.generated.rest.model.SchemaWithVersion schemaVersion = new io.pravega.schemaregistry.contract.generated.rest.model.SchemaWithVersion() + .schemaInfo(ModelHelper.encode(schemaInfo)).version(ModelHelper.encode(versionInfo)); + SchemaVersionsList schemaList = new SchemaVersionsList(); + schemaList.addSchemasItem(schemaVersion); + doReturn(schemaList).when(response).readEntity(SchemaVersionsList.class); + List output = client.getSchemas("mygroup"); + assertEquals(1, output.size()); + assertEquals("schema1", output.get(0).getSchemaInfo().getType()); + //NotFound Exception + doReturn(Response.Status.NOT_FOUND.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", () -> client.getSchemas("mygroup"), + e -> e instanceof ResourceNotFoundException); + // Runtime exception + doReturn(Response.Status.EXPECTATION_FAILED.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", () -> client.getSchemas("mygroup"), + e -> e instanceof 
InternalServerError); + } + + @Test + public void testAddSchema() { + ApiV1.GroupsApi proxy = mock(ApiV1.GroupsApi.class); + SchemaRegistryClientImpl client = new SchemaRegistryClientImpl(proxy); + Response response = mock(Response.class); + doReturn(response).when(proxy).addSchema(anyString(), any()); + doReturn(Response.Status.CREATED.getStatusCode()).when(response).getStatus(); + SerializationFormat serializationFormat = SerializationFormat.custom("custom"); + ByteBuffer schemaData = ByteBuffer.wrap(new byte[0]); + SchemaInfo schemaInfo = new SchemaInfo("schema1", serializationFormat, schemaData, ImmutableMap.of()); + io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo versionInfo = + new io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo().version( + 5).type("schema2").ordinal(5); + doReturn(versionInfo).when(response).readEntity( + io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo.class); + VersionInfo versionInfo1 = client.addSchema("mygroup", schemaInfo); + assertEquals(5, versionInfo1.getVersion()); + assertEquals("schema2", versionInfo1.getType()); + assertEquals(5, versionInfo1.getOrdinal()); + // NotFound Exception + doReturn(Response.Status.NOT_FOUND.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.addSchema("mygroup", schemaInfo), e -> e instanceof ResourceNotFoundException); + // SchemaIncompatible exception + doReturn(Response.Status.CONFLICT.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.addSchema("mygroup", schemaInfo), e -> e instanceof SchemaValidationFailedException); + // SerializationFormatInvalid Exception + doReturn(Response.Status.EXPECTATION_FAILED.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.addSchema("mygroup", schemaInfo), e -> e instanceof SerializationMismatchException); + //Runtime Exception + doReturn(Response.Status.BAD_GATEWAY.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.addSchema("mygroup", schemaInfo), e -> e instanceof InternalServerError); + } + + @Test + public void testGetSchema() { + ApiV1.GroupsApi proxy = mock(ApiV1.GroupsApi.class); + SchemaRegistryClientImpl client = new SchemaRegistryClientImpl(proxy); + Response response = mock(Response.class); + doReturn(response).when(proxy).getSchemaFromVersionOrdinal(anyString(), anyInt()); + + doReturn(Response.Status.OK.getStatusCode()).when(response).getStatus(); + io.pravega.schemaregistry.contract.generated.rest.model.SerializationFormat serializationFormat = ModelHelper.encode(SerializationFormat.custom("custom")); + byte[] schemaData = new byte[0]; + + io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo schemaInfo = + new io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo() + .schemaData(schemaData).type("schema1").serializationFormat(serializationFormat).properties(Collections.emptyMap()); + VersionInfo versionInfo = new VersionInfo("schema2", 5, 5); + doReturn(schemaInfo).when(response).readEntity( + io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo.class); + SchemaInfo schemaInfo1 = client.getSchemaForVersion("mygroup", versionInfo); + assertEquals(schemaInfo.getType(), schemaInfo1.getType()); + // NotFound Exception + 
doReturn(Response.Status.NOT_FOUND.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.getSchemaForVersion("mygroup", versionInfo), e -> e instanceof ResourceNotFoundException); + // Runtime Exception + doReturn(Response.Status.CONFLICT.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.getSchemaForVersion("mygroup", versionInfo), e -> e instanceof InternalServerError); + } + + @Test + public void testGetEncodingInfo() { + ApiV1.GroupsApi proxy = mock(ApiV1.GroupsApi.class); + SchemaRegistryClientImpl client = new SchemaRegistryClientImpl(proxy); + Response response = mock(Response.class); + doReturn(response).when(proxy).getEncodingInfo(anyString(), anyInt()); + + doReturn(Response.Status.OK.getStatusCode()).when(response).getStatus(); + VersionInfo versionInfo = new VersionInfo("schema2", 5, 5); + SerializationFormat serializationFormat = SerializationFormat.custom("custom"); + ByteBuffer schemaData = ByteBuffer.wrap(new byte[0]); + SchemaInfo schemaInfo = new SchemaInfo("schema1", serializationFormat, schemaData, ImmutableMap.of()); + String codecType = "gzip"; + EncodingInfo encodingInfo = new EncodingInfo(versionInfo, schemaInfo, codecType); + EncodingId encodingId = new EncodingId(5); + doReturn(ModelHelper.encode(encodingInfo)).when(response).readEntity( + io.pravega.schemaregistry.contract.generated.rest.model.EncodingInfo.class); + EncodingInfo encodingInfo1 = client.getEncodingInfo("mygroup", encodingId); + assertEquals(encodingInfo.getCodecType(), encodingInfo1.getCodecType()); + assertEquals(encodingInfo.getSchemaInfo(), encodingInfo1.getSchemaInfo()); + assertEquals(encodingInfo.getVersionInfo(), encodingInfo1.getVersionInfo()); + // NotFound exception + doReturn(Response.Status.NOT_FOUND.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.getEncodingInfo("mygroup", encodingId), e -> e instanceof ResourceNotFoundException); + // Runtime Exception + doReturn(Response.Status.CONFLICT.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.getEncodingInfo("mygroup", encodingId), e -> e instanceof InternalServerError); + } + + @Test + public void testGetEncodingId() { + ApiV1.GroupsApi proxy = mock(ApiV1.GroupsApi.class); + SchemaRegistryClientImpl client = new SchemaRegistryClientImpl(proxy); + Response response = mock(Response.class); + doReturn(response).when(proxy).getEncodingId(anyString(), any()); + + doReturn(Response.Status.OK.getStatusCode()).when(response).getStatus(); + String codecType = "gzip"; + VersionInfo versionInfo = new VersionInfo("schema2", 5, 5); + io.pravega.schemaregistry.contract.generated.rest.model.EncodingId encodingId = ModelHelper.encode(new EncodingId(5)); + doReturn(encodingId).when(response).readEntity( + io.pravega.schemaregistry.contract.generated.rest.model.EncodingId.class); + EncodingId encodingId1 = client.getEncodingId("mygroup", versionInfo, codecType); + assertEquals(encodingId.getEncodingId().intValue(), encodingId1.getId()); + // NotFound Exception + doReturn(Response.Status.NOT_FOUND.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.getEncodingId("mygroup", versionInfo, codecType), e -> e instanceof ResourceNotFoundException); + // 
StringNotFound Exception + doReturn(Response.Status.PRECONDITION_FAILED.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.getEncodingId("mygroup", versionInfo, codecType), e -> e instanceof CodecTypeNotRegisteredException); + // Runtime Exception + doReturn(Response.Status.CONFLICT.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.getEncodingId("mygroup", versionInfo, codecType), e -> e instanceof InternalServerError); + } + + @Test + public void testGetLatestSchemaForGroup() { + ApiV1.GroupsApi proxy = mock(ApiV1.GroupsApi.class); + SchemaRegistryClientImpl client = new SchemaRegistryClientImpl(proxy); + Response response = mock(Response.class); + doReturn(response).when(proxy).getSchemas(anyString(), any()); + + doReturn(Response.Status.OK.getStatusCode()).when(response).getStatus(); + VersionInfo versionInfo = new VersionInfo("schema2", 5, 5); + SerializationFormat serializationFormat = SerializationFormat.custom("custom"); + ByteBuffer schemaData = ByteBuffer.wrap(new byte[0]); + SchemaInfo schemaInfo = new SchemaInfo("schema1", serializationFormat, schemaData, ImmutableMap.of()); + SchemaWithVersion schemaWithVersion = new SchemaWithVersion(schemaInfo, versionInfo); + SchemaVersionsList schemaWithVersions = new SchemaVersionsList().schemas(Collections.singletonList(ModelHelper.encode(schemaWithVersion))); + doReturn(schemaWithVersions).when(response).readEntity( + SchemaVersionsList.class); + SchemaWithVersion schemaWithVersion1 = client.getLatestSchemaVersion("mygroup", null); + assertEquals(schemaWithVersion.getSchemaInfo(), schemaWithVersion1.getSchemaInfo()); + assertEquals(schemaWithVersion.getVersionInfo(), schemaWithVersion1.getVersionInfo()); + // NotFound Exception + doReturn(Response.Status.NOT_FOUND.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.getLatestSchemaVersion("mygroup", null), e -> e instanceof ResourceNotFoundException); + // Runtime Exception + doReturn(Response.Status.CONFLICT.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.getLatestSchemaVersion("mygroup", null), e -> e instanceof InternalServerError); + + doReturn(Response.Status.OK.getStatusCode()).when(response).getStatus(); + versionInfo = new VersionInfo("schema2", 5, 5); + serializationFormat = SerializationFormat.custom("custom"); + schemaInfo = new SchemaInfo("schema1", serializationFormat, schemaData, ImmutableMap.of()); + schemaWithVersion = new SchemaWithVersion(schemaInfo, versionInfo); + doReturn(ModelHelper.encode(schemaWithVersion)).when(response).readEntity( + io.pravega.schemaregistry.contract.generated.rest.model.SchemaWithVersion.class); + schemaWithVersion1 = client.getLatestSchemaVersion("mygroup", "myobject"); + assertEquals(schemaWithVersion.getSchemaInfo(), schemaWithVersion1.getSchemaInfo()); + assertEquals(schemaWithVersion.getVersionInfo(), schemaWithVersion1.getVersionInfo()); + // NotFound Exception + doReturn(Response.Status.NOT_FOUND.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.getLatestSchemaVersion("mygroup", "myobject"), e -> e instanceof ResourceNotFoundException); + // Runtime Exception + 
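        // For illustration only (a sketch that is not part of this patch; writerSchema is a placeholder SchemaInfo):
        // the encoding-id calls exercised by these mocks are intended as a write/read round trip, roughly:
        //
        //     // writer side: resolve the version of the schema used for serialization, then an EncodingId
        //     VersionInfo version = client.getVersionForSchema("mygroup", writerSchema);
        //     EncodingId id = client.getEncodingId("mygroup", version, "gzip");
        //     // the id is stored alongside the encoded payload
        //
        //     // reader side: resolve the schema and codec type back from the stored id
        //     EncodingInfo info = client.getEncodingInfo("mygroup", id);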
doReturn(Response.Status.CONFLICT.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.getLatestSchemaVersion("mygroup", "myobject"), e -> e instanceof InternalServerError); + } + + @Test + public void testGroupEvolutionHistory() { + ApiV1.GroupsApi proxy = mock(ApiV1.GroupsApi.class); + SchemaRegistryClientImpl client = new SchemaRegistryClientImpl(proxy); + Response response = mock(Response.class); + doReturn(response).when(proxy).getGroupHistory(anyString()); + + doReturn(Response.Status.OK.getStatusCode()).when(response).getStatus(); + VersionInfo versionInfo = new VersionInfo("schema2", 5, 5); + SerializationFormat serializationFormat = SerializationFormat.custom("custom"); + ByteBuffer schemaData = ByteBuffer.wrap(new byte[0]); + SchemaInfo schemaInfo = new SchemaInfo("schema1", serializationFormat, schemaData, ImmutableMap.of()); + SchemaValidationRules schemaValidationRules = SchemaValidationRules.of(Compatibility.backward()); + GroupHistoryRecord groupHistoryRecord = new io.pravega.schemaregistry.contract.generated.rest.model.GroupHistoryRecord() + .schemaInfo(ModelHelper.encode(schemaInfo)).version(ModelHelper.encode(versionInfo)) + .validationRules(ModelHelper.encode(schemaValidationRules)).timestamp(100L).schemaString(""); + GroupHistory history = new GroupHistory(); + history.addHistoryItem(groupHistoryRecord); + doReturn(history).when(response).readEntity(GroupHistory.class); + List groupHistoryList = client.getGroupHistory("mygroup"); + assertEquals(1, groupHistoryList.size()); + assertEquals(schemaValidationRules, groupHistoryList.get(0).getRules()); + assertEquals(schemaInfo, groupHistoryList.get(0).getSchema()); + assertEquals(versionInfo, groupHistoryList.get(0).getVersion()); + assertEquals(100L, groupHistoryList.get(0).getTimestamp()); + assertEquals("", groupHistoryList.get(0).getSchemaString()); + //NotFound Exception + doReturn(Response.Status.NOT_FOUND.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.getGroupHistory("mygroup"), e -> e instanceof ResourceNotFoundException); + //Runtime Exception + doReturn(Response.Status.CONFLICT.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.getGroupHistory("mygroup"), e -> e instanceof InternalServerError); + } + + @Test + public void testGetSchemaVersion() { + ApiV1.GroupsApi proxy = mock(ApiV1.GroupsApi.class); + SchemaRegistryClientImpl client = new SchemaRegistryClientImpl(proxy); + Response response = mock(Response.class); + doReturn(response).when(proxy).getSchemaVersion(anyString(), any()); + + doReturn(Response.Status.OK.getStatusCode()).when(response).getStatus(); + SerializationFormat serializationFormat = SerializationFormat.custom("custom"); + ByteBuffer schemaData = ByteBuffer.wrap(new byte[0]); + SchemaInfo schemaInfo = new SchemaInfo("schema1", serializationFormat, schemaData, ImmutableMap.of()); + VersionInfo versionInfo = new VersionInfo("schema2", 5, 5); + doReturn(ModelHelper.encode(versionInfo)).when(response).readEntity( + io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo.class); + VersionInfo versionInfo1 = client.getVersionForSchema("mygroup", schemaInfo); + assertEquals(versionInfo.getType(), versionInfo1.getType()); + assertEquals(versionInfo.getVersion(), versionInfo1.getVersion()); + //NotFound Exception + 
doReturn(Response.Status.NOT_FOUND.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.getVersionForSchema("mygroup", schemaInfo), e -> e instanceof ResourceNotFoundException); + //Runtime Exception + doReturn(Response.Status.CONFLICT.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.getVersionForSchema("mygroup", schemaInfo), e -> e instanceof InternalServerError); + } + + @Test + public void testGetSchemaVersions() { + ApiV1.GroupsApi proxy = mock(ApiV1.GroupsApi.class); + SchemaRegistryClientImpl client = new SchemaRegistryClientImpl(proxy); + Response response = mock(Response.class); + doReturn(response).when(proxy).getSchemaVersions(anyString(), any()); + + doReturn(Response.Status.OK.getStatusCode()).when(response).getStatus(); + SerializationFormat serializationFormat = SerializationFormat.custom("custom"); + ByteBuffer schemaData = ByteBuffer.wrap(new byte[0]); + + SchemaInfo schemaInfo = new SchemaInfo("schema1", serializationFormat, schemaData, ImmutableMap.of()); + VersionInfo versionInfo = new VersionInfo("schema2", 5, 5); + SchemaWithVersion schemaWithVersion = new SchemaWithVersion(schemaInfo, versionInfo); + SchemaVersionsList list = new SchemaVersionsList().schemas(Collections.singletonList(ModelHelper.encode(schemaWithVersion))); + doReturn(list).when(response).readEntity(SchemaVersionsList.class); + List result = Lists.newArrayList(client.getSchemaVersions("mygroup", null)); + assertEquals(result.size(), 1); + assertEquals(versionInfo, result.get(0).getVersionInfo()); + assertEquals(schemaInfo, result.get(0).getSchemaInfo()); + + result = Lists.newArrayList(client.getSchemaVersions("mygroup", schemaInfo.getType())); + assertEquals(result.size(), 1); + assertEquals(versionInfo, result.get(0).getVersionInfo()); + assertEquals(schemaInfo, result.get(0).getSchemaInfo()); + + //NotFound Exception + doReturn(Response.Status.NOT_FOUND.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> Lists.newArrayList(client.getSchemaVersions("mygroup", null)), + e -> e instanceof ResourceNotFoundException); + //Runtime Exception + doReturn(Response.Status.CONFLICT.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> Lists.newArrayList(client.getSchemaVersions("mygroup", null)), e -> e instanceof InternalServerError); + } + + @Test + public void testValidateSchema() { + ApiV1.GroupsApi proxy = mock(ApiV1.GroupsApi.class); + SchemaRegistryClientImpl client = new SchemaRegistryClientImpl(proxy); + Response response = mock(Response.class); + doReturn(response).when(proxy).validate(anyString(), any()); + + doReturn(Response.Status.OK.getStatusCode()).when(response).getStatus(); + SerializationFormat serializationFormat = SerializationFormat.custom("custom"); + ByteBuffer schemaData = ByteBuffer.wrap(new byte[0]); + + SchemaInfo schemaInfo = new SchemaInfo("schema1", serializationFormat, schemaData, ImmutableMap.of()); + Valid valid = new Valid().valid(Boolean.TRUE); + doReturn(valid).when(response).readEntity(Valid.class); + Boolean valid1 = client.validateSchema("mygroup", schemaInfo); + assertEquals(valid.isValid(), valid1); + //NotFound Exception + doReturn(Response.Status.NOT_FOUND.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been 
thrown", + () -> client.validateSchema("mygroup", schemaInfo), e -> e instanceof ResourceNotFoundException); + //Runtime Exception + doReturn(Response.Status.CONFLICT.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.validateSchema("mygroup", schemaInfo), e -> e instanceof InternalServerError); + } + + @Test + public void testCanRead() { + ApiV1.GroupsApi proxy = mock(ApiV1.GroupsApi.class); + SchemaRegistryClientImpl client = new SchemaRegistryClientImpl(proxy); + Response response = mock(Response.class); + doReturn(response).when(proxy).canRead(anyString(), any()); + + doReturn(Response.Status.OK.getStatusCode()).when(response).getStatus(); + SerializationFormat serializationFormat = SerializationFormat.custom("custom"); + ByteBuffer schemaData = ByteBuffer.wrap(new byte[0]); + + SchemaInfo schemaInfo = new SchemaInfo("schema1", serializationFormat, schemaData, ImmutableMap.of()); + CanRead canRead = new CanRead().compatible(Boolean.TRUE); + doReturn(canRead).when(response).readEntity(CanRead.class); + Boolean canRead1 = client.canReadUsing("mygroup", schemaInfo); + assertEquals(canRead.isCompatible(), canRead1); + //NotFound Exception + doReturn(Response.Status.NOT_FOUND.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.canReadUsing("mygroup", schemaInfo), e -> e instanceof ResourceNotFoundException); + //Runtime Exception + doReturn(Response.Status.CONFLICT.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.canReadUsing("mygroup", schemaInfo), e -> e instanceof InternalServerError); + } + + @Test + public void testGetCodecTypes() { + ApiV1.GroupsApi proxy = mock(ApiV1.GroupsApi.class); + SchemaRegistryClientImpl client = new SchemaRegistryClientImpl(proxy); + Response response = mock(Response.class); + doReturn(response).when(proxy).getCodecTypesList(anyString()); + + doReturn(Response.Status.OK.getStatusCode()).when(response).getStatus(); + String codecType = "gzip"; + String codecType1 = "snappy"; + CodecTypesList codecTypesList = new CodecTypesList(); + codecTypesList.addCodecTypesItem(codecType); + codecTypesList.addCodecTypesItem(codecType1); + doReturn(codecTypesList).when(response).readEntity(CodecTypesList.class); + List codecTypesList1 = client.getCodecTypes("mygroup"); + assertEquals(2, codecTypesList1.size()); + assertEquals("gzip", codecTypesList1.get(0)); + assertEquals("snappy", codecTypesList1.get(1)); + //NotFound Exception + doReturn(Response.Status.NOT_FOUND.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.getCodecTypes("mygroup"), e -> e instanceof ResourceNotFoundException); + //Runtime Exception + doReturn(Response.Status.CONFLICT.getStatusCode()).when(response).getStatus(); + AssertExtensions.assertThrows("An exception should have been thrown", + () -> client.getCodecTypes("mygroup"), e -> e instanceof InternalServerError); + } + + @Test + public void testAddCodecType() { + ApiV1.GroupsApi proxy = mock(ApiV1.GroupsApi.class); + SchemaRegistryClientImpl client = new SchemaRegistryClientImpl(proxy); + Response response = mock(Response.class); + doReturn(response).when(proxy).addCodecType(anyString(), any()); + + doReturn(Response.Status.CREATED.getStatusCode()).when(response).getStatus(); + String codecType = "gzip"; + 
client.addCodecType("mygroup", codecType);
+        assertEquals(Response.Status.CREATED.getStatusCode(), response.getStatus());
+        //NotFound Exception
+        doReturn(Response.Status.NOT_FOUND.getStatusCode()).when(response).getStatus();
+        AssertExtensions.assertThrows("An exception should have been thrown",
+                () -> client.addCodecType("mygroup", codecType), e -> e instanceof ResourceNotFoundException);
+        //Runtime Exception
+        doReturn(Response.Status.CONFLICT.getStatusCode()).when(response).getStatus();
+        AssertExtensions.assertThrows("An exception should have been thrown",
+                () -> client.addCodecType("mygroup", codecType), e -> e instanceof InternalServerError);
+    }
+}
diff --git a/common/src/main/java/io/pravega/schemaregistry/common/ContinuationTokenIterator.java b/common/src/main/java/io/pravega/schemaregistry/common/ContinuationTokenIterator.java
new file mode 100644
index 000000000..ff998e60d
--- /dev/null
+++ b/common/src/main/java/io/pravega/schemaregistry/common/ContinuationTokenIterator.java
@@ -0,0 +1,93 @@
+/**
+ * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ */
+package io.pravega.schemaregistry.common;
+
+import lombok.Synchronized;
+
+import javax.annotation.concurrent.GuardedBy;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.NoSuchElementException;
+import java.util.Queue;
+import java.util.Set;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.function.Function;
+
+/**
+ * Continuation token iterator which fetches a batch of values using the loading function. Once those values have been
+ * iterated over, it uses the continuation token to read more values using the loading function until the function does
+ * not return a value.
+ * @param <T> Type of value.
+ * @param <Token> Type of continuation token.
+ */
+public class ContinuationTokenIterator<T, Token> implements Iterator<T> {
+    @GuardedBy("$lock")
+    private final Queue<T> queue;
+    private final Function<Token, Map.Entry<Token, Collection<T>>> loadingFunction;
+    @GuardedBy("lock")
+    private Token token;
+    @GuardedBy("$lock")
+    private T next;
+    @GuardedBy("$lock")
+    private boolean canHaveNext;
+    @GuardedBy("$lock")
+    private final Set<Token> tokens;
+
+    public ContinuationTokenIterator(Function<Token, Map.Entry<Token, Collection<T>>> loadingFunction, Token tokenIdentity) {
+        this.loadingFunction = loadingFunction;
+        this.queue = new LinkedBlockingQueue<>();
+        this.token = tokenIdentity;
+        this.canHaveNext = true;
+        this.next = null;
+        this.tokens = new HashSet<>();
+    }
+
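    // For illustration only (a sketch that is not part of this patch; MyPage and myService are hypothetical
    // placeholders): any function that maps a continuation token to (nextToken, batchOfItems) can be wrapped
    // here, which is the same pattern SchemaRegistryClientImpl.listGroups() uses to page over groups.
    //
    //     Function<String, Map.Entry<String, Collection<String>>> fetchPage =
    //             continuationToken -> {
    //                 MyPage page = myService.list(continuationToken);
    //                 return new AbstractMap.SimpleEntry<>(page.getNextToken(), page.getItems());
    //             };
    //     Iterator<String> items = new ContinuationTokenIterator<String, String>(fetchPage, null);
    //     items.forEachRemaining(System.out::println);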
+    @Synchronized
+    private void load() {
+        next = next == null ? queue.poll() : next;
+        while (next == null && canHaveNext) {
+            Map.Entry<Token, Collection<T>> result = loadingFunction.apply(token);
+            boolean tokenUpdated = result.getKey() != null && !tokens.contains(result.getKey());
+            if (result.getKey() != null) {
+                tokens.add(result.getKey());
+            }
+            token = result.getKey();
+
+            queue.addAll(result.getValue());
+            next = queue.poll();
+            if (next == null) {
+                canHaveNext = tokenUpdated;
+            }
+        }
+    }
+
+    @Synchronized
+    @Override
+    public boolean hasNext() {
+        load();
+        return canHaveNext;
+    }
+
+    @Synchronized
+    @Override
+    public T next() {
+        load();
+        if (next != null) {
+            T retVal = next;
+            next = null;
+            return retVal;
+        } else {
+            assert !canHaveNext;
+            throw new NoSuchElementException();
+        }
+    }
+}
diff --git a/common/src/main/java/io/pravega/schemaregistry/common/Either.java b/common/src/main/java/io/pravega/schemaregistry/common/Either.java
new file mode 100644
index 000000000..212ad99fe
--- /dev/null
+++ b/common/src/main/java/io/pravega/schemaregistry/common/Either.java
@@ -0,0 +1,51 @@
+/**
+ * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ */
+package io.pravega.schemaregistry.common;
+
+import com.google.common.base.Preconditions;
+import lombok.Data;
+
+/**
+ * A holder object consisting of either of two elements.
+ *
+ * The objects could be of any type. Exactly one of the values will exist while the other will be null.
+ * If a mutable object is stored in 'Either', then 'Either' itself effectively becomes mutable.
+ *
+ * @param <T> the left element type.
+ * @param <K> the right element type.
+ */
+@Data
+public class Either<T, K> {
+    private final T left;
+    private final K right;
+
+    private Either(T left, K right) {
+        this.left = left;
+        this.right = right;
+    }
+
+    public static <T, K> Either<T, K> left(T t) {
+        Preconditions.checkNotNull(t);
+        return new Either<>(t, null);
+    }
+
+    public static <T, K> Either<T, K> right(K k) {
+        Preconditions.checkNotNull(k);
+        return new Either<>(null, k);
+    }
+
+    public boolean isLeft() {
+        return left != null;
+    }
+
+    public boolean isRight() {
+        return right != null;
+    }
+}
diff --git a/common/src/main/java/io/pravega/schemaregistry/common/HashUtil.java b/common/src/main/java/io/pravega/schemaregistry/common/HashUtil.java
new file mode 100644
index 000000000..3875cbb44
--- /dev/null
+++ b/common/src/main/java/io/pravega/schemaregistry/common/HashUtil.java
@@ -0,0 +1,21 @@
+/**
+ * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.common; + +import com.google.common.hash.HashFunction; +import com.google.common.hash.Hashing; + +public class HashUtil { + private static final HashFunction HASH = Hashing.murmur3_128(); + + public static long getFingerprint(byte[] bytes) { + return HASH.hashBytes(bytes).asLong(); + } +} diff --git a/common/src/test/java/io/pravega/schemaregistry/common/ContinuationTokenIteratorTest.java b/common/src/test/java/io/pravega/schemaregistry/common/ContinuationTokenIteratorTest.java new file mode 100644 index 000000000..89989512a --- /dev/null +++ b/common/src/test/java/io/pravega/schemaregistry/common/ContinuationTokenIteratorTest.java @@ -0,0 +1,68 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.common; + +import com.google.common.collect.Lists; +import lombok.Data; +import org.junit.Test; + +import java.util.AbstractMap; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Queue; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.function.Function; + +import static org.junit.Assert.*; +import static org.mockito.Mockito.*; + +public class ContinuationTokenIteratorTest { + @Test + public void test() { + // 1. call method 1st call returns - list of 5 items + new token + // verify that call method is not called until all 10 are read. + // 2. call returns empty list + new token + // 3. call returns empty list + new token + // 4. call returns list of 10 items + new token + // verify that we consume 10 items without calling the callmethod + // 5. call returns empty list + same token. 
--> this should exit + Queue responses = spy(new LinkedBlockingQueue<>()); + responses.add(new ListWithToken(Lists.newArrayList(1, 2, 3, 4, 5), "1")); + responses.add(new ListWithToken(Collections.emptyList(), "2")); + responses.add(new ListWithToken(Collections.emptyList(), "3")); + responses.add(new ListWithToken(Lists.newArrayList(6, 7, 8, 9, 10), "4")); + responses.add(new ListWithToken(Collections.emptyList(), "4")); + Function>> func = token -> { + ListWithToken result = responses.poll(); + return new AbstractMap.SimpleEntry<>(result.token, result.list); + }; + ContinuationTokenIterator myIterator = new ContinuationTokenIterator<>(func, null); + for (int i = 0; i < 5; i++) { + assertTrue(myIterator.hasNext()); + assertEquals(myIterator.next().intValue(), i + 1); + } + verify(responses, times(1)).poll(); + for (int i = 5; i < 10; i++) { + assertTrue(myIterator.hasNext()); + assertEquals(myIterator.next().intValue(), i + 1); + } + verify(responses, times(4)).poll(); + assertFalse(myIterator.hasNext()); + verify(responses, times(5)).poll(); + } + + @Data + static class ListWithToken { + private final List list; + private final String token; + } +} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/data/Compatibility.java b/contract/src/main/java/io/pravega/schemaregistry/contract/data/Compatibility.java new file mode 100644 index 000000000..f8717337c --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/data/Compatibility.java @@ -0,0 +1,203 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.contract.data; + +import io.pravega.common.ObjectBuilder; +import lombok.Builder; +import lombok.Data; + +/** + * Defines different Compatibility policy options for schema evolution for schemas within a group. + * The choice of compatibility policy tells the Schema Registry service whether a schema should be accepted to evolve + * into new schema by comparing it with one or more existing versions of the schema. + * + * {@link Type#AllowAny}: allow any changes to schema without any checks performed by the registry. + * {@link Type#DenyAll}: disables any changes to the schema for the group. + * {@link Type#Backward}: a new schema can be used to read data written by previous schema. + * {@link Type#BackwardTransitive}: a new schema can be used read data written by any of previous schemas. + * {@link Type#BackwardTill}: a new schema can be used to read data written by any of previous schemas till schema + * identified by version {@link Compatibility#backwardTill}. + * {@link Type#Forward}: previous schema can be used to read data written by new schema. + * {@link Type#ForwardTransitive}: all previous schemas can read data written by new schema. + * {@link Type#ForwardTill}: All previous schemas till schema identified by version {@link Compatibility#forwardTill} + * can read data written by new schema. + * {@link Type#Full}: both backward and forward compatibility. + * {@link Type#FullTransitive}: both backward and forward compatibility with all previous schemas. + * {@link Type#BackwardAndForwardTill}: All previous schemas till schema identified by version {@link Compatibility#forwardTill} + * can read data written by new schema. 
New schema can be used to read data written by any of previous schemas till schema + * identified by version {@link Compatibility#backwardTill}. + */ +@Data +@Builder +public class Compatibility implements SchemaValidationRule { + /** + * Enum that defines the Type of compatibility policy. + */ + private final Type compatibility; + /** + * Version info to be specified if the compatibility policy choice is either {@link Type#BackwardTill} or + * {@link Type#BackwardAndForwardTill}. + */ + private final VersionInfo backwardTill; + /** + * Version info to be specified if the compatibility policy choice is either {@link Type#ForwardTill} or + * {@link Type#BackwardAndForwardTill}. + */ + private final VersionInfo forwardTill; + + private Compatibility(Type compatibility) { + this(compatibility, null, null); + } + + public Compatibility(Type compatibility, VersionInfo backwardTill, VersionInfo forwardTill) { + this.compatibility = compatibility; + this.backwardTill = backwardTill; + this.forwardTill = forwardTill; + } + + @Override + public String getName() { + return Compatibility.class.getSimpleName(); + } + + public enum Type { + AllowAny, + DenyAll, + Backward, + BackwardTill, + BackwardTransitive, + Forward, + ForwardTill, + ForwardTransitive, + BackwardAndForwardTill, + Full, + FullTransitive; + } + + /** + * Method to create a compatibility policy of type backward. Backward policy implies new schema will be validated + * to be capable of reading data written using the previous schema. + * + * @return Compatibility with Type.Backward. + */ + public static Compatibility backward() { + return new Compatibility(Type.Backward); + } + + /** + * Method to create a compatibility policy of type backward till. BackwardTill policy implies new schema will be validated + * to be capable of reading data written using all previous schemas till the version supplied as input. + * + * @param backwardTill version till which schemas should be checked for compatibility. + * @return Compatibility with Type.BackwardTill version. + */ + public static Compatibility backwardTill(VersionInfo backwardTill) { + return new Compatibility(Type.BackwardTill, backwardTill, null); + } + + /** + * Method to create a compatibility policy of type backward transitive. Backward transitive policy implies + * new schema will be validated to be capable of reading data written using all previous schema versions. + * + * @return Compatibility with Type.BackwardTransitive. + */ + public static Compatibility backwardTransitive() { + return new Compatibility(Type.BackwardTransitive); + } + + /** + * Method to create a compatibility policy of type forward. Forward policy implies new schema will be validated + * such that data written using new schema can be read using the previous schema. + * + * @return Compatibility with Type.Forward. + */ + public static Compatibility forward() { + return new Compatibility(Type.Forward); + } + + /** + * Method to create a compatibility policy of type forward till. ForwardTill policy implies new schema will be validated + * such that data written using new schema can be read using all previous schemas till the supplied version. + * + * @param forwardTill version till which schemas should be checked for compatibility. + * @return Compatibility with Type.ForwardTill version. + */ + public static Compatibility forwardTill(VersionInfo forwardTill) { + return new Compatibility(Type.ForwardTill, null, forwardTill); + } + + /** + * Method to create a compatibility policy of type forward transitive.
+ * Forward transitive policy implies new schema will be validated such that data written using new schema + * can be read using all previous schemas. + * + * @return Compatibility with Type.ForwardTransitive. + */ + public static Compatibility forwardTransitive() { + return new Compatibility(Type.ForwardTransitive); + } + + /** + * Method to create a compatibility policy of type full. Full means backward and forward compatibility check with the + * previous schema version, which means new schema can be used to read data written with previous schema and vice versa. + * + * @return Compatibility with Type.Full. + */ + public static Compatibility full() { + return new Compatibility(Type.Full); + } + + /** + * Method to create a compatibility policy of type full transitive. + * Full transitive means backward and forward compatibility check with all previous schema versions. + * This implies new schema can be used to read data written with any of the previous schemas and vice versa. + * + * @return Compatibility with Type.FullTransitive. + */ + public static Compatibility fullTransitive() { + return new Compatibility(Type.FullTransitive); + } + + /** + * Method to create a compatibility policy of type backward till and forward till. This is a combination of + * backward till and forward till policies. + * All previous schemas till schema identified by version {@link Compatibility#forwardTill} + * can read data written by new schema. New schema can be used to read data written by any of previous schemas till schema + * identified by version {@link Compatibility#backwardTill}. + * + * @param backwardTill version till which backward compatibility is checked for. + * @param forwardTill version till which forward compatibility is checked for. + * @return Compatibility with Type.BackwardAndForwardTill. + */ + public static Compatibility backwardTillAndForwardTill(VersionInfo backwardTill, VersionInfo forwardTill) { + return new Compatibility(Type.BackwardAndForwardTill, backwardTill, forwardTill); + } + + /** + * Disable compatibility checks and allow any schema to be registered. Effectively declares all schemas as compatible. + * + * @return Compatibility with Type.AllowAny. + */ + public static Compatibility allowAny() { + return new Compatibility(Type.AllowAny); + } + + /** + * Compatibility policy that disallows any new schema changes. Effectively rejects all schemas and declares them incompatible. + * + * @return Compatibility with Type.DenyAll. + */ + public static Compatibility denyAll() { + return new Compatibility(Type.DenyAll); + } + + public static class CompatibilityBuilder implements ObjectBuilder<Compatibility> { + } +} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/data/EncodingId.java b/contract/src/main/java/io/pravega/schemaregistry/contract/data/EncodingId.java new file mode 100644 index 000000000..2d1d625ca --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/data/EncodingId.java @@ -0,0 +1,35 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
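// [Editor's note] Minimal usage sketch (not part of the patch) for the Compatibility factory methods
// above; the VersionInfo arguments (type name, version, ordinal) are illustrative values only.
Compatibility backward = Compatibility.backward();
Compatibility bounded = Compatibility.backwardTillAndForwardTill(
        new VersionInfo("MyEvent", 2, 2),   // backward compatible down to version 2
        new VersionInfo("MyEvent", 4, 4));  // forward compatible up to version 4
SchemaValidationRules rules = SchemaValidationRules.of(backward);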
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.contract.data; + +import io.pravega.common.ObjectBuilder; +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; + +/** + * For each group unique set of Encoding Ids are generated for each unique combination of schema version and codec types + * registered in the group. + * The encoding id will typically be attached to the encoded data in a header to describe how to parse the following data. + * The registry service exposes APIs to resolve encoding id to {@link EncodingInfo} objects that include details about the + * encoding used. + */ +@Data +@Builder +@AllArgsConstructor +public class EncodingId { + /** + * A 4byte id that uniquely identifies a {@link VersionInfo} and codecType pair. + */ + private final int id; + + public static class EncodingIdBuilder implements ObjectBuilder { + } +} \ No newline at end of file diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/data/EncodingInfo.java b/contract/src/main/java/io/pravega/schemaregistry/contract/data/EncodingInfo.java new file mode 100644 index 000000000..f5e396ea2 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/data/EncodingInfo.java @@ -0,0 +1,33 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.contract.data; + +import lombok.Data; + +/** + * Encoding Info describes the details of encoding for each event payload. Each combination of schema version and codec type + * is uniquely identified by an {@link EncodingId}. + * The registry service exposes APIs to generate or resolve {@link EncodingId} to {@link EncodingInfo}. + */ +@Data +public class EncodingInfo { + /** + * Version of the schema which is used in encoding the data. + */ + private final VersionInfo versionInfo; + /** + * Actual schema which is used in encoding the data. + */ + private final SchemaInfo schemaInfo; + /** + * Codec type which is used in encoding the data. + */ + private final String codecType; +} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/data/GroupHistoryRecord.java b/contract/src/main/java/io/pravega/schemaregistry/contract/data/GroupHistoryRecord.java new file mode 100644 index 000000000..4b9a3d257 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/data/GroupHistoryRecord.java @@ -0,0 +1,47 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.contract.data; + +import lombok.Data; + +/** + * Describes changes to the group and the validation rules {@link GroupHistoryRecord#rules} that were + * applied while registering {@link GroupHistoryRecord#schema} and the unique {@link GroupHistoryRecord#version} identifier + * that was assigned to it. 
+ * It also has {@link GroupHistoryRecord#timestamp} when the schema was added and includes an optional + * {@link GroupHistoryRecord#schemaString} which is populated only if serialization format is one of {@link SerializationFormat#Avro} + * {@link SerializationFormat#Json} or {@link SerializationFormat#Protobuf}. This string is just to help make the schema human readable. + */ +@Data +public class GroupHistoryRecord { + /** + * Schema information object for the schema that was added to the group. + */ + private final SchemaInfo schema; + /** + * Version information object that uniquely identifies the schema in the group. + */ + private final VersionInfo version; + /** + * Validation rules that were applied at the time when the schema was registered. + */ + private final SchemaValidationRules rules; + /** + * Service's Time when the schema was registered. + */ + private final long timestamp; + /** + * A json format string representing the schema. This string will be populated only for serialization formats + * that the service can parse. + */ + private final String schemaString; +} + + diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/data/GroupProperties.java b/contract/src/main/java/io/pravega/schemaregistry/contract/data/GroupProperties.java new file mode 100644 index 000000000..4002ceebb --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/data/GroupProperties.java @@ -0,0 +1,74 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.contract.data; + +import com.google.common.collect.ImmutableMap; +import lombok.Builder; +import lombok.Data; + +/** + * Different configuration choices for a group. + * + * {@link GroupProperties#serializationFormat} identifies the serialization format used to describe the schema. + * {@link GroupProperties#schemaValidationRules} sets the schema validation policy that needs to be enforced for evolving schemas. + * {@link GroupProperties#allowMultipleTypes} that specifies if multiple schemas with distinct {@link SchemaInfo#type} + * are allowed to coexist within the group. A schema describes an object and each object type is distinctly identified by + * {@link SchemaInfo#type}. Registry service validates new schema with existing schema versions of the same name and versions + * it accordingly. Allowing multiple schemas, each versioned independently, allows applications to use schema registry groups + * for streaming scenarios like event sourcing, or message bus where different types of events could be written to the same + * stream. Similarly, a group with multiple schemas can be used to describe a database catalog with each schema representing + * a different table. + * The users can register new versions of each distinct type of schema, and the registry will check for compatibility + * for each type independently. + * {@link GroupProperties#properties} This is general purpose key value string to include any additional user defined information for the group. + */ +@Builder +@Data +public class GroupProperties { + /** + * Serialization format allowed for the group. + */ + private final SerializationFormat serializationFormat; + /** + * Schema validation rules to be applied for the group. 
+ */ + private final SchemaValidationRules schemaValidationRules; + /** + * Flag to indicate whether multiple types of schemas can be added to the group or not. If set to false, all schemas + * added to the group should have the same {@link SchemaInfo#type}. + */ + private final boolean allowMultipleTypes; + /** + * User defined key value strings for any metadata they want to associate with the group. + */ + private final ImmutableMap properties; + + public GroupProperties(SerializationFormat serializationFormat, SchemaValidationRules schemaValidationRules, boolean allowMultipleTypes) { + this(serializationFormat, schemaValidationRules, allowMultipleTypes, ImmutableMap.of()); + } + + public GroupProperties(SerializationFormat serializationFormat, SchemaValidationRules schemaValidationRules, boolean allowMultipleTypes, ImmutableMap properties) { + this.serializationFormat = serializationFormat; + this.schemaValidationRules = schemaValidationRules; + this.allowMultipleTypes = allowMultipleTypes; + this.properties = properties; + } + + public static final class GroupPropertiesBuilder { + private SchemaValidationRules schemaValidationRules = SchemaValidationRules.of(Compatibility.fullTransitive()); + private boolean allowMultipleTypes = false; + private ImmutableMap properties = ImmutableMap.of(); + + public GroupPropertiesBuilder compatibility(Compatibility compatibility) { + this.schemaValidationRules = SchemaValidationRules.of(compatibility); + return this; + } + } +} \ No newline at end of file diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaInfo.java b/contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaInfo.java new file mode 100644 index 000000000..bc4ed9f62 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaInfo.java @@ -0,0 +1,62 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.contract.data; + +import com.google.common.base.Preconditions; +import com.google.common.collect.ImmutableMap; +import io.pravega.common.ObjectBuilder; +import lombok.Builder; +import lombok.Data; + +import java.nio.ByteBuffer; + +/** + * Encapsulates properties of a schema. + * {@link SchemaInfo#type} object type represented by the schema. This is used to identify the exact object type. + * If (ref: {@link GroupProperties#allowMultipleTypes}) is set to true, the group will allow multiple schemas to coexist. + * {@link SchemaInfo#serializationFormat} Serialization format. + * {@link SchemaInfo#schemaData} Schema as an array of 8-bit unsigned bytes. This is schema-type specific and to be consumed + * by schema-type specific parsers. + * {@link SchemaInfo#properties} A key value map of strings where user defined metadata can be recorded with schemas. + * This is not interpreted by the registry service or client and can be used by applications for sharing any additional + * application specific information with the schema. + */ +@Data +@Builder +public class SchemaInfo { + /** + * Identifies the object type that is represented by the schema. + */ + private final String type; + /** + * Serialization format that this schema is intended to be used for. 
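// [Editor's note] Minimal sketch (not part of the patch) of assembling GroupProperties with the
// builder defined above; the serialization format and compatibility choices are illustrative.
GroupProperties props = GroupProperties.builder()
        .serializationFormat(SerializationFormat.Avro)
        .compatibility(Compatibility.backward())    // overrides the FullTransitive default
        .allowMultipleTypes(true)                   // several event types may share the group
        .build();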
+ */ + private final SerializationFormat serializationFormat; + /** + * Schema as an array of 8-bit unsigned bytes. + */ + private final ByteBuffer schemaData; + /** + * User defined key value strings that users can use to add any additional metadata to the schema. + */ + private final ImmutableMap properties; + + public SchemaInfo(String type, SerializationFormat serializationFormat, ByteBuffer schemaData, ImmutableMap properties) { + Preconditions.checkArgument(type != null); + Preconditions.checkArgument(serializationFormat != SerializationFormat.Any); + this.type = type; + this.serializationFormat = serializationFormat; + this.schemaData = schemaData; + this.properties = properties; + } + + public static class SchemaInfoBuilder implements ObjectBuilder { + } +} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaValidationRule.java b/contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaValidationRule.java new file mode 100644 index 000000000..c89670543 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaValidationRule.java @@ -0,0 +1,23 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.contract.data; + +/** + * Base interface to define all schema validation rules. Schema validation rules are applied whenever new schemas are registered + * and only schemas that satisfy validation rules are accepted by the registry into the group. + */ +public interface SchemaValidationRule { + /** + * Name of the rule to identify it with. + * + * @return name of the rule. + */ + String getName(); +} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaValidationRules.java b/contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaValidationRules.java new file mode 100644 index 000000000..06f77fb09 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaValidationRules.java @@ -0,0 +1,66 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.contract.data; + +import com.google.common.base.Preconditions; +import io.pravega.common.ObjectBuilder; +import lombok.Builder; +import lombok.Data; + +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +/** + * Schema validation rules that are applied for checking if a schema is valid. + * This contains a set of rules {@link SchemaValidationRule}. Currently the only rule that is supported is {@link Compatibility}. + * The schema will be compared against one or more existing schemas in the group by checking it for satisfying each of the + * rules. + */ +@Data +@Builder +public class SchemaValidationRules { + /** + * Map of schema validation rule name to corresponding schema validation rule. + */ + private final Map rules; + + private SchemaValidationRules(Map rules) { + this.rules = rules; + } + + /** + * Method to create a rule for compatibility. 
+ * + * @param compatibility compatibility policy to be used. + * @return A singleton rules map containing the compatibility rule. + */ + public static SchemaValidationRules of(Compatibility compatibility) { + return new SchemaValidationRules(Collections.singletonMap(compatibility.getName(), compatibility)); + } + + /** + * Method to create SchemaValidationRules from the list of supplied rules. If multiple same rule are present + * in the list then only the latest rule of each type is added to the Rules map. + * Currently the only rule supported is {@link Compatibility}. + * @param rules List of rules. + * @return SchemaValidationRules object. + */ + public static SchemaValidationRules of(List rules) { + Preconditions.checkNotNull(rules); + Preconditions.checkArgument(rules.stream().allMatch(x -> x instanceof Compatibility), "Only compatibility rule is supported."); + return new SchemaValidationRules(rules.stream().collect(Collectors.toMap(SchemaValidationRule::getName, x -> x))); + } + + public static class SchemaValidationRulesBuilder implements ObjectBuilder { + } + +} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaWithVersion.java b/contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaWithVersion.java new file mode 100644 index 000000000..45e730cbf --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaWithVersion.java @@ -0,0 +1,31 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.contract.data; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; + +/** + * Object that encapsulates schemaInfo with its associated version. + */ +@Data +@Builder +@AllArgsConstructor +public class SchemaWithVersion { + /** + * Schema Information object. + */ + private final SchemaInfo schemaInfo; + /** + * Version information object that identifies the corresponding schema object. + */ + private final VersionInfo versionInfo; +} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/data/SerializationFormat.java b/contract/src/main/java/io/pravega/schemaregistry/contract/data/SerializationFormat.java new file mode 100644 index 000000000..cecb9b257 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/data/SerializationFormat.java @@ -0,0 +1,45 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.contract.data; + +import lombok.AccessLevel; +import lombok.Getter; +import lombok.Setter; + +/** + * Different types of serialization formats used for serializing data. + * Registry supports Avro, Protobuf and Json serialization formats but any custom type could be used with the registry using custom type. + * + * If a serialization format is not present in the enum it can be specified using {@link SerializationFormat#custom} with {@link SerializationFormat#customTypeName}. + * Allowed values of {@link Compatibility} mode with custom type are AllowAny or DenyAll. 
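// [Editor's note] Minimal sketch (not part of the patch) of describing a schema with SchemaInfo and
// declaring a custom serialization format; the type name, schema string and format name are placeholders,
// and avroSchemaString is assumed to hold the schema's JSON definition.
ByteBuffer schemaData = ByteBuffer.wrap(avroSchemaString.getBytes(StandardCharsets.UTF_8));
SchemaInfo schema = new SchemaInfo("MyEvent", SerializationFormat.Avro, schemaData, ImmutableMap.of());
SerializationFormat capnProto = SerializationFormat.custom("CapnProto"); // custom formats pair with AllowAny or DenyAll only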
+ */ + +public enum SerializationFormat { + Avro, + Protobuf, + Json, + Any, + Custom; + + @Getter + @Setter(AccessLevel.PRIVATE) + private String customTypeName; + + /** + * Method to define a custom serialization format with a custom name. + * @param customTypeName Custom type name. + * @return {@link SerializationFormat#Custom} with supplied custom type name. + */ + public static SerializationFormat custom(String customTypeName) { + SerializationFormat type = SerializationFormat.Custom; + type.setCustomTypeName(customTypeName); + return type; + } +} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/data/VersionInfo.java b/contract/src/main/java/io/pravega/schemaregistry/contract/data/VersionInfo.java new file mode 100644 index 000000000..c281e75e3 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/data/VersionInfo.java @@ -0,0 +1,47 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.contract.data; + +import io.pravega.common.ObjectBuilder; +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; + +/** + * Version information object that encapsulates properties that uniquely identify a specific version of a schema within a group. + * + * {@link VersionInfo#type} is same as {@link SchemaInfo#type} which represents the object type for which the version is computed. + * {@link VersionInfo#version} the registry assigned monotonically increasing version number for the schema for specific object type. + * Since the version number is per object type, so type and version number forms a unique pair. + * {@link VersionInfo#ordinal} Absolute ordinal of the schema for all schemas in the group. This uniquely identifies the + * version within a group. + */ +@Data +@Builder +@AllArgsConstructor +public class VersionInfo { + /** + * Object type which is declared in the corresponding {@link SchemaInfo#type} for the schemainfo that is identified + * by this version info. + */ + private final String type; + /** + * A version number that identifies the position of schema among other schemas in the group that share the same 'type'. + */ + private final int version; + /** + * A position identifier that uniquely identifies the schema within a group and represents the order in which this + * schema was included in the group. + */ + private final int ordinal; + + public static class VersionInfoBuilder implements ObjectBuilder { + } +} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/AddedTo.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/AddedTo.java new file mode 100644 index 000000000..310f78ff8 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/AddedTo.java @@ -0,0 +1,101 @@ +/* + * Pravega Schema Registry APIs + * REST APIs for Pravega Schema Registry. + * + * OpenAPI spec version: 0.0.1 + * + * + * NOTE: This class is auto generated by the swagger code generator program. + * https://github.com/swagger-api/swagger-codegen.git + * Do not edit the class manually. 
+ */ + + +package io.pravega.schemaregistry.contract.generated.rest.model; + +import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonCreator; +import io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import javax.validation.constraints.*; + +/** + * Map of Group names to versionInfos in the group. This is for all the groups where the schema is registered. + */ +@ApiModel(description = "Map of Group names to versionInfos in the group. This is for all the groups where the schema is registered.") + +public class AddedTo { + @JsonProperty("groups") + private Map groups = new HashMap(); + + public AddedTo groups(Map groups) { + this.groups = groups; + return this; + } + + public AddedTo putGroupsItem(String key, VersionInfo groupsItem) { + this.groups.put(key, groupsItem); + return this; + } + + /** + * Get groups + * @return groups + **/ + @JsonProperty("groups") + @ApiModelProperty(required = true, value = "") + @NotNull + public Map getGroups() { + return groups; + } + + public void setGroups(Map groups) { + this.groups = groups; + } + + + @Override + public boolean equals(java.lang.Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + AddedTo addedTo = (AddedTo) o; + return Objects.equals(this.groups, addedTo.groups); + } + + @Override + public int hashCode() { + return Objects.hash(groups); + } + + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class AddedTo {\n"); + + sb.append(" groups: ").append(toIndentedString(groups)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces + * (except the first line). + */ + private String toIndentedString(java.lang.Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} + diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/CanRead.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/CanRead.java new file mode 100644 index 000000000..5f101741a --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/CanRead.java @@ -0,0 +1,92 @@ +/* + * Pravega Schema Registry APIs + * REST APIs for Pravega Schema Registry. + * + * OpenAPI spec version: 0.0.1 + * + * + * NOTE: This class is auto generated by the swagger code generator program. + * https://github.com/swagger-api/swagger-codegen.git + * Do not edit the class manually. + */ + + +package io.pravega.schemaregistry.contract.generated.rest.model; + +import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonCreator; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import javax.validation.constraints.*; + +/** + * Response object for canRead api. + */ +@ApiModel(description = "Response object for canRead api.") + +public class CanRead { + @JsonProperty("compatible") + private Boolean compatible = null; + + public CanRead compatible(Boolean compatible) { + this.compatible = compatible; + return this; + } + + /** + * Whether given schema is compatible and can be used for reads. 
Compatibility is checked against existing group schemas subject to group's configured compatibility policy. + * @return compatible + **/ + @JsonProperty("compatible") + @ApiModelProperty(required = true, value = "Whether given schema is compatible and can be used for reads. Compatibility is checked against existing group schemas subject to group's configured compatibility policy.") + @NotNull + public Boolean isCompatible() { + return compatible; + } + + public void setCompatible(Boolean compatible) { + this.compatible = compatible; + } + + + @Override + public boolean equals(java.lang.Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + CanRead canRead = (CanRead) o; + return Objects.equals(this.compatible, canRead.compatible); + } + + @Override + public int hashCode() { + return Objects.hash(compatible); + } + + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class CanRead {\n"); + + sb.append(" compatible: ").append(toIndentedString(compatible)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces + * (except the first line). + */ + private String toIndentedString(java.lang.Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} + diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/CodecTypesList.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/CodecTypesList.java new file mode 100644 index 000000000..96c10bacc --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/CodecTypesList.java @@ -0,0 +1,101 @@ +/* + * Pravega Schema Registry APIs + * REST APIs for Pravega Schema Registry. + * + * OpenAPI spec version: 0.0.1 + * + * + * NOTE: This class is auto generated by the swagger code generator program. + * https://github.com/swagger-api/swagger-codegen.git + * Do not edit the class manually. + */ + + +package io.pravega.schemaregistry.contract.generated.rest.model; + +import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonCreator; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import java.util.ArrayList; +import java.util.List; +import javax.validation.constraints.*; + +/** + * Response object for listCodecTypes. + */ +@ApiModel(description = "Response object for listCodecTypes.") + +public class CodecTypesList { + @JsonProperty("codecTypes") + private List codecTypes = null; + + public CodecTypesList codecTypes(List codecTypes) { + this.codecTypes = codecTypes; + return this; + } + + public CodecTypesList addCodecTypesItem(String codecTypesItem) { + if (this.codecTypes == null) { + this.codecTypes = new ArrayList(); + } + this.codecTypes.add(codecTypesItem); + return this; + } + + /** + * List of codecTypes. 
+ * @return codecTypes + **/ + @JsonProperty("codecTypes") + @ApiModelProperty(value = "List of codecTypes.") + public List getCodecTypes() { + return codecTypes; + } + + public void setCodecTypes(List codecTypes) { + this.codecTypes = codecTypes; + } + + + @Override + public boolean equals(java.lang.Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + CodecTypesList codecTypesList = (CodecTypesList) o; + return Objects.equals(this.codecTypes, codecTypesList.codecTypes); + } + + @Override + public int hashCode() { + return Objects.hash(codecTypes); + } + + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class CodecTypesList {\n"); + + sb.append(" codecTypes: ").append(toIndentedString(codecTypes)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces + * (except the first line). + */ + private String toIndentedString(java.lang.Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} + diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/Compatibility.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/Compatibility.java new file mode 100644 index 000000000..459893324 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/Compatibility.java @@ -0,0 +1,216 @@ +/* + * Pravega Schema Registry APIs + * REST APIs for Pravega Schema Registry. + * + * OpenAPI spec version: 0.0.1 + * + * + * NOTE: This class is auto generated by the swagger code generator program. + * https://github.com/swagger-api/swagger-codegen.git + * Do not edit the class manually. + */ + + +package io.pravega.schemaregistry.contract.generated.rest.model; + +import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonValue; +import io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import javax.validation.constraints.*; + +/** + * Schema Compatibility validation rule. + */ +@ApiModel(description = "Schema Compatibility validation rule.") + +public class Compatibility { + @JsonProperty("name") + private String name = null; + + /** + * Compatibility policy enum. 
+ */ + public enum PolicyEnum { + ALLOWANY("AllowAny"), + + DENYALL("DenyAll"), + + BACKWARD("Backward"), + + FORWARD("Forward"), + + FORWARDTRANSITIVE("ForwardTransitive"), + + BACKWARDTRANSITIVE("BackwardTransitive"), + + BACKWARDTILL("BackwardTill"), + + FORWARDTILL("ForwardTill"), + + BACKWARDANDFORWARDTILL("BackwardAndForwardTill"), + + FULL("Full"), + + FULLTRANSITIVE("FullTransitive"); + + private String value; + + PolicyEnum(String value) { + this.value = value; + } + + @Override + @JsonValue + public String toString() { + return String.valueOf(value); + } + + @JsonCreator + public static PolicyEnum fromValue(String text) { + for (PolicyEnum b : PolicyEnum.values()) { + if (String.valueOf(b.value).equals(text)) { + return b; + } + } + return null; + } + } + + @JsonProperty("policy") + private PolicyEnum policy = null; + + @JsonProperty("backwardTill") + private VersionInfo backwardTill = null; + + @JsonProperty("forwardTill") + private VersionInfo forwardTill = null; + + public Compatibility name(String name) { + this.name = name; + return this; + } + + /** + * Name is used to identify the type of SchemaValidationRule. For Compatibility rule the name should be \"Compatibility\". + * @return name + **/ + @JsonProperty("name") + @ApiModelProperty(required = true, value = "Name is used to identify the type of SchemaValidationRule. For Compatibility rule the name should be \"Compatibility\".") + @NotNull + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public Compatibility policy(PolicyEnum policy) { + this.policy = policy; + return this; + } + + /** + * Compatibility policy enum. + * @return policy + **/ + @JsonProperty("policy") + @ApiModelProperty(required = true, value = "Compatibility policy enum.") + @NotNull + public PolicyEnum getPolicy() { + return policy; + } + + public void setPolicy(PolicyEnum policy) { + this.policy = policy; + } + + public Compatibility backwardTill(VersionInfo backwardTill) { + this.backwardTill = backwardTill; + return this; + } + + /** + * Version for backward till if policy is BackwardTill or BackwardAndForwardTill. + * @return backwardTill + **/ + @JsonProperty("backwardTill") + @ApiModelProperty(value = "Version for backward till if policy is BackwardTill or BackwardAndForwardTill.") + public VersionInfo getBackwardTill() { + return backwardTill; + } + + public void setBackwardTill(VersionInfo backwardTill) { + this.backwardTill = backwardTill; + } + + public Compatibility forwardTill(VersionInfo forwardTill) { + this.forwardTill = forwardTill; + return this; + } + + /** + * Version for forward till if policy is ForwardTill or BackwardAndForwardTill. 
+ * @return forwardTill + **/ + @JsonProperty("forwardTill") + @ApiModelProperty(value = "Version for forward till if policy is ForwardTill or BackwardAndForwardTill.") + public VersionInfo getForwardTill() { + return forwardTill; + } + + public void setForwardTill(VersionInfo forwardTill) { + this.forwardTill = forwardTill; + } + + + @Override + public boolean equals(java.lang.Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Compatibility compatibility = (Compatibility) o; + return Objects.equals(this.name, compatibility.name) && + Objects.equals(this.policy, compatibility.policy) && + Objects.equals(this.backwardTill, compatibility.backwardTill) && + Objects.equals(this.forwardTill, compatibility.forwardTill); + } + + @Override + public int hashCode() { + return Objects.hash(name, policy, backwardTill, forwardTill); + } + + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class Compatibility {\n"); + + sb.append(" name: ").append(toIndentedString(name)).append("\n"); + sb.append(" policy: ").append(toIndentedString(policy)).append("\n"); + sb.append(" backwardTill: ").append(toIndentedString(backwardTill)).append("\n"); + sb.append(" forwardTill: ").append(toIndentedString(forwardTill)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces + * (except the first line). + */ + private String toIndentedString(java.lang.Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} + diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/CreateGroupRequest.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/CreateGroupRequest.java new file mode 100644 index 000000000..22d2b8b29 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/CreateGroupRequest.java @@ -0,0 +1,117 @@ +/* + * Pravega Schema Registry APIs + * REST APIs for Pravega Schema Registry. + * + * OpenAPI spec version: 0.0.1 + * + * + * NOTE: This class is auto generated by the swagger code generator program. + * https://github.com/swagger-api/swagger-codegen.git + * Do not edit the class manually. 
+ */ + + +package io.pravega.schemaregistry.contract.generated.rest.model; + +import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonCreator; +import io.pravega.schemaregistry.contract.generated.rest.model.GroupProperties; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import javax.validation.constraints.*; + +/** + * CreateGroupRequest + */ + +public class CreateGroupRequest { + @JsonProperty("groupName") + private String groupName = null; + + @JsonProperty("groupProperties") + private GroupProperties groupProperties = null; + + public CreateGroupRequest groupName(String groupName) { + this.groupName = groupName; + return this; + } + + /** + * Get groupName + * @return groupName + **/ + @JsonProperty("groupName") + @ApiModelProperty(required = true, value = "") + @NotNull + public String getGroupName() { + return groupName; + } + + public void setGroupName(String groupName) { + this.groupName = groupName; + } + + public CreateGroupRequest groupProperties(GroupProperties groupProperties) { + this.groupProperties = groupProperties; + return this; + } + + /** + * Get groupProperties + * @return groupProperties + **/ + @JsonProperty("groupProperties") + @ApiModelProperty(required = true, value = "") + @NotNull + public GroupProperties getGroupProperties() { + return groupProperties; + } + + public void setGroupProperties(GroupProperties groupProperties) { + this.groupProperties = groupProperties; + } + + + @Override + public boolean equals(java.lang.Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + CreateGroupRequest createGroupRequest = (CreateGroupRequest) o; + return Objects.equals(this.groupName, createGroupRequest.groupName) && + Objects.equals(this.groupProperties, createGroupRequest.groupProperties); + } + + @Override + public int hashCode() { + return Objects.hash(groupName, groupProperties); + } + + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class CreateGroupRequest {\n"); + + sb.append(" groupName: ").append(toIndentedString(groupName)).append("\n"); + sb.append(" groupProperties: ").append(toIndentedString(groupProperties)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces + * (except the first line). + */ + private String toIndentedString(java.lang.Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} + diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/EncodingId.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/EncodingId.java new file mode 100644 index 000000000..50f95270c --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/EncodingId.java @@ -0,0 +1,92 @@ +/* + * Pravega Schema Registry APIs + * REST APIs for Pravega Schema Registry. + * + * OpenAPI spec version: 0.0.1 + * + * + * NOTE: This class is auto generated by the swagger code generator program. + * https://github.com/swagger-api/swagger-codegen.git + * Do not edit the class manually. 
+ */ + + +package io.pravega.schemaregistry.contract.generated.rest.model; + +import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonCreator; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import javax.validation.constraints.*; + +/** + * Encoding id that uniquely identifies a schema version and codec type pair. + */ +@ApiModel(description = "Encoding id that uniquely identifies a schema version and codec type pair.") + +public class EncodingId { + @JsonProperty("encodingId") + private Integer encodingId = null; + + public EncodingId encodingId(Integer encodingId) { + this.encodingId = encodingId; + return this; + } + + /** + * encoding id generated by service. + * @return encodingId + **/ + @JsonProperty("encodingId") + @ApiModelProperty(required = true, value = "encoding id generated by service.") + @NotNull + public Integer getEncodingId() { + return encodingId; + } + + public void setEncodingId(Integer encodingId) { + this.encodingId = encodingId; + } + + + @Override + public boolean equals(java.lang.Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + EncodingId encodingId = (EncodingId) o; + return Objects.equals(this.encodingId, encodingId.encodingId); + } + + @Override + public int hashCode() { + return Objects.hash(encodingId); + } + + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class EncodingId {\n"); + + sb.append(" encodingId: ").append(toIndentedString(encodingId)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces + * (except the first line). + */ + private String toIndentedString(java.lang.Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} + diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/EncodingInfo.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/EncodingInfo.java new file mode 100644 index 000000000..1276ec038 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/EncodingInfo.java @@ -0,0 +1,144 @@ +/* + * Pravega Schema Registry APIs + * REST APIs for Pravega Schema Registry. + * + * OpenAPI spec version: 0.0.1 + * + * + * NOTE: This class is auto generated by the swagger code generator program. + * https://github.com/swagger-api/swagger-codegen.git + * Do not edit the class manually. + */ + + +package io.pravega.schemaregistry.contract.generated.rest.model; + +import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonCreator; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo; +import io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import javax.validation.constraints.*; + +/** + * Encoding information object that resolves the schema version and codec type used for corresponding encoding id. 
+ */ +@ApiModel(description = "Encoding information object that resolves the schema version and codec type used for corresponding encoding id.") + +public class EncodingInfo { + @JsonProperty("schemaInfo") + private SchemaInfo schemaInfo = null; + + @JsonProperty("versionInfo") + private VersionInfo versionInfo = null; + + @JsonProperty("codecType") + private String codecType = null; + + public EncodingInfo schemaInfo(SchemaInfo schemaInfo) { + this.schemaInfo = schemaInfo; + return this; + } + + /** + * Schema information object. + * @return schemaInfo + **/ + @JsonProperty("schemaInfo") + @ApiModelProperty(required = true, value = "Schema information object.") + @NotNull + public SchemaInfo getSchemaInfo() { + return schemaInfo; + } + + public void setSchemaInfo(SchemaInfo schemaInfo) { + this.schemaInfo = schemaInfo; + } + + public EncodingInfo versionInfo(VersionInfo versionInfo) { + this.versionInfo = versionInfo; + return this; + } + + /** + * Version information object. + * @return versionInfo + **/ + @JsonProperty("versionInfo") + @ApiModelProperty(required = true, value = "Version information object.") + @NotNull + public VersionInfo getVersionInfo() { + return versionInfo; + } + + public void setVersionInfo(VersionInfo versionInfo) { + this.versionInfo = versionInfo; + } + + public EncodingInfo codecType(String codecType) { + this.codecType = codecType; + return this; + } + + /** + * Codec type. + * @return codecType + **/ + @JsonProperty("codecType") + @ApiModelProperty(required = true, value = "Codec type.") + @NotNull + public String getCodecType() { + return codecType; + } + + public void setCodecType(String codecType) { + this.codecType = codecType; + } + + + @Override + public boolean equals(java.lang.Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + EncodingInfo encodingInfo = (EncodingInfo) o; + return Objects.equals(this.schemaInfo, encodingInfo.schemaInfo) && + Objects.equals(this.versionInfo, encodingInfo.versionInfo) && + Objects.equals(this.codecType, encodingInfo.codecType); + } + + @Override + public int hashCode() { + return Objects.hash(schemaInfo, versionInfo, codecType); + } + + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class EncodingInfo {\n"); + + sb.append(" schemaInfo: ").append(toIndentedString(schemaInfo)).append("\n"); + sb.append(" versionInfo: ").append(toIndentedString(versionInfo)).append("\n"); + sb.append(" codecType: ").append(toIndentedString(codecType)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces + * (except the first line). + */ + private String toIndentedString(java.lang.Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} + diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/GetEncodingIdRequest.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/GetEncodingIdRequest.java new file mode 100644 index 000000000..6376af636 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/GetEncodingIdRequest.java @@ -0,0 +1,117 @@ +/* + * Pravega Schema Registry APIs + * REST APIs for Pravega Schema Registry. + * + * OpenAPI spec version: 0.0.1 + * + * + * NOTE: This class is auto generated by the swagger code generator program. 
+ * https://github.com/swagger-api/swagger-codegen.git + * Do not edit the class manually. + */ + + +package io.pravega.schemaregistry.contract.generated.rest.model; + +import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonCreator; +import io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import javax.validation.constraints.*; + +/** + * GetEncodingIdRequest + */ + +public class GetEncodingIdRequest { + @JsonProperty("versionInfo") + private VersionInfo versionInfo = null; + + @JsonProperty("codecType") + private String codecType = null; + + public GetEncodingIdRequest versionInfo(VersionInfo versionInfo) { + this.versionInfo = versionInfo; + return this; + } + + /** + * Get versionInfo + * @return versionInfo + **/ + @JsonProperty("versionInfo") + @ApiModelProperty(required = true, value = "") + @NotNull + public VersionInfo getVersionInfo() { + return versionInfo; + } + + public void setVersionInfo(VersionInfo versionInfo) { + this.versionInfo = versionInfo; + } + + public GetEncodingIdRequest codecType(String codecType) { + this.codecType = codecType; + return this; + } + + /** + * Get codecType + * @return codecType + **/ + @JsonProperty("codecType") + @ApiModelProperty(required = true, value = "") + @NotNull + public String getCodecType() { + return codecType; + } + + public void setCodecType(String codecType) { + this.codecType = codecType; + } + + + @Override + public boolean equals(java.lang.Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + GetEncodingIdRequest getEncodingIdRequest = (GetEncodingIdRequest) o; + return Objects.equals(this.versionInfo, getEncodingIdRequest.versionInfo) && + Objects.equals(this.codecType, getEncodingIdRequest.codecType); + } + + @Override + public int hashCode() { + return Objects.hash(versionInfo, codecType); + } + + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class GetEncodingIdRequest {\n"); + + sb.append(" versionInfo: ").append(toIndentedString(versionInfo)).append("\n"); + sb.append(" codecType: ").append(toIndentedString(codecType)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces + * (except the first line). + */ + private String toIndentedString(java.lang.Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} + diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/GroupHistory.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/GroupHistory.java new file mode 100644 index 000000000..cf195ba93 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/GroupHistory.java @@ -0,0 +1,101 @@ +/* + * Pravega Schema Registry APIs + * REST APIs for Pravega Schema Registry. + * + * OpenAPI spec version: 0.0.1 + * + * + * NOTE: This class is auto generated by the swagger code generator program. + * https://github.com/swagger-api/swagger-codegen.git + * Do not edit the class manually. 
+ */ + + +package io.pravega.schemaregistry.contract.generated.rest.model; + +import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonCreator; +import io.pravega.schemaregistry.contract.generated.rest.model.GroupHistoryRecord; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import java.util.ArrayList; +import java.util.List; +import javax.validation.constraints.*; + +/** + * GroupHistory + */ + +public class GroupHistory { + @JsonProperty("history") + private List<GroupHistoryRecord> history = null; + + public GroupHistory history(List<GroupHistoryRecord> history) { + this.history = history; + return this; + } + + public GroupHistory addHistoryItem(GroupHistoryRecord historyItem) { + if (this.history == null) { + this.history = new ArrayList<GroupHistoryRecord>(); + } + this.history.add(historyItem); + return this; + } + + /** + * Chronological list of Group History records. + * @return history + **/ + @JsonProperty("history") + @ApiModelProperty(value = "Chronological list of Group History records.") + public List<GroupHistoryRecord> getHistory() { + return history; + } + + public void setHistory(List<GroupHistoryRecord> history) { + this.history = history; + } + + + @Override + public boolean equals(java.lang.Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + GroupHistory groupHistory = (GroupHistory) o; + return Objects.equals(this.history, groupHistory.history); + } + + @Override + public int hashCode() { + return Objects.hash(history); + } + + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class GroupHistory {\n"); + + sb.append(" history: ").append(toIndentedString(history)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces + * (except the first line). + */ + private String toIndentedString(java.lang.Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} + diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/GroupHistoryRecord.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/GroupHistoryRecord.java new file mode 100644 index 000000000..6d7dd7476 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/GroupHistoryRecord.java @@ -0,0 +1,194 @@ +/* + * Pravega Schema Registry APIs + * REST APIs for Pravega Schema Registry. + * + * OpenAPI spec version: 0.0.1 + * + * + * NOTE: This class is auto generated by the swagger code generator program. + * https://github.com/swagger-api/swagger-codegen.git + * Do not edit the class manually. + */ + + +package io.pravega.schemaregistry.contract.generated.rest.model; + +import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonCreator; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaValidationRules; +import io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import javax.validation.constraints.*; + +/** + * Group History Record that describes each schema evolution - schema information, version generated for the schema, time and rules used for schema validation. 
+ */ +@ApiModel(description = "Group History Record that describes each schema evolution - schema information, version generated for the schema, time and rules used for schema validation.") + +public class GroupHistoryRecord { + @JsonProperty("schemaInfo") + private SchemaInfo schemaInfo = null; + + @JsonProperty("version") + private VersionInfo version = null; + + @JsonProperty("validationRules") + private SchemaValidationRules validationRules = null; + + @JsonProperty("timestamp") + private Long timestamp = null; + + @JsonProperty("schemaString") + private String schemaString = null; + + public GroupHistoryRecord schemaInfo(SchemaInfo schemaInfo) { + this.schemaInfo = schemaInfo; + return this; + } + + /** + * Schema information object. + * @return schemaInfo + **/ + @JsonProperty("schemaInfo") + @ApiModelProperty(required = true, value = "Schema information object.") + @NotNull + public SchemaInfo getSchemaInfo() { + return schemaInfo; + } + + public void setSchemaInfo(SchemaInfo schemaInfo) { + this.schemaInfo = schemaInfo; + } + + public GroupHistoryRecord version(VersionInfo version) { + this.version = version; + return this; + } + + /** + * Schema version information object. + * @return version + **/ + @JsonProperty("version") + @ApiModelProperty(required = true, value = "Schema version information object.") + @NotNull + public VersionInfo getVersion() { + return version; + } + + public void setVersion(VersionInfo version) { + this.version = version; + } + + public GroupHistoryRecord validationRules(SchemaValidationRules validationRules) { + this.validationRules = validationRules; + return this; + } + + /** + * Schema validation rules applied. + * @return validationRules + **/ + @JsonProperty("validationRules") + @ApiModelProperty(required = true, value = "Schema validation rules applied.") + @NotNull + public SchemaValidationRules getValidationRules() { + return validationRules; + } + + public void setValidationRules(SchemaValidationRules validationRules) { + this.validationRules = validationRules; + } + + public GroupHistoryRecord timestamp(Long timestamp) { + this.timestamp = timestamp; + return this; + } + + /** + * Timestamp when the schema was added. + * @return timestamp + **/ + @JsonProperty("timestamp") + @ApiModelProperty(required = true, value = "Timestamp when the schema was added.") + @NotNull + public Long getTimestamp() { + return timestamp; + } + + public void setTimestamp(Long timestamp) { + this.timestamp = timestamp; + } + + public GroupHistoryRecord schemaString(String schemaString) { + this.schemaString = schemaString; + return this; + } + + /** + * Schema as json string for serialization formats that registry service understands. 
+ * @return schemaString + **/ + @JsonProperty("schemaString") + @ApiModelProperty(value = "Schema as json string for serialization formats that registry service understands.") + public String getSchemaString() { + return schemaString; + } + + public void setSchemaString(String schemaString) { + this.schemaString = schemaString; + } + + + @Override + public boolean equals(java.lang.Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + GroupHistoryRecord groupHistoryRecord = (GroupHistoryRecord) o; + return Objects.equals(this.schemaInfo, groupHistoryRecord.schemaInfo) && + Objects.equals(this.version, groupHistoryRecord.version) && + Objects.equals(this.validationRules, groupHistoryRecord.validationRules) && + Objects.equals(this.timestamp, groupHistoryRecord.timestamp) && + Objects.equals(this.schemaString, groupHistoryRecord.schemaString); + } + + @Override + public int hashCode() { + return Objects.hash(schemaInfo, version, validationRules, timestamp, schemaString); + } + + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class GroupHistoryRecord {\n"); + + sb.append(" schemaInfo: ").append(toIndentedString(schemaInfo)).append("\n"); + sb.append(" version: ").append(toIndentedString(version)).append("\n"); + sb.append(" validationRules: ").append(toIndentedString(validationRules)).append("\n"); + sb.append(" timestamp: ").append(toIndentedString(timestamp)).append("\n"); + sb.append(" schemaString: ").append(toIndentedString(schemaString)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces + * (except the first line). + */ + private String toIndentedString(java.lang.Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} + diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/GroupProperties.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/GroupProperties.java new file mode 100644 index 000000000..4bbb60b12 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/GroupProperties.java @@ -0,0 +1,179 @@ +/* + * Pravega Schema Registry APIs + * REST APIs for Pravega Schema Registry. + * + * OpenAPI spec version: 0.0.1 + * + * + * NOTE: This class is auto generated by the swagger code generator program. + * https://github.com/swagger-api/swagger-codegen.git + * Do not edit the class manually. + */ + + +package io.pravega.schemaregistry.contract.generated.rest.model; + +import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonCreator; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaValidationRules; +import io.pravega.schemaregistry.contract.generated.rest.model.SerializationFormat; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import javax.validation.constraints.*; + +/** + * Metadata for a group. 
+ */ +@ApiModel(description = "Metadata for a group.") + +public class GroupProperties { + @JsonProperty("serializationFormat") + private SerializationFormat serializationFormat = null; + + @JsonProperty("schemaValidationRules") + private SchemaValidationRules schemaValidationRules = null; + + @JsonProperty("allowMultipleTypes") + private Boolean allowMultipleTypes = null; + + @JsonProperty("properties") + private Map properties = null; + + public GroupProperties serializationFormat(SerializationFormat serializationFormat) { + this.serializationFormat = serializationFormat; + return this; + } + + /** + * serialization format for the group. + * @return serializationFormat + **/ + @JsonProperty("serializationFormat") + @ApiModelProperty(required = true, value = "serialization format for the group.") + @NotNull + public SerializationFormat getSerializationFormat() { + return serializationFormat; + } + + public void setSerializationFormat(SerializationFormat serializationFormat) { + this.serializationFormat = serializationFormat; + } + + public GroupProperties schemaValidationRules(SchemaValidationRules schemaValidationRules) { + this.schemaValidationRules = schemaValidationRules; + return this; + } + + /** + * Validation rules to apply while registering new schema. + * @return schemaValidationRules + **/ + @JsonProperty("schemaValidationRules") + @ApiModelProperty(required = true, value = "Validation rules to apply while registering new schema.") + @NotNull + public SchemaValidationRules getSchemaValidationRules() { + return schemaValidationRules; + } + + public void setSchemaValidationRules(SchemaValidationRules schemaValidationRules) { + this.schemaValidationRules = schemaValidationRules; + } + + public GroupProperties allowMultipleTypes(Boolean allowMultipleTypes) { + this.allowMultipleTypes = allowMultipleTypes; + return this; + } + + /** + * Flag to indicate whether to allow multiple schemas representing distinct objects to be registered in the group. + * @return allowMultipleTypes + **/ + @JsonProperty("allowMultipleTypes") + @ApiModelProperty(required = true, value = "Flag to indicate whether to allow multiple schemas representing distinct objects to be registered in the group.") + @NotNull + public Boolean isAllowMultipleTypes() { + return allowMultipleTypes; + } + + public void setAllowMultipleTypes(Boolean allowMultipleTypes) { + this.allowMultipleTypes = allowMultipleTypes; + } + + public GroupProperties properties(Map properties) { + this.properties = properties; + return this; + } + + public GroupProperties putPropertiesItem(String key, String propertiesItem) { + if (this.properties == null) { + this.properties = new HashMap(); + } + this.properties.put(key, propertiesItem); + return this; + } + + /** + * User defined Key value strings. 
+ * @return properties + **/ + @JsonProperty("properties") + @ApiModelProperty(value = "User defined Key value strings.") + public Map getProperties() { + return properties; + } + + public void setProperties(Map properties) { + this.properties = properties; + } + + + @Override + public boolean equals(java.lang.Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + GroupProperties groupProperties = (GroupProperties) o; + return Objects.equals(this.serializationFormat, groupProperties.serializationFormat) && + Objects.equals(this.schemaValidationRules, groupProperties.schemaValidationRules) && + Objects.equals(this.allowMultipleTypes, groupProperties.allowMultipleTypes) && + Objects.equals(this.properties, groupProperties.properties); + } + + @Override + public int hashCode() { + return Objects.hash(serializationFormat, schemaValidationRules, allowMultipleTypes, properties); + } + + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class GroupProperties {\n"); + + sb.append(" serializationFormat: ").append(toIndentedString(serializationFormat)).append("\n"); + sb.append(" schemaValidationRules: ").append(toIndentedString(schemaValidationRules)).append("\n"); + sb.append(" allowMultipleTypes: ").append(toIndentedString(allowMultipleTypes)).append("\n"); + sb.append(" properties: ").append(toIndentedString(properties)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces + * (except the first line). + */ + private String toIndentedString(java.lang.Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} + diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/ListGroupsResponse.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/ListGroupsResponse.java new file mode 100644 index 000000000..966b6898f --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/ListGroupsResponse.java @@ -0,0 +1,128 @@ +/* + * Pravega Schema Registry APIs + * REST APIs for Pravega Schema Registry. + * + * OpenAPI spec version: 0.0.1 + * + * + * NOTE: This class is auto generated by the swagger code generator program. + * https://github.com/swagger-api/swagger-codegen.git + * Do not edit the class manually. + */ + + +package io.pravega.schemaregistry.contract.generated.rest.model; + +import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonCreator; +import io.pravega.schemaregistry.contract.generated.rest.model.GroupProperties; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import javax.validation.constraints.*; + +/** + * Map of Group names to group properties. For partially created groups, the group properties may be null. + */ +@ApiModel(description = "Map of Group names to group properties. 
For partially created groups, the group properties may be null.") + +public class ListGroupsResponse { + @JsonProperty("groups") + private Map<String, GroupProperties> groups = null; + + @JsonProperty("continuationToken") + private String continuationToken = null; + + public ListGroupsResponse groups(Map<String, GroupProperties> groups) { + this.groups = groups; + return this; + } + + public ListGroupsResponse putGroupsItem(String key, GroupProperties groupsItem) { + if (this.groups == null) { + this.groups = new HashMap<String, GroupProperties>(); + } + this.groups.put(key, groupsItem); + return this; + } + + /** + * Get groups + * @return groups + **/ + @JsonProperty("groups") + @ApiModelProperty(value = "") + public Map<String, GroupProperties> getGroups() { + return groups; + } + + public void setGroups(Map<String, GroupProperties> groups) { + this.groups = groups; + } + + public ListGroupsResponse continuationToken(String continuationToken) { + this.continuationToken = continuationToken; + return this; + } + + /** + * Continuation token to identify the position of last group in the response. + * @return continuationToken + **/ + @JsonProperty("continuationToken") + @ApiModelProperty(required = true, value = "Continuation token to identify the position of last group in the response.") + @NotNull + public String getContinuationToken() { + return continuationToken; + } + + public void setContinuationToken(String continuationToken) { + this.continuationToken = continuationToken; + } + + + @Override + public boolean equals(java.lang.Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ListGroupsResponse listGroupsResponse = (ListGroupsResponse) o; + return Objects.equals(this.groups, listGroupsResponse.groups) && + Objects.equals(this.continuationToken, listGroupsResponse.continuationToken); + } + + @Override + public int hashCode() { + return Objects.hash(groups, continuationToken); + } + + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ListGroupsResponse {\n"); + + sb.append(" groups: ").append(toIndentedString(groups)).append("\n"); + sb.append(" continuationToken: ").append(toIndentedString(continuationToken)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces + * (except the first line). + */ + private String toIndentedString(java.lang.Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} + diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaInfo.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaInfo.java new file mode 100644 index 000000000..2be4282ab --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaInfo.java @@ -0,0 +1,179 @@ +/* + * Pravega Schema Registry APIs + * REST APIs for Pravega Schema Registry. + * + * OpenAPI spec version: 0.0.1 + * + * + * NOTE: This class is auto generated by the swagger code generator program. + * https://github.com/swagger-api/swagger-codegen.git + * Do not edit the class manually. 
+ */ + + +package io.pravega.schemaregistry.contract.generated.rest.model; + +import java.util.Arrays; +import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonCreator; +import io.pravega.schemaregistry.contract.generated.rest.model.SerializationFormat; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import javax.validation.constraints.*; + +/** + * Schema information object that encapsulates various properties of a schema. + */ +@ApiModel(description = "Schema information object that encapsulates various properties of a schema.") + +public class SchemaInfo { + @JsonProperty("type") + private String type = null; + + @JsonProperty("serializationFormat") + private SerializationFormat serializationFormat = null; + + @JsonProperty("schemaData") + private byte[] schemaData = null; + + @JsonProperty("properties") + private Map properties = null; + + public SchemaInfo type(String type) { + this.type = type; + return this; + } + + /** + * Name of the schema. This identifies the type of object the schema payload represents. + * @return type + **/ + @JsonProperty("type") + @ApiModelProperty(required = true, value = "Name of the schema. This identifies the type of object the schema payload represents.") + @NotNull + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public SchemaInfo serializationFormat(SerializationFormat serializationFormat) { + this.serializationFormat = serializationFormat; + return this; + } + + /** + * Type of schema. + * @return serializationFormat + **/ + @JsonProperty("serializationFormat") + @ApiModelProperty(required = true, value = "Type of schema.") + @NotNull + public SerializationFormat getSerializationFormat() { + return serializationFormat; + } + + public void setSerializationFormat(SerializationFormat serializationFormat) { + this.serializationFormat = serializationFormat; + } + + public SchemaInfo schemaData(byte[] schemaData) { + this.schemaData = schemaData; + return this; + } + + /** + * Base64 encoded string for binary data for schema. + * @return schemaData + **/ + @JsonProperty("schemaData") + @ApiModelProperty(required = true, value = "Base64 encoded string for binary data for schema.") + @NotNull + public byte[] getSchemaData() { + return schemaData; + } + + public void setSchemaData(byte[] schemaData) { + this.schemaData = schemaData; + } + + public SchemaInfo properties(Map properties) { + this.properties = properties; + return this; + } + + public SchemaInfo putPropertiesItem(String key, String propertiesItem) { + if (this.properties == null) { + this.properties = new HashMap(); + } + this.properties.put(key, propertiesItem); + return this; + } + + /** + * User defined key value strings. 
+ * @return properties + **/ + @JsonProperty("properties") + @ApiModelProperty(value = "User defined key value strings.") + public Map getProperties() { + return properties; + } + + public void setProperties(Map properties) { + this.properties = properties; + } + + + @Override + public boolean equals(java.lang.Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + SchemaInfo schemaInfo = (SchemaInfo) o; + return Objects.equals(this.type, schemaInfo.type) && + Objects.equals(this.serializationFormat, schemaInfo.serializationFormat) && + Arrays.equals(this.schemaData, schemaInfo.schemaData) && + Objects.equals(this.properties, schemaInfo.properties); + } + + @Override + public int hashCode() { + return Objects.hash(type, serializationFormat, schemaData, properties); + } + + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class SchemaInfo {\n"); + + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" serializationFormat: ").append(toIndentedString(serializationFormat)).append("\n"); + sb.append(" schemaData: ").append(toIndentedString(schemaData)).append("\n"); + sb.append(" properties: ").append(toIndentedString(properties)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces + * (except the first line). + */ + private String toIndentedString(java.lang.Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} + diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaValidationRule.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaValidationRule.java new file mode 100644 index 000000000..9fb9ee11d --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaValidationRule.java @@ -0,0 +1,92 @@ +/* + * Pravega Schema Registry APIs + * REST APIs for Pravega Schema Registry. + * + * OpenAPI spec version: 0.0.1 + * + * + * NOTE: This class is auto generated by the swagger code generator program. + * https://github.com/swagger-api/swagger-codegen.git + * Do not edit the class manually. + */ + + +package io.pravega.schemaregistry.contract.generated.rest.model; + +import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonCreator; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import javax.validation.constraints.*; + +/** + * Schema validation rule base class. + */ +@ApiModel(description = "Schema validation rule base class.") + +public class SchemaValidationRule { + @JsonProperty("rule") + private Object rule = null; + + public SchemaValidationRule rule(Object rule) { + this.rule = rule; + return this; + } + + /** + * Specific schema validation rule. The only rule we have presently is Compatibility. The \"name\" is used to identify specific Rule type. The only rule supported in this is Compatibility. + * @return rule + **/ + @JsonProperty("rule") + @ApiModelProperty(required = true, value = "Specific schema validation rule. The only rule we have presently is Compatibility. The \"name\" is used to identify specific Rule type. 
The only rule supported in this is Compatibility.") + @NotNull + public Object getRule() { + return rule; + } + + public void setRule(Object rule) { + this.rule = rule; + } + + + @Override + public boolean equals(java.lang.Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + SchemaValidationRule schemaValidationRule = (SchemaValidationRule) o; + return Objects.equals(this.rule, schemaValidationRule.rule); + } + + @Override + public int hashCode() { + return Objects.hash(rule); + } + + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class SchemaValidationRule {\n"); + + sb.append(" rule: ").append(toIndentedString(rule)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces + * (except the first line). + */ + private String toIndentedString(java.lang.Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} + diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaValidationRules.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaValidationRules.java new file mode 100644 index 000000000..0f9d7af0b --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaValidationRules.java @@ -0,0 +1,103 @@ +/* + * Pravega Schema Registry APIs + * REST APIs for Pravega Schema Registry. + * + * OpenAPI spec version: 0.0.1 + * + * + * NOTE: This class is auto generated by the swagger code generator program. + * https://github.com/swagger-api/swagger-codegen.git + * Do not edit the class manually. + */ + + +package io.pravega.schemaregistry.contract.generated.rest.model; + +import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonCreator; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaValidationRule; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import javax.validation.constraints.*; + +/** + * Schema validation rules to be applied for new schema addition. Currently only one rule is supported - Compatibility. + */ +@ApiModel(description = "Schema validation rules to be applied for new schema addition. 
Currently only one rule is supported - Compatibility.") + +public class SchemaValidationRules { + @JsonProperty("rules") + private Map rules = null; + + public SchemaValidationRules rules(Map rules) { + this.rules = rules; + return this; + } + + public SchemaValidationRules putRulesItem(String key, SchemaValidationRule rulesItem) { + if (this.rules == null) { + this.rules = new HashMap(); + } + this.rules.put(key, rulesItem); + return this; + } + + /** + * Get rules + * @return rules + **/ + @JsonProperty("rules") + @ApiModelProperty(value = "") + public Map getRules() { + return rules; + } + + public void setRules(Map rules) { + this.rules = rules; + } + + + @Override + public boolean equals(java.lang.Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + SchemaValidationRules schemaValidationRules = (SchemaValidationRules) o; + return Objects.equals(this.rules, schemaValidationRules.rules); + } + + @Override + public int hashCode() { + return Objects.hash(rules); + } + + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class SchemaValidationRules {\n"); + + sb.append(" rules: ").append(toIndentedString(rules)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces + * (except the first line). + */ + private String toIndentedString(java.lang.Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} + diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaVersionsList.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaVersionsList.java new file mode 100644 index 000000000..6be73a69d --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaVersionsList.java @@ -0,0 +1,102 @@ +/* + * Pravega Schema Registry APIs + * REST APIs for Pravega Schema Registry. + * + * OpenAPI spec version: 0.0.1 + * + * + * NOTE: This class is auto generated by the swagger code generator program. + * https://github.com/swagger-api/swagger-codegen.git + * Do not edit the class manually. + */ + + +package io.pravega.schemaregistry.contract.generated.rest.model; + +import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonCreator; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaWithVersion; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import java.util.ArrayList; +import java.util.List; +import javax.validation.constraints.*; + +/** + * List of schemas with their versions. + */ +@ApiModel(description = "List of schemas with their versions.") + +public class SchemaVersionsList { + @JsonProperty("schemas") + private List schemas = null; + + public SchemaVersionsList schemas(List schemas) { + this.schemas = schemas; + return this; + } + + public SchemaVersionsList addSchemasItem(SchemaWithVersion schemasItem) { + if (this.schemas == null) { + this.schemas = new ArrayList(); + } + this.schemas.add(schemasItem); + return this; + } + + /** + * List of schemas with their versions. 
+ * @return schemas + **/ + @JsonProperty("schemas") + @ApiModelProperty(value = "List of schemas with their versions.") + public List getSchemas() { + return schemas; + } + + public void setSchemas(List schemas) { + this.schemas = schemas; + } + + + @Override + public boolean equals(java.lang.Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + SchemaVersionsList schemaVersionsList = (SchemaVersionsList) o; + return Objects.equals(this.schemas, schemaVersionsList.schemas); + } + + @Override + public int hashCode() { + return Objects.hash(schemas); + } + + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class SchemaVersionsList {\n"); + + sb.append(" schemas: ").append(toIndentedString(schemas)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces + * (except the first line). + */ + private String toIndentedString(java.lang.Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} + diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaWithVersion.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaWithVersion.java new file mode 100644 index 000000000..bc0687fff --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaWithVersion.java @@ -0,0 +1,119 @@ +/* + * Pravega Schema Registry APIs + * REST APIs for Pravega Schema Registry. + * + * OpenAPI spec version: 0.0.1 + * + * + * NOTE: This class is auto generated by the swagger code generator program. + * https://github.com/swagger-api/swagger-codegen.git + * Do not edit the class manually. + */ + + +package io.pravega.schemaregistry.contract.generated.rest.model; + +import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonCreator; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo; +import io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import javax.validation.constraints.*; + +/** + * Object that encapsulates SchemaInfo and its corresponding VersionInfo objects. + */ +@ApiModel(description = "Object that encapsulates SchemaInfo and its corresponding VersionInfo objects.") + +public class SchemaWithVersion { + @JsonProperty("schemaInfo") + private SchemaInfo schemaInfo = null; + + @JsonProperty("version") + private VersionInfo version = null; + + public SchemaWithVersion schemaInfo(SchemaInfo schemaInfo) { + this.schemaInfo = schemaInfo; + return this; + } + + /** + * Schema information. + * @return schemaInfo + **/ + @JsonProperty("schemaInfo") + @ApiModelProperty(required = true, value = "Schema information.") + @NotNull + public SchemaInfo getSchemaInfo() { + return schemaInfo; + } + + public void setSchemaInfo(SchemaInfo schemaInfo) { + this.schemaInfo = schemaInfo; + } + + public SchemaWithVersion version(VersionInfo version) { + this.version = version; + return this; + } + + /** + * Version information. 
+ * @return version + **/ + @JsonProperty("version") + @ApiModelProperty(required = true, value = "Version information.") + @NotNull + public VersionInfo getVersion() { + return version; + } + + public void setVersion(VersionInfo version) { + this.version = version; + } + + + @Override + public boolean equals(java.lang.Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + SchemaWithVersion schemaWithVersion = (SchemaWithVersion) o; + return Objects.equals(this.schemaInfo, schemaWithVersion.schemaInfo) && + Objects.equals(this.version, schemaWithVersion.version); + } + + @Override + public int hashCode() { + return Objects.hash(schemaInfo, version); + } + + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class SchemaWithVersion {\n"); + + sb.append(" schemaInfo: ").append(toIndentedString(schemaInfo)).append("\n"); + sb.append(" version: ").append(toIndentedString(version)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces + * (except the first line). + */ + private String toIndentedString(java.lang.Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} + diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SerializationFormat.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SerializationFormat.java new file mode 100644 index 000000000..bc980cbd6 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SerializationFormat.java @@ -0,0 +1,154 @@ +/* + * Pravega Schema Registry APIs + * REST APIs for Pravega Schema Registry. + * + * OpenAPI spec version: 0.0.1 + * + * + * NOTE: This class is auto generated by the swagger code generator program. + * https://github.com/swagger-api/swagger-codegen.git + * Do not edit the class manually. + */ + + +package io.pravega.schemaregistry.contract.generated.rest.model; + +import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonValue; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import javax.validation.constraints.*; + +/** + * Serialization format enum that lists different serialization formats supported by the service. To use additional formats, use serializationFormat.Custom and supply customTypeName. + */ +@ApiModel(description = "Serialization format enum that lists different serialization formats supported by the service. 
To use additional formats, use serializationFormat.Custom and supply customTypeName.") + +public class SerializationFormat { + /** + * Gets or Sets serializationFormat + */ + public enum SerializationFormatEnum { + AVRO("Avro"), + + PROTOBUF("Protobuf"), + + JSON("Json"), + + ANY("Any"), + + CUSTOM("Custom"); + + private String value; + + SerializationFormatEnum(String value) { + this.value = value; + } + + @Override + @JsonValue + public String toString() { + return String.valueOf(value); + } + + @JsonCreator + public static SerializationFormatEnum fromValue(String text) { + for (SerializationFormatEnum b : SerializationFormatEnum.values()) { + if (String.valueOf(b.value).equals(text)) { + return b; + } + } + return null; + } + } + + @JsonProperty("serializationFormat") + private SerializationFormatEnum serializationFormat = null; + + @JsonProperty("customTypeName") + private String customTypeName = null; + + public SerializationFormat serializationFormat(SerializationFormatEnum serializationFormat) { + this.serializationFormat = serializationFormat; + return this; + } + + /** + * Get serializationFormat + * @return serializationFormat + **/ + @JsonProperty("serializationFormat") + @ApiModelProperty(required = true, value = "") + @NotNull + public SerializationFormatEnum getSerializationFormat() { + return serializationFormat; + } + + public void setSerializationFormat(SerializationFormatEnum serializationFormat) { + this.serializationFormat = serializationFormat; + } + + public SerializationFormat customTypeName(String customTypeName) { + this.customTypeName = customTypeName; + return this; + } + + /** + * Get customTypeName + * @return customTypeName + **/ + @JsonProperty("customTypeName") + @ApiModelProperty(value = "") + public String getCustomTypeName() { + return customTypeName; + } + + public void setCustomTypeName(String customTypeName) { + this.customTypeName = customTypeName; + } + + + @Override + public boolean equals(java.lang.Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + SerializationFormat serializationFormat = (SerializationFormat) o; + return Objects.equals(this.serializationFormat, serializationFormat.serializationFormat) && + Objects.equals(this.customTypeName, serializationFormat.customTypeName); + } + + @Override + public int hashCode() { + return Objects.hash(serializationFormat, customTypeName); + } + + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class SerializationFormat {\n"); + + sb.append(" serializationFormat: ").append(toIndentedString(serializationFormat)).append("\n"); + sb.append(" customTypeName: ").append(toIndentedString(customTypeName)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces + * (except the first line). 
+ */ + private String toIndentedString(java.lang.Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} + diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/UpdateValidationRulesRequest.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/UpdateValidationRulesRequest.java new file mode 100644 index 000000000..92cdef2d9 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/UpdateValidationRulesRequest.java @@ -0,0 +1,116 @@ +/* + * Pravega Schema Registry APIs + * REST APIs for Pravega Schema Registry. + * + * OpenAPI spec version: 0.0.1 + * + * + * NOTE: This class is auto generated by the swagger code generator program. + * https://github.com/swagger-api/swagger-codegen.git + * Do not edit the class manually. + */ + + +package io.pravega.schemaregistry.contract.generated.rest.model; + +import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonCreator; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaValidationRules; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import javax.validation.constraints.*; + +/** + * UpdateValidationRulesRequest + */ + +public class UpdateValidationRulesRequest { + @JsonProperty("validationRules") + private SchemaValidationRules validationRules = null; + + @JsonProperty("previousRules") + private SchemaValidationRules previousRules = null; + + public UpdateValidationRulesRequest validationRules(SchemaValidationRules validationRules) { + this.validationRules = validationRules; + return this; + } + + /** + * Get validationRules + * @return validationRules + **/ + @JsonProperty("validationRules") + @ApiModelProperty(required = true, value = "") + @NotNull + public SchemaValidationRules getValidationRules() { + return validationRules; + } + + public void setValidationRules(SchemaValidationRules validationRules) { + this.validationRules = validationRules; + } + + public UpdateValidationRulesRequest previousRules(SchemaValidationRules previousRules) { + this.previousRules = previousRules; + return this; + } + + /** + * Get previousRules + * @return previousRules + **/ + @JsonProperty("previousRules") + @ApiModelProperty(value = "") + public SchemaValidationRules getPreviousRules() { + return previousRules; + } + + public void setPreviousRules(SchemaValidationRules previousRules) { + this.previousRules = previousRules; + } + + + @Override + public boolean equals(java.lang.Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + UpdateValidationRulesRequest updateValidationRulesRequest = (UpdateValidationRulesRequest) o; + return Objects.equals(this.validationRules, updateValidationRulesRequest.validationRules) && + Objects.equals(this.previousRules, updateValidationRulesRequest.previousRules); + } + + @Override + public int hashCode() { + return Objects.hash(validationRules, previousRules); + } + + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class UpdateValidationRulesRequest {\n"); + + sb.append(" validationRules: ").append(toIndentedString(validationRules)).append("\n"); + sb.append(" previousRules: ").append(toIndentedString(previousRules)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each 
line indented by 4 spaces + * (except the first line). + */ + private String toIndentedString(java.lang.Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} + diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/Valid.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/Valid.java new file mode 100644 index 000000000..bde7b3f10 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/Valid.java @@ -0,0 +1,92 @@ +/* + * Pravega Schema Registry APIs + * REST APIs for Pravega Schema Registry. + * + * OpenAPI spec version: 0.0.1 + * + * + * NOTE: This class is auto generated by the swagger code generator program. + * https://github.com/swagger-api/swagger-codegen.git + * Do not edit the class manually. + */ + + +package io.pravega.schemaregistry.contract.generated.rest.model; + +import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonCreator; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import javax.validation.constraints.*; + +/** + * Response object for validateSchema api. + */ +@ApiModel(description = "Response object for validateSchema api.") + +public class Valid { + @JsonProperty("valid") + private Boolean valid = null; + + public Valid valid(Boolean valid) { + this.valid = valid; + return this; + } + + /** + * Whether given schema is valid with respect to existing group schemas against the configured validation rules. + * @return valid + **/ + @JsonProperty("valid") + @ApiModelProperty(required = true, value = "Whether given schema is valid with respect to existing group schemas against the configured validation rules.") + @NotNull + public Boolean isValid() { + return valid; + } + + public void setValid(Boolean valid) { + this.valid = valid; + } + + + @Override + public boolean equals(java.lang.Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Valid valid = (Valid) o; + return Objects.equals(this.valid, valid.valid); + } + + @Override + public int hashCode() { + return Objects.hash(valid); + } + + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class Valid {\n"); + + sb.append(" valid: ").append(toIndentedString(valid)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces + * (except the first line). + */ + private String toIndentedString(java.lang.Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} + diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/ValidateRequest.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/ValidateRequest.java new file mode 100644 index 000000000..5daa183df --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/ValidateRequest.java @@ -0,0 +1,117 @@ +/* + * Pravega Schema Registry APIs + * REST APIs for Pravega Schema Registry. + * + * OpenAPI spec version: 0.0.1 + * + * + * NOTE: This class is auto generated by the swagger code generator program. + * https://github.com/swagger-api/swagger-codegen.git + * Do not edit the class manually. 
+ */ + + +package io.pravega.schemaregistry.contract.generated.rest.model; + +import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonCreator; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaValidationRules; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import javax.validation.constraints.*; + +/** + * ValidateRequest + */ + +public class ValidateRequest { + @JsonProperty("schemaInfo") + private SchemaInfo schemaInfo = null; + + @JsonProperty("validationRules") + private SchemaValidationRules validationRules = null; + + public ValidateRequest schemaInfo(SchemaInfo schemaInfo) { + this.schemaInfo = schemaInfo; + return this; + } + + /** + * Get schemaInfo + * @return schemaInfo + **/ + @JsonProperty("schemaInfo") + @ApiModelProperty(required = true, value = "") + @NotNull + public SchemaInfo getSchemaInfo() { + return schemaInfo; + } + + public void setSchemaInfo(SchemaInfo schemaInfo) { + this.schemaInfo = schemaInfo; + } + + public ValidateRequest validationRules(SchemaValidationRules validationRules) { + this.validationRules = validationRules; + return this; + } + + /** + * Get validationRules + * @return validationRules + **/ + @JsonProperty("validationRules") + @ApiModelProperty(value = "") + public SchemaValidationRules getValidationRules() { + return validationRules; + } + + public void setValidationRules(SchemaValidationRules validationRules) { + this.validationRules = validationRules; + } + + + @Override + public boolean equals(java.lang.Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ValidateRequest validateRequest = (ValidateRequest) o; + return Objects.equals(this.schemaInfo, validateRequest.schemaInfo) && + Objects.equals(this.validationRules, validateRequest.validationRules); + } + + @Override + public int hashCode() { + return Objects.hash(schemaInfo, validationRules); + } + + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ValidateRequest {\n"); + + sb.append(" schemaInfo: ").append(toIndentedString(schemaInfo)).append("\n"); + sb.append(" validationRules: ").append(toIndentedString(validationRules)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces + * (except the first line). + */ + private String toIndentedString(java.lang.Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} + diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/VersionInfo.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/VersionInfo.java new file mode 100644 index 000000000..9b4c2603d --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/VersionInfo.java @@ -0,0 +1,142 @@ +/* + * Pravega Schema Registry APIs + * REST APIs for Pravega Schema Registry. + * + * OpenAPI spec version: 0.0.1 + * + * + * NOTE: This class is auto generated by the swagger code generator program. + * https://github.com/swagger-api/swagger-codegen.git + * Do not edit the class manually. 
+ */ + + +package io.pravega.schemaregistry.contract.generated.rest.model; + +import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonCreator; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import javax.validation.constraints.*; + +/** + * Version information object. + */ +@ApiModel(description = "Version information object.") + +public class VersionInfo { + @JsonProperty("type") + private String type = null; + + @JsonProperty("version") + private Integer version = null; + + @JsonProperty("ordinal") + private Integer ordinal = null; + + public VersionInfo type(String type) { + this.type = type; + return this; + } + + /** + * Type of schema for this version. This is same value used in SchemaInfo#Type for the schema this version identifies. + * @return type + **/ + @JsonProperty("type") + @ApiModelProperty(required = true, value = "Type of schema for this version. This is same value used in SchemaInfo#Type for the schema this version identifies.") + @NotNull + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public VersionInfo version(Integer version) { + this.version = version; + return this; + } + + /** + * Version number that uniquely identifies the schema version among all schemas in the group that share the same Type. + * @return version + **/ + @JsonProperty("version") + @ApiModelProperty(required = true, value = "Version number that uniquely identifies the schema version among all schemas in the group that share the same Type.") + @NotNull + public Integer getVersion() { + return version; + } + + public void setVersion(Integer version) { + this.version = version; + } + + public VersionInfo ordinal(Integer ordinal) { + this.ordinal = ordinal; + return this; + } + + /** + * Version ordinal that uniquely identifies the position of the corresponding schema across all schemas in the group. + * @return ordinal + **/ + @JsonProperty("ordinal") + @ApiModelProperty(required = true, value = "Version ordinal that uniquely identifies the position of the corresponding schema across all schemas in the group.") + @NotNull + public Integer getOrdinal() { + return ordinal; + } + + public void setOrdinal(Integer ordinal) { + this.ordinal = ordinal; + } + + + @Override + public boolean equals(java.lang.Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + VersionInfo versionInfo = (VersionInfo) o; + return Objects.equals(this.type, versionInfo.type) && + Objects.equals(this.version, versionInfo.version) && + Objects.equals(this.ordinal, versionInfo.ordinal); + } + + @Override + public int hashCode() { + return Objects.hash(type, version, ordinal); + } + + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class VersionInfo {\n"); + + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" version: ").append(toIndentedString(version)).append("\n"); + sb.append(" ordinal: ").append(toIndentedString(ordinal)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces + * (except the first line). 
+ */ + private String toIndentedString(java.lang.Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} + diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/ApiException.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/ApiException.java new file mode 100644 index 000000000..096b7c1d1 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/ApiException.java @@ -0,0 +1,10 @@ +package io.pravega.schemaregistry.contract.generated.rest.server.api; + + +public class ApiException extends Exception{ + private int code; + public ApiException (int code, String msg) { + super(msg); + this.code = code; + } +} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/ApiOriginFilter.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/ApiOriginFilter.java new file mode 100644 index 000000000..1ad2cce34 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/ApiOriginFilter.java @@ -0,0 +1,22 @@ +package io.pravega.schemaregistry.contract.generated.rest.server.api; + +import java.io.IOException; + +import javax.servlet.*; +import javax.servlet.http.HttpServletResponse; + + +public class ApiOriginFilter implements javax.servlet.Filter { + public void doFilter(ServletRequest request, ServletResponse response, + FilterChain chain) throws IOException, ServletException { + HttpServletResponse res = (HttpServletResponse) response; + res.addHeader("Access-Control-Allow-Origin", "*"); + res.addHeader("Access-Control-Allow-Methods", "GET, POST, DELETE, PUT"); + res.addHeader("Access-Control-Allow-Headers", "Content-Type"); + chain.doFilter(request, response); + } + + public void destroy() {} + + public void init(FilterConfig filterConfig) throws ServletException {} +} \ No newline at end of file diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/ApiResponseMessage.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/ApiResponseMessage.java new file mode 100644 index 000000000..47e3f5d76 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/ApiResponseMessage.java @@ -0,0 +1,69 @@ +package io.pravega.schemaregistry.contract.generated.rest.server.api; + +import javax.xml.bind.annotation.XmlTransient; + +@javax.xml.bind.annotation.XmlRootElement + +public class ApiResponseMessage { + public static final int ERROR = 1; + public static final int WARNING = 2; + public static final int INFO = 3; + public static final int OK = 4; + public static final int TOO_BUSY = 5; + + int code; + String type; + String message; + + public ApiResponseMessage(){} + + public ApiResponseMessage(int code, String message){ + this.code = code; + switch(code){ + case ERROR: + setType("error"); + break; + case WARNING: + setType("warning"); + break; + case INFO: + setType("info"); + break; + case OK: + setType("ok"); + break; + case TOO_BUSY: + setType("too busy"); + break; + default: + setType("unknown"); + break; + } + this.message = message; + } + + @XmlTransient + public int getCode() { + return code; + } + + public void setCode(int code) { + this.code = code; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public String 
getMessage() { + return message; + } + + public void setMessage(String message) { + this.message = message; + } +} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/Bootstrap.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/Bootstrap.java new file mode 100644 index 000000000..deb52b674 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/Bootstrap.java @@ -0,0 +1,31 @@ +package io.pravega.schemaregistry.contract.generated.rest.server.api; + +import io.swagger.jaxrs.config.SwaggerContextService; +import io.swagger.models.*; + +import io.swagger.models.auth.*; + +import javax.servlet.http.HttpServlet; +import javax.servlet.ServletContext; +import javax.servlet.ServletConfig; +import javax.servlet.ServletException; + +public class Bootstrap extends HttpServlet { + @Override + public void init(ServletConfig config) throws ServletException { + Info info = new Info() + .title("Swagger Server") + .description("REST APIs for Pravega Schema Registry.") + .termsOfService("") + .contact(new Contact() + .email("")) + .license(new License() + .name("Apache 2.0") + .url("http://www.apache.org/licenses/LICENSE-2.0")); + + ServletContext context = config.getServletContext(); + Swagger swagger = new Swagger().info(info); + + new SwaggerContextService().withServletConfig(config).updateSwagger(swagger); + } +} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/GroupsApi.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/GroupsApi.java new file mode 100644 index 000000000..16db9f378 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/GroupsApi.java @@ -0,0 +1,412 @@ +package io.pravega.schemaregistry.contract.generated.rest.server.api; + +import io.pravega.schemaregistry.contract.generated.rest.model.*; +import io.pravega.schemaregistry.contract.generated.rest.server.api.GroupsApiService; +import io.pravega.schemaregistry.contract.generated.rest.server.api.factories.GroupsApiServiceFactory; + +import io.swagger.annotations.ApiParam; +import io.swagger.jaxrs.*; + +import io.pravega.schemaregistry.contract.generated.rest.model.CanRead; +import io.pravega.schemaregistry.contract.generated.rest.model.CodecTypesList; +import io.pravega.schemaregistry.contract.generated.rest.model.CreateGroupRequest; +import io.pravega.schemaregistry.contract.generated.rest.model.EncodingId; +import io.pravega.schemaregistry.contract.generated.rest.model.EncodingInfo; +import io.pravega.schemaregistry.contract.generated.rest.model.GetEncodingIdRequest; +import io.pravega.schemaregistry.contract.generated.rest.model.GroupHistory; +import io.pravega.schemaregistry.contract.generated.rest.model.GroupProperties; +import io.pravega.schemaregistry.contract.generated.rest.model.ListGroupsResponse; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaVersionsList; +import io.pravega.schemaregistry.contract.generated.rest.model.UpdateValidationRulesRequest; +import io.pravega.schemaregistry.contract.generated.rest.model.Valid; +import io.pravega.schemaregistry.contract.generated.rest.model.ValidateRequest; +import io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo; + +import java.util.Map; +import java.util.List; +import 
io.pravega.schemaregistry.contract.generated.rest.server.api.NotFoundException; + +import java.io.InputStream; + +import org.glassfish.jersey.media.multipart.FormDataContentDisposition; +import org.glassfish.jersey.media.multipart.FormDataParam; + +import javax.servlet.ServletConfig; +import javax.ws.rs.core.Context; +import javax.ws.rs.core.Response; +import javax.ws.rs.core.SecurityContext; +import javax.ws.rs.*; +import javax.validation.constraints.*; + +@Path("/groups") + + +@io.swagger.annotations.Api(description = "the groups API") + +public class GroupsApi { + private final GroupsApiService delegate; + + public GroupsApi(@Context ServletConfig servletContext) { + GroupsApiService delegate = null; + + if (servletContext != null) { + String implClass = servletContext.getInitParameter("GroupsApi.implementation"); + if (implClass != null && !"".equals(implClass.trim())) { + try { + delegate = (GroupsApiService) Class.forName(implClass).newInstance(); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + } + + if (delegate == null) { + delegate = GroupsApiServiceFactory.getGroupsApi(); + } + + this.delegate = delegate; + } + + @POST + @Path("/{groupName}/codecTypes") + @Consumes({ "application/json" }) + + @io.swagger.annotations.ApiOperation(value = "", notes = "Adds a new codecType to the group.", response = Void.class, tags={ "Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 201, message = "Successfully added codecType to group", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 404, message = "Group not found", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while registering codectype to a Group", response = Void.class) }) + public Response addCodecType(@ApiParam(value = "Group name",required=true) @PathParam("groupName") String groupName +,@ApiParam(value = "The codecType" ,required=true) String codecType +,@Context SecurityContext securityContext) + throws NotFoundException { + return delegate.addCodecType(groupName,codecType,securityContext); + } + @POST + @Path("/{groupName}/schemas/versions") + @Consumes({ "application/json" }) + @Produces({ "application/json" }) + @io.swagger.annotations.ApiOperation(value = "", notes = "Adds a new schema to the group", response = VersionInfo.class, tags={ "Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 201, message = "Successfully added schema to the group", response = VersionInfo.class), + + @io.swagger.annotations.ApiResponse(code = 404, message = "Group not found", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 409, message = "Incompatible schema", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 417, message = "Invalid serialization format", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while adding schema to group", response = Void.class) }) + public Response addSchema(@ApiParam(value = "Group name",required=true) @PathParam("groupName") String groupName +,@ApiParam(value = "Add new schema to group" ,required=true) SchemaInfo schemaInfo +,@Context SecurityContext securityContext) + throws NotFoundException { + return delegate.addSchema(groupName,schemaInfo,securityContext); + } + @POST + @Path("/{groupName}/schemas/versions/canRead") + @Consumes({ "application/json" }) + @Produces({ "application/json" }) + 
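(Aside: the addSchema endpoint declared above accepts a SchemaInfo payload at POST /groups/{groupName}/schemas/versions and returns the assigned VersionInfo. A minimal JAX-RS client sketch follows; the base URI, group name and schema content are hypothetical, and only the fluent setters visible elsewhere in this patch are used.)

    import java.nio.charset.StandardCharsets;
    import java.util.Collections;
    import javax.ws.rs.client.Client;
    import javax.ws.rs.client.ClientBuilder;
    import javax.ws.rs.client.Entity;
    import javax.ws.rs.core.MediaType;
    import io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo;
    import io.pravega.schemaregistry.contract.generated.rest.model.SerializationFormat;
    import io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo;

    public class AddSchemaExample {
        public static void main(String[] args) {
            Client client = ClientBuilder.newClient();
            SchemaInfo schema = new SchemaInfo()
                    .type("MyRecord")
                    .serializationFormat(new SerializationFormat()
                            .serializationFormat(SerializationFormat.SerializationFormatEnum.CUSTOM)
                            .customTypeName("myFormat"))
                    .schemaData("{ \"schema\": \"...\" }".getBytes(StandardCharsets.UTF_8))
                    .properties(Collections.emptyMap());
            // Hypothetical registry endpoint and group name.
            VersionInfo version = client.target("http://localhost:9092")
                    .path("groups").path("mygroup").path("schemas").path("versions")
                    .request(MediaType.APPLICATION_JSON)
                    .post(Entity.json(schema), VersionInfo.class);
            System.out.println("Registered as version " + version.getVersion());
            client.close();
        }
    }
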
@io.swagger.annotations.ApiOperation(value = "", notes = "Checks if given schema can be used for reads subject to compatibility policy in the schema validation rules.", response = CanRead.class, tags={ "Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Response to tell whether schema can be used to read existing schemas", response = CanRead.class), + + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while checking schema for readability", response = Void.class) }) + public Response canRead(@ApiParam(value = "Group name",required=true) @PathParam("groupName") String groupName +,@ApiParam(value = "Checks if schema can be used to read the data in the stream based on compatibility rules." ,required=true) SchemaInfo schemaInfo +,@Context SecurityContext securityContext) + throws NotFoundException { + return delegate.canRead(groupName,schemaInfo,securityContext); + } + @POST + + @Consumes({ "application/json" }) + + @io.swagger.annotations.ApiOperation(value = "", notes = "Create a new Group", response = Void.class, tags={ "Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 201, message = "Successfully added group", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 409, message = "Group with given name already exists", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while creating a Group", response = Void.class) }) + public Response createGroup(@ApiParam(value = "The Group configuration" ,required=true) CreateGroupRequest createGroupRequest +,@Context SecurityContext securityContext) + throws NotFoundException { + return delegate.createGroup(createGroupRequest,securityContext); + } + @DELETE + @Path("/{groupName}") + + + @io.swagger.annotations.ApiOperation(value = "", notes = "Delete a Group", response = Void.class, tags={ "Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 204, message = "Successfully deleted the Group", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while deleting the Group", response = Void.class) }) + public Response deleteGroup(@ApiParam(value = "Group name",required=true) @PathParam("groupName") String groupName +,@Context SecurityContext securityContext) + throws NotFoundException { + return delegate.deleteGroup(groupName,securityContext); + } + @DELETE + @Path("/{groupName}/schemas/{type}/versions/{version}") + + @Produces({ "application/json" }) + @io.swagger.annotations.ApiOperation(value = "", notes = "Delete schema version from the group.", response = Void.class, tags={ "Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 204, message = "Schema corresponding to the version", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while deleting schema from group", response = Void.class) }) + public Response deleteSchemaVersion(@ApiParam(value = "Group name",required=true) @PathParam("groupName") String groupName +,@ApiParam(value = "Schema type from SchemaInfo#type or 
VersionInfo#type",required=true) @PathParam("type") String type +,@ApiParam(value = "Version number",required=true) @PathParam("version") Integer version +,@Context SecurityContext securityContext) + throws NotFoundException { + return delegate.deleteSchemaVersion(groupName,type,version,securityContext); + } + @DELETE + @Path("/{groupName}/schemas/versions/{versionOrdinal}") + + @Produces({ "application/json" }) + @io.swagger.annotations.ApiOperation(value = "", notes = "Delete schema identified by version from the group.", response = Void.class, tags={ "Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 204, message = "Schema corresponding to the version", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while deleting schema from group", response = Void.class) }) + public Response deleteSchemaVersionOrinal(@ApiParam(value = "Group name",required=true) @PathParam("groupName") String groupName +,@ApiParam(value = "Version ordinal",required=true) @PathParam("versionOrdinal") Integer versionOrdinal +,@Context SecurityContext securityContext) + throws NotFoundException { + return delegate.deleteSchemaVersionOrinal(groupName,versionOrdinal,securityContext); + } + @GET + @Path("/{groupName}/codecTypes") + + @Produces({ "application/json" }) + @io.swagger.annotations.ApiOperation(value = "", notes = "Get codecTypes for the group.", response = CodecTypesList.class, tags={ "Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Found CodecTypes", response = CodecTypesList.class), + + @io.swagger.annotations.ApiResponse(code = 404, message = "Group or encoding id with given name not found", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching codecTypes registered", response = Void.class) }) + public Response getCodecTypesList(@ApiParam(value = "Group name",required=true) @PathParam("groupName") String groupName +,@Context SecurityContext securityContext) + throws NotFoundException { + return delegate.getCodecTypesList(groupName,securityContext); + } + @PUT + @Path("/{groupName}/encodings") + @Consumes({ "application/json" }) + @Produces({ "application/json" }) + @io.swagger.annotations.ApiOperation(value = "", notes = "Get an encoding id that uniquely identifies a schema version and codec type pair.", response = EncodingId.class, tags={ "Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Found Encoding", response = EncodingId.class), + + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name or version not found", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 412, message = "Codec type not registered", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while getting encoding id", response = Void.class) }) + public Response getEncodingId(@ApiParam(value = "Group name",required=true) @PathParam("groupName") String groupName +,@ApiParam(value = "Get schema corresponding to the version" ,required=true) GetEncodingIdRequest getEncodingIdRequest +,@Context SecurityContext securityContext) + throws NotFoundException { + return 
delegate.getEncodingId(groupName,getEncodingIdRequest,securityContext); + } + @GET + @Path("/{groupName}/encodings/{encodingId}") + + @Produces({ "application/json" }) + @io.swagger.annotations.ApiOperation(value = "", notes = "Get the encoding information corresponding to the encoding id.", response = EncodingInfo.class, tags={ "Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Found Encoding", response = EncodingInfo.class), + + @io.swagger.annotations.ApiResponse(code = 404, message = "Group or encoding id with given name not found", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while getting encoding info corresponding to encoding id", response = Void.class) }) + public Response getEncodingInfo(@ApiParam(value = "Group name",required=true) @PathParam("groupName") String groupName +,@ApiParam(value = "Encoding id that identifies a unique combination of schema and codec type",required=true) @PathParam("encodingId") Integer encodingId +,@Context SecurityContext securityContext) + throws NotFoundException { + return delegate.getEncodingInfo(groupName,encodingId,securityContext); + } + @GET + @Path("/{groupName}/history") + + @Produces({ "application/json" }) + @io.swagger.annotations.ApiOperation(value = "", notes = "Fetch the history of schema evolution of a Group", response = GroupHistory.class, tags={ "Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Found Group history", response = GroupHistory.class), + + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching Group history", response = Void.class) }) + public Response getGroupHistory(@ApiParam(value = "Group name",required=true) @PathParam("groupName") String groupName +,@Context SecurityContext securityContext) + throws NotFoundException { + return delegate.getGroupHistory(groupName,securityContext); + } + @GET + @Path("/{groupName}") + + @Produces({ "application/json" }) + @io.swagger.annotations.ApiOperation(value = "", notes = "Fetch the properties of an existing Group", response = GroupProperties.class, tags={ "Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Found Group properties", response = GroupProperties.class), + + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching Group details", response = Void.class) }) + public Response getGroupProperties(@ApiParam(value = "Group name",required=true) @PathParam("groupName") String groupName +,@Context SecurityContext securityContext) + throws NotFoundException { + return delegate.getGroupProperties(groupName,securityContext); + } + @GET + @Path("/{groupName}/schemas/{type}/versions/{version}") + + @Produces({ "application/json" }) + @io.swagger.annotations.ApiOperation(value = "", notes = "Get schema from the version ordinal that uniquely identifies the schema in the group.", response = SchemaInfo.class, tags={ "Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Schema corresponding to the version", response = 
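(Aside: the two encoding endpoints above work as a pair. A writer first resolves an encoding id for a schema version and codec type via PUT /groups/{groupName}/encodings, and a reader later resolves that id back into an EncodingInfo via GET /groups/{groupName}/encodings/{encodingId}. The read-side sketch below assumes a hypothetical base URI and a previously obtained id; the shape of GetEncodingIdRequest is not shown in this hunk, so the write side is only described in the comment.)

    import javax.ws.rs.client.Client;
    import javax.ws.rs.client.ClientBuilder;
    import javax.ws.rs.core.MediaType;
    import io.pravega.schemaregistry.contract.generated.rest.model.EncodingInfo;

    public class GetEncodingInfoExample {
        public static void main(String[] args) {
            Client client = ClientBuilder.newClient();
            // Hypothetical: an encoding id previously returned by PUT /groups/{groupName}/encodings
            // (that request is assumed to carry the schema version and codec type).
            int encodingId = 0;
            EncodingInfo info = client.target("http://localhost:9092")
                    .path("groups").path("mygroup").path("encodings").path(Integer.toString(encodingId))
                    .request(MediaType.APPLICATION_JSON)
                    .get(EncodingInfo.class);
            System.out.println("Codec type: " + info.getCodecType());
            client.close();
        }
    }
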
SchemaInfo.class), + + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching schema from version", response = Void.class) }) + public Response getSchemaFromVersion(@ApiParam(value = "Group name",required=true) @PathParam("groupName") String groupName +,@ApiParam(value = "Schema type from SchemaInfo#type or VersionInfo#type",required=true) @PathParam("type") String type +,@ApiParam(value = "Version number",required=true) @PathParam("version") Integer version +,@Context SecurityContext securityContext) + throws NotFoundException { + return delegate.getSchemaFromVersion(groupName,type,version,securityContext); + } + @GET + @Path("/{groupName}/schemas/versions/{versionOrdinal}") + + @Produces({ "application/json" }) + @io.swagger.annotations.ApiOperation(value = "", notes = "Get schema from the version ordinal that uniquely identifies the schema in the group.", response = SchemaInfo.class, tags={ "Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Schema corresponding to the version", response = SchemaInfo.class), + + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching schema from version", response = Void.class) }) + public Response getSchemaFromVersionOrdinal(@ApiParam(value = "Group name",required=true) @PathParam("groupName") String groupName +,@ApiParam(value = "Version ordinal",required=true) @PathParam("versionOrdinal") Integer versionOrdinal +,@Context SecurityContext securityContext) + throws NotFoundException { + return delegate.getSchemaFromVersionOrdinal(groupName,versionOrdinal,securityContext); + } + @POST + @Path("/{groupName}/schemas/versions/find") + @Consumes({ "application/json" }) + @Produces({ "application/json" }) + @io.swagger.annotations.ApiOperation(value = "", notes = "Get the version for the schema if it is registered. It does not automatically register the schema. 
To add new schema use addSchema", response = VersionInfo.class, tags={ "Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Schema version", response = VersionInfo.class), + + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error fetching version for schema", response = Void.class) }) + public Response getSchemaVersion(@ApiParam(value = "Group name",required=true) @PathParam("groupName") String groupName +,@ApiParam(value = "Get schema corresponding to the version" ,required=true) SchemaInfo schemaInfo +,@Context SecurityContext securityContext) + throws NotFoundException { + return delegate.getSchemaVersion(groupName,schemaInfo,securityContext); + } + @GET + @Path("/{groupName}/schemas/versions") + + @Produces({ "application/json" }) + @io.swagger.annotations.ApiOperation(value = "", notes = "Get all schema versions for the group", response = SchemaVersionsList.class, tags={ "Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Versioned history of schemas registered under the group", response = SchemaVersionsList.class), + + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching Group schema versions", response = Void.class) }) + public Response getSchemaVersions(@ApiParam(value = "Group name",required=true) @PathParam("groupName") String groupName +,@ApiParam(value = "Type of object the schema describes.") @QueryParam("type") String type +,@Context SecurityContext securityContext) + throws NotFoundException { + return delegate.getSchemaVersions(groupName,type,securityContext); + } + @GET + @Path("/{groupName}/schemas") + + @Produces({ "application/json" }) + @io.swagger.annotations.ApiOperation(value = "", notes = "Fetch latest schema versions for all objects identified by SchemaInfo#type under a Group. 
If query param type is specified then latest schema for the type is returned.", response = SchemaVersionsList.class, tags={ "Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Latest schemas for all objects identified by SchemaInfo#type under the group", response = SchemaVersionsList.class), + + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching Group's latest schemas", response = Void.class) }) + public Response getSchemas(@ApiParam(value = "Group name",required=true) @PathParam("groupName") String groupName +,@ApiParam(value = "Type of object") @QueryParam("type") String type +,@Context SecurityContext securityContext) + throws NotFoundException { + return delegate.getSchemas(groupName,type,securityContext); + } + @GET + + + @Produces({ "application/json" }) + @io.swagger.annotations.ApiOperation(value = "", notes = "List all groups", response = ListGroupsResponse.class, tags={ "Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "List of all groups", response = ListGroupsResponse.class), + + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching the list of Groups", response = Void.class) }) + public Response listGroups(@ApiParam(value = "Continuation token") @QueryParam("continuationToken") String continuationToken +,@ApiParam(value = "The numbers of items to return") @QueryParam("limit") Integer limit +,@Context SecurityContext securityContext) + throws NotFoundException { + return delegate.listGroups(continuationToken,limit,securityContext); + } + @PUT + @Path("/{groupName}/rules") + @Consumes({ "application/json" }) + + @io.swagger.annotations.ApiOperation(value = "", notes = "update schema validation rules of an existing Group", response = Void.class, tags={ "Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Updated schema validation policy", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 409, message = "Write conflict", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while updating Group's schema validation rules", response = Void.class) }) + public Response updateSchemaValidationRules(@ApiParam(value = "Group name",required=true) @PathParam("groupName") String groupName +,@ApiParam(value = "update group policy" ,required=true) UpdateValidationRulesRequest updateValidationRulesRequest +,@Context SecurityContext securityContext) + throws NotFoundException { + return delegate.updateSchemaValidationRules(groupName,updateValidationRulesRequest,securityContext); + } + @POST + @Path("/{groupName}/schemas/versions/validate") + @Consumes({ "application/json" }) + @Produces({ "application/json" }) + @io.swagger.annotations.ApiOperation(value = "", notes = "Checks if given schema is compatible with schemas in the registry for current policy setting.", response = Valid.class, tags={ "Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Schema validation response", response = Valid.class), + + 
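(Aside: listGroups above is paginated with a continuation token and a limit. A rough sketch of fetching the first page follows; getContinuationToken() is an assumed accessor, since ListGroupsResponse is not part of this hunk, and the endpoint URI is hypothetical.)

    import javax.ws.rs.client.Client;
    import javax.ws.rs.client.ClientBuilder;
    import javax.ws.rs.core.MediaType;
    import io.pravega.schemaregistry.contract.generated.rest.model.ListGroupsResponse;

    public class ListGroupsExample {
        public static void main(String[] args) {
            Client client = ClientBuilder.newClient();
            ListGroupsResponse firstPage = client.target("http://localhost:9092")
                    .path("groups")
                    .queryParam("limit", 100)
                    .request(MediaType.APPLICATION_JSON)
                    .get(ListGroupsResponse.class);
            // Subsequent pages would add ?continuationToken=<token from the previous response>.
            // The accessor name below is an assumption.
            System.out.println("next continuation token: " + firstPage.getContinuationToken());
            client.close();
        }
    }
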
@io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while trying to validate schema", response = Void.class) }) + public Response validate(@ApiParam(value = "Group name",required=true) @PathParam("groupName") String groupName +,@ApiParam(value = "Checks if schema is valid with respect to supplied validation rules" ,required=true) ValidateRequest validateRequest +,@Context SecurityContext securityContext) + throws NotFoundException { + return delegate.validate(groupName,validateRequest,securityContext); + } +} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/GroupsApiService.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/GroupsApiService.java new file mode 100644 index 000000000..dd8d9ef40 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/GroupsApiService.java @@ -0,0 +1,54 @@ +package io.pravega.schemaregistry.contract.generated.rest.server.api; + +import io.pravega.schemaregistry.contract.generated.rest.server.api.*; +import io.pravega.schemaregistry.contract.generated.rest.model.*; + +import org.glassfish.jersey.media.multipart.FormDataContentDisposition; + +import io.pravega.schemaregistry.contract.generated.rest.model.CanRead; +import io.pravega.schemaregistry.contract.generated.rest.model.CodecTypesList; +import io.pravega.schemaregistry.contract.generated.rest.model.CreateGroupRequest; +import io.pravega.schemaregistry.contract.generated.rest.model.EncodingId; +import io.pravega.schemaregistry.contract.generated.rest.model.EncodingInfo; +import io.pravega.schemaregistry.contract.generated.rest.model.GetEncodingIdRequest; +import io.pravega.schemaregistry.contract.generated.rest.model.GroupHistory; +import io.pravega.schemaregistry.contract.generated.rest.model.GroupProperties; +import io.pravega.schemaregistry.contract.generated.rest.model.ListGroupsResponse; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaVersionsList; +import io.pravega.schemaregistry.contract.generated.rest.model.UpdateValidationRulesRequest; +import io.pravega.schemaregistry.contract.generated.rest.model.Valid; +import io.pravega.schemaregistry.contract.generated.rest.model.ValidateRequest; +import io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo; + +import java.util.List; +import io.pravega.schemaregistry.contract.generated.rest.server.api.NotFoundException; + +import java.io.InputStream; + +import javax.ws.rs.core.Response; +import javax.ws.rs.core.SecurityContext; +import javax.validation.constraints.*; + +public abstract class GroupsApiService { + public abstract Response addCodecType(String groupName,String codecType,SecurityContext securityContext) throws NotFoundException; + public abstract Response addSchema(String groupName,SchemaInfo schemaInfo,SecurityContext securityContext) throws NotFoundException; + public abstract Response canRead(String groupName,SchemaInfo schemaInfo,SecurityContext securityContext) throws NotFoundException; + public abstract Response createGroup(CreateGroupRequest createGroupRequest,SecurityContext securityContext) throws NotFoundException; + public abstract Response deleteGroup(String groupName,SecurityContext securityContext) throws NotFoundException; + public abstract 
Response deleteSchemaVersion(String groupName,String type,Integer version,SecurityContext securityContext) throws NotFoundException; + public abstract Response deleteSchemaVersionOrinal(String groupName,Integer versionOrdinal,SecurityContext securityContext) throws NotFoundException; + public abstract Response getCodecTypesList(String groupName,SecurityContext securityContext) throws NotFoundException; + public abstract Response getEncodingId(String groupName,GetEncodingIdRequest getEncodingIdRequest,SecurityContext securityContext) throws NotFoundException; + public abstract Response getEncodingInfo(String groupName,Integer encodingId,SecurityContext securityContext) throws NotFoundException; + public abstract Response getGroupHistory(String groupName,SecurityContext securityContext) throws NotFoundException; + public abstract Response getGroupProperties(String groupName,SecurityContext securityContext) throws NotFoundException; + public abstract Response getSchemaFromVersion(String groupName,String type,Integer version,SecurityContext securityContext) throws NotFoundException; + public abstract Response getSchemaFromVersionOrdinal(String groupName,Integer versionOrdinal,SecurityContext securityContext) throws NotFoundException; + public abstract Response getSchemaVersion(String groupName,SchemaInfo schemaInfo,SecurityContext securityContext) throws NotFoundException; + public abstract Response getSchemaVersions(String groupName, String type,SecurityContext securityContext) throws NotFoundException; + public abstract Response getSchemas(String groupName, String type,SecurityContext securityContext) throws NotFoundException; + public abstract Response listGroups( String continuationToken, Integer limit,SecurityContext securityContext) throws NotFoundException; + public abstract Response updateSchemaValidationRules(String groupName,UpdateValidationRulesRequest updateValidationRulesRequest,SecurityContext securityContext) throws NotFoundException; + public abstract Response validate(String groupName,ValidateRequest validateRequest,SecurityContext securityContext) throws NotFoundException; +} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/JacksonJsonProvider.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/JacksonJsonProvider.java new file mode 100644 index 000000000..e6179d25f --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/JacksonJsonProvider.java @@ -0,0 +1,18 @@ +package io.pravega.schemaregistry.contract.generated.rest.server.api; + +import com.fasterxml.jackson.databind.ObjectMapper; +import io.swagger.util.Json; +import javax.ws.rs.Produces; +import javax.ws.rs.core.MediaType; +import javax.ws.rs.ext.Provider; +import org.glassfish.jersey.jackson.internal.jackson.jaxrs.json.JacksonJaxbJsonProvider; + +@Provider +@Produces({MediaType.APPLICATION_JSON}) +public class JacksonJsonProvider extends JacksonJaxbJsonProvider { + private static ObjectMapper commonMapper = Json.mapper(); + + public JacksonJsonProvider() { + super.setMapper(commonMapper); + } +} \ No newline at end of file diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/NotFoundException.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/NotFoundException.java new file mode 100644 index 000000000..e9d99721b --- /dev/null +++ 
b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/NotFoundException.java @@ -0,0 +1,10 @@ +package io.pravega.schemaregistry.contract.generated.rest.server.api; + + +public class NotFoundException extends ApiException { + private int code; + public NotFoundException (int code, String msg) { + super(code, msg); + this.code = code; + } +} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/SchemasApi.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/SchemasApi.java new file mode 100644 index 000000000..295bd1d86 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/SchemasApi.java @@ -0,0 +1,74 @@ +package io.pravega.schemaregistry.contract.generated.rest.server.api; + +import io.pravega.schemaregistry.contract.generated.rest.model.*; +import io.pravega.schemaregistry.contract.generated.rest.server.api.SchemasApiService; +import io.pravega.schemaregistry.contract.generated.rest.server.api.factories.SchemasApiServiceFactory; + +import io.swagger.annotations.ApiParam; +import io.swagger.jaxrs.*; + +import io.pravega.schemaregistry.contract.generated.rest.model.AddedTo; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo; + +import java.util.Map; +import java.util.List; +import io.pravega.schemaregistry.contract.generated.rest.server.api.NotFoundException; + +import java.io.InputStream; + +import org.glassfish.jersey.media.multipart.FormDataContentDisposition; +import org.glassfish.jersey.media.multipart.FormDataParam; + +import javax.servlet.ServletConfig; +import javax.ws.rs.core.Context; +import javax.ws.rs.core.Response; +import javax.ws.rs.core.SecurityContext; +import javax.ws.rs.*; +import javax.validation.constraints.*; + +@Path("/schemas") + + +@io.swagger.annotations.Api(description = "the schemas API") + +public class SchemasApi { + private final SchemasApiService delegate; + + public SchemasApi(@Context ServletConfig servletContext) { + SchemasApiService delegate = null; + + if (servletContext != null) { + String implClass = servletContext.getInitParameter("SchemasApi.implementation"); + if (implClass != null && !"".equals(implClass.trim())) { + try { + delegate = (SchemasApiService) Class.forName(implClass).newInstance(); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + } + + if (delegate == null) { + delegate = SchemasApiServiceFactory.getSchemasApi(); + } + + this.delegate = delegate; + } + + @POST + @Path("/addedTo") + @Consumes({ "application/json" }) + @Produces({ "application/json" }) + @io.swagger.annotations.ApiOperation(value = "", notes = "Gets a map of groups to version info where the schema if it is registered. 
SchemaInfo#properties is ignored while comparing the schema.", response = AddedTo.class, tags={ "Schema", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Schema version", response = AddedTo.class), + + @io.swagger.annotations.ApiResponse(code = 404, message = "Schema not found", response = Void.class), + + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching Schema references", response = Void.class) }) + public Response getSchemaReferences(@ApiParam(value = "Get schema references for the supplied schema" ,required=true) SchemaInfo schemaInfo +,@Context SecurityContext securityContext) + throws NotFoundException { + return delegate.getSchemaReferences(schemaInfo,securityContext); + } +} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/SchemasApiService.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/SchemasApiService.java new file mode 100644 index 000000000..bcc19dd03 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/SchemasApiService.java @@ -0,0 +1,22 @@ +package io.pravega.schemaregistry.contract.generated.rest.server.api; + +import io.pravega.schemaregistry.contract.generated.rest.server.api.*; +import io.pravega.schemaregistry.contract.generated.rest.model.*; + +import org.glassfish.jersey.media.multipart.FormDataContentDisposition; + +import io.pravega.schemaregistry.contract.generated.rest.model.AddedTo; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo; + +import java.util.List; +import io.pravega.schemaregistry.contract.generated.rest.server.api.NotFoundException; + +import java.io.InputStream; + +import javax.ws.rs.core.Response; +import javax.ws.rs.core.SecurityContext; +import javax.validation.constraints.*; + +public abstract class SchemasApiService { + public abstract Response getSchemaReferences(SchemaInfo schemaInfo,SecurityContext securityContext) throws NotFoundException; +} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/StringUtil.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/StringUtil.java new file mode 100644 index 000000000..5d19e5e5f --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/StringUtil.java @@ -0,0 +1,42 @@ +package io.pravega.schemaregistry.contract.generated.rest.server.api; + + +public class StringUtil { + /** + * Check if the given array contains the given value (with case-insensitive comparison). + * + * @param array The array + * @param value The value to search + * @return true if the array contains the value + */ + public static boolean containsIgnoreCase(String[] array, String value) { + for (String str : array) { + if (value == null && str == null) return true; + if (value != null && value.equalsIgnoreCase(str)) return true; + } + return false; + } + + /** + * Join an array of strings with the given separator. + * + * Note: This might be replaced by utility method from commons-lang or guava someday + * if one of those libraries is added as dependency. + *
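(Aside: the /schemas/addedTo endpoint above answers with the groups, and the version in each group, where the posted schema is already registered. A small client sketch follows; the base URI is hypothetical, and the schema would be built as in the earlier addSchema sketch.)

    import javax.ws.rs.client.Client;
    import javax.ws.rs.client.Entity;
    import javax.ws.rs.core.MediaType;
    import io.pravega.schemaregistry.contract.generated.rest.model.AddedTo;
    import io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo;

    public class SchemaReferencesExample {
        // Returns the AddedTo document describing where the given schema is registered.
        // AddedTo's accessors are not shown in this hunk, so callers would inspect it per the model.
        static AddedTo findReferences(Client client, SchemaInfo schema) {
            return client.target("http://localhost:9092")   // hypothetical endpoint
                    .path("schemas").path("addedTo")
                    .request(MediaType.APPLICATION_JSON)
                    .post(Entity.json(schema), AddedTo.class);
        }
    }
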

+ * + * @param array The array of strings + * @param separator The separator + * @return the resulting string + */ + public static String join(String[] array, String separator) { + int len = array.length; + if (len == 0) return ""; + + StringBuilder out = new StringBuilder(); + out.append(array[0]); + for (int i = 1; i < len; i++) { + out.append(separator).append(array[i]); + } + return out.toString(); + } +} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/factories/GroupsApiServiceFactory.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/factories/GroupsApiServiceFactory.java new file mode 100644 index 000000000..3145181ad --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/factories/GroupsApiServiceFactory.java @@ -0,0 +1,13 @@ +package io.pravega.schemaregistry.contract.generated.rest.server.api.factories; + +import io.pravega.schemaregistry.contract.generated.rest.server.api.GroupsApiService; +import io.pravega.schemaregistry.contract.generated.rest.server.api.impl.GroupsApiServiceImpl; + + +public class GroupsApiServiceFactory { + private final static GroupsApiService service = new GroupsApiServiceImpl(); + + public static GroupsApiService getGroupsApi() { + return service; + } +} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/factories/SchemasApiServiceFactory.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/factories/SchemasApiServiceFactory.java new file mode 100644 index 000000000..8587b6fef --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/factories/SchemasApiServiceFactory.java @@ -0,0 +1,13 @@ +package io.pravega.schemaregistry.contract.generated.rest.server.api.factories; + +import io.pravega.schemaregistry.contract.generated.rest.server.api.SchemasApiService; +import io.pravega.schemaregistry.contract.generated.rest.server.api.impl.SchemasApiServiceImpl; + + +public class SchemasApiServiceFactory { + private final static SchemasApiService service = new SchemasApiServiceImpl(); + + public static SchemasApiService getSchemasApi() { + return service; + } +} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/impl/GroupsApiServiceImpl.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/impl/GroupsApiServiceImpl.java new file mode 100644 index 000000000..97aea99a9 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/impl/GroupsApiServiceImpl.java @@ -0,0 +1,134 @@ +package io.pravega.schemaregistry.contract.generated.rest.server.api.impl; + +import io.pravega.schemaregistry.contract.generated.rest.server.api.*; +import io.pravega.schemaregistry.contract.generated.rest.model.*; + +import io.pravega.schemaregistry.contract.generated.rest.model.CanRead; +import io.pravega.schemaregistry.contract.generated.rest.model.CodecTypesList; +import io.pravega.schemaregistry.contract.generated.rest.model.CreateGroupRequest; +import io.pravega.schemaregistry.contract.generated.rest.model.EncodingId; +import io.pravega.schemaregistry.contract.generated.rest.model.EncodingInfo; +import io.pravega.schemaregistry.contract.generated.rest.model.GetEncodingIdRequest; +import io.pravega.schemaregistry.contract.generated.rest.model.GroupHistory; +import 
io.pravega.schemaregistry.contract.generated.rest.model.GroupProperties; +import io.pravega.schemaregistry.contract.generated.rest.model.ListGroupsResponse; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaVersionsList; +import io.pravega.schemaregistry.contract.generated.rest.model.UpdateValidationRulesRequest; +import io.pravega.schemaregistry.contract.generated.rest.model.Valid; +import io.pravega.schemaregistry.contract.generated.rest.model.ValidateRequest; +import io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo; + +import java.util.List; +import io.pravega.schemaregistry.contract.generated.rest.server.api.NotFoundException; + +import java.io.InputStream; + +import org.glassfish.jersey.media.multipart.FormDataContentDisposition; + +import javax.ws.rs.core.Response; +import javax.ws.rs.core.SecurityContext; +import javax.validation.constraints.*; + +public class GroupsApiServiceImpl extends GroupsApiService { + @Override + public Response addCodecType(String groupName, String codecType, SecurityContext securityContext) throws NotFoundException { + // do some magic! + return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); + } + @Override + public Response addSchema(String groupName, SchemaInfo schemaInfo, SecurityContext securityContext) throws NotFoundException { + // do some magic! + return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); + } + @Override + public Response canRead(String groupName, SchemaInfo schemaInfo, SecurityContext securityContext) throws NotFoundException { + // do some magic! + return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); + } + @Override + public Response createGroup(CreateGroupRequest createGroupRequest, SecurityContext securityContext) throws NotFoundException { + // do some magic! + return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); + } + @Override + public Response deleteGroup(String groupName, SecurityContext securityContext) throws NotFoundException { + // do some magic! + return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); + } + @Override + public Response deleteSchemaVersion(String groupName, String type, Integer version, SecurityContext securityContext) throws NotFoundException { + // do some magic! + return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); + } + @Override + public Response deleteSchemaVersionOrinal(String groupName, Integer versionOrdinal, SecurityContext securityContext) throws NotFoundException { + // do some magic! + return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); + } + @Override + public Response getCodecTypesList(String groupName, SecurityContext securityContext) throws NotFoundException { + // do some magic! + return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); + } + @Override + public Response getEncodingId(String groupName, GetEncodingIdRequest getEncodingIdRequest, SecurityContext securityContext) throws NotFoundException { + // do some magic! 
+ return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); + } + @Override + public Response getEncodingInfo(String groupName, Integer encodingId, SecurityContext securityContext) throws NotFoundException { + // do some magic! + return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); + } + @Override + public Response getGroupHistory(String groupName, SecurityContext securityContext) throws NotFoundException { + // do some magic! + return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); + } + @Override + public Response getGroupProperties(String groupName, SecurityContext securityContext) throws NotFoundException { + // do some magic! + return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); + } + @Override + public Response getSchemaFromVersion(String groupName, String type, Integer version, SecurityContext securityContext) throws NotFoundException { + // do some magic! + return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); + } + @Override + public Response getSchemaFromVersionOrdinal(String groupName, Integer versionOrdinal, SecurityContext securityContext) throws NotFoundException { + // do some magic! + return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); + } + @Override + public Response getSchemaVersion(String groupName, SchemaInfo schemaInfo, SecurityContext securityContext) throws NotFoundException { + // do some magic! + return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); + } + @Override + public Response getSchemaVersions(String groupName, String type, SecurityContext securityContext) throws NotFoundException { + // do some magic! + return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); + } + @Override + public Response getSchemas(String groupName, String type, SecurityContext securityContext) throws NotFoundException { + // do some magic! + return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); + } + @Override + public Response listGroups( String continuationToken, Integer limit, SecurityContext securityContext) throws NotFoundException { + // do some magic! + return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); + } + @Override + public Response updateSchemaValidationRules(String groupName, UpdateValidationRulesRequest updateValidationRulesRequest, SecurityContext securityContext) throws NotFoundException { + // do some magic! + return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); + } + @Override + public Response validate(String groupName, ValidateRequest validateRequest, SecurityContext securityContext) throws NotFoundException { + // do some magic! 
+ return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); + } +} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/impl/SchemasApiServiceImpl.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/impl/SchemasApiServiceImpl.java new file mode 100644 index 000000000..565f0f2fb --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/impl/SchemasApiServiceImpl.java @@ -0,0 +1,26 @@ +package io.pravega.schemaregistry.contract.generated.rest.server.api.impl; + +import io.pravega.schemaregistry.contract.generated.rest.server.api.*; +import io.pravega.schemaregistry.contract.generated.rest.model.*; + +import io.pravega.schemaregistry.contract.generated.rest.model.AddedTo; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo; + +import java.util.List; +import io.pravega.schemaregistry.contract.generated.rest.server.api.NotFoundException; + +import java.io.InputStream; + +import org.glassfish.jersey.media.multipart.FormDataContentDisposition; + +import javax.ws.rs.core.Response; +import javax.ws.rs.core.SecurityContext; +import javax.validation.constraints.*; + +public class SchemasApiServiceImpl extends SchemasApiService { + @Override + public Response getSchemaReferences(SchemaInfo schemaInfo, SecurityContext securityContext) throws NotFoundException { + // do some magic! + return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); + } +} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/transform/ModelHelper.java b/contract/src/main/java/io/pravega/schemaregistry/contract/transform/ModelHelper.java new file mode 100644 index 000000000..fa261ab42 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/transform/ModelHelper.java @@ -0,0 +1,243 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
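(Aside: GroupsApiServiceImpl and SchemasApiServiceImpl above are the swagger-generated placeholder stubs that only return the "magic!" message. A real deployment supplies its own GroupsApiService, either via the GroupsApi.implementation servlet init-parameter read in the GroupsApi constructor or via a modified factory. A minimal sketch of overriding one stub follows; the entity values are hypothetical and only fluent setters visible in this patch are used.)

    import java.util.Collections;
    import javax.ws.rs.core.Response;
    import javax.ws.rs.core.SecurityContext;
    import io.pravega.schemaregistry.contract.generated.rest.model.GroupProperties;
    import io.pravega.schemaregistry.contract.generated.rest.model.SerializationFormat;
    import io.pravega.schemaregistry.contract.generated.rest.server.api.NotFoundException;
    import io.pravega.schemaregistry.contract.generated.rest.server.api.impl.GroupsApiServiceImpl;

    public class MyGroupsApiService extends GroupsApiServiceImpl {
        @Override
        public Response getGroupProperties(String groupName, SecurityContext securityContext) throws NotFoundException {
            // Hypothetical: answer with the group's properties instead of the stub message.
            GroupProperties properties = new GroupProperties()
                    .serializationFormat(new SerializationFormat()
                            .serializationFormat(SerializationFormat.SerializationFormatEnum.CUSTOM)
                            .customTypeName("myFormat"))
                    .allowMultipleTypes(false)
                    .properties(Collections.emptyMap());
            return Response.ok().entity(properties).build();
        }
    }

Such a class could then be named in the GroupsApi.implementation init-parameter, since the generated resource instantiates it reflectively and falls back to GroupsApiServiceFactory otherwise.
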
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.contract.transform; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.base.Preconditions; +import com.google.common.collect.ImmutableMap; +import io.pravega.schemaregistry.contract.generated.rest.model.Compatibility; +import io.pravega.schemaregistry.contract.generated.rest.model.EncodingId; +import io.pravega.schemaregistry.contract.generated.rest.model.EncodingInfo; +import io.pravega.schemaregistry.contract.generated.rest.model.GroupHistoryRecord; +import io.pravega.schemaregistry.contract.generated.rest.model.GroupProperties; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaValidationRule; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaValidationRules; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaWithVersion; +import io.pravega.schemaregistry.contract.generated.rest.model.SerializationFormat; +import io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo; +import org.apache.commons.lang3.NotImplementedException; + +import java.nio.ByteBuffer; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +/** + * Provides translation (encode/decode) between the Model classes and its REST representation. + */ +public class ModelHelper { + private static final ObjectMapper MAPPER = new ObjectMapper(); + + // region decode + public static io.pravega.schemaregistry.contract.data.SchemaInfo decode(SchemaInfo schemaInfo) { + Preconditions.checkArgument(schemaInfo != null); + Preconditions.checkArgument(schemaInfo.getType() != null); + Preconditions.checkArgument(schemaInfo.getSerializationFormat() != null); + Preconditions.checkArgument(schemaInfo.getProperties() != null); + Preconditions.checkArgument(schemaInfo.getSchemaData() != null); + io.pravega.schemaregistry.contract.data.SerializationFormat serializationFormat = decode(schemaInfo.getSerializationFormat()); + return new io.pravega.schemaregistry.contract.data.SchemaInfo(schemaInfo.getType(), + serializationFormat, ByteBuffer.wrap(schemaInfo.getSchemaData()), ImmutableMap.copyOf(schemaInfo.getProperties())); + } + + public static io.pravega.schemaregistry.contract.data.SerializationFormat decode(SerializationFormat serializationFormat) { + Preconditions.checkArgument(serializationFormat != null); + switch (serializationFormat.getSerializationFormat()) { + case CUSTOM: + Preconditions.checkArgument(serializationFormat.getCustomTypeName() != null); + return io.pravega.schemaregistry.contract.data.SerializationFormat.custom(serializationFormat.getCustomTypeName()); + default: + return searchEnum(io.pravega.schemaregistry.contract.data.SerializationFormat.class, serializationFormat.getSerializationFormat().name()); + } + } + + public static io.pravega.schemaregistry.contract.data.SchemaValidationRules decode(SchemaValidationRules rules) { + Preconditions.checkArgument(rules != null); + Preconditions.checkArgument(rules.getRules() != null); + List list = rules.getRules().entrySet().stream().map(rule -> { + if (rule.getValue().getRule() instanceof Map) { + String name = (String) ((Map) rule.getValue().getRule()).get("name"); + Preconditions.checkArgument(name.equals(Compatibility.class.getSimpleName())); + + return decode(MAPPER.convertValue(rule.getValue().getRule(), Compatibility.class)); + } else 
if (rule.getValue().getRule() instanceof Compatibility) { + return decode((Compatibility) rule.getValue().getRule()); + } else { + throw new IllegalArgumentException("Rule not supported"); + } + }).collect(Collectors.toList()); + return io.pravega.schemaregistry.contract.data.SchemaValidationRules.of(list); + } + + public static io.pravega.schemaregistry.contract.data.Compatibility decode(Compatibility compatibility) { + Preconditions.checkArgument(compatibility.getName() != null); + Preconditions.checkArgument(compatibility.getPolicy() != null); + if (compatibility.getPolicy().equals(Compatibility.PolicyEnum.BACKWARDTILL)) { + Preconditions.checkArgument(compatibility.getBackwardTill() != null); + } + if (compatibility.getPolicy().equals(Compatibility.PolicyEnum.FORWARDTILL)) { + Preconditions.checkArgument(compatibility.getForwardTill() != null); + } + if (compatibility.getPolicy().equals(Compatibility.PolicyEnum.BACKWARDANDFORWARDTILL)) { + Preconditions.checkArgument(compatibility.getBackwardTill() != null); + Preconditions.checkArgument(compatibility.getForwardTill() != null); + } + + io.pravega.schemaregistry.contract.data.VersionInfo backwardTill = compatibility.getBackwardTill() == null ? null : decode(compatibility.getBackwardTill()); + io.pravega.schemaregistry.contract.data.VersionInfo forwardTill = compatibility.getForwardTill() == null ? null : decode(compatibility.getForwardTill()); + + return new io.pravega.schemaregistry.contract.data.Compatibility( + searchEnum(io.pravega.schemaregistry.contract.data.Compatibility.Type.class, compatibility.getPolicy().name()), + backwardTill, forwardTill); + } + + public static io.pravega.schemaregistry.contract.data.VersionInfo decode(VersionInfo versionInfo) { + Preconditions.checkArgument(versionInfo != null); + Preconditions.checkArgument(versionInfo.getType() != null); + Preconditions.checkArgument(versionInfo.getVersion() != null); + Preconditions.checkArgument(versionInfo.getOrdinal() != null); + return new io.pravega.schemaregistry.contract.data.VersionInfo(versionInfo.getType(), versionInfo.getVersion(), versionInfo.getOrdinal()); + } + + public static io.pravega.schemaregistry.contract.data.EncodingInfo decode(EncodingInfo encodingInfo) { + Preconditions.checkArgument(encodingInfo != null); + return new io.pravega.schemaregistry.contract.data.EncodingInfo(decode(encodingInfo.getVersionInfo()), + decode(encodingInfo.getSchemaInfo()), encodingInfo.getCodecType()); + } + + public static io.pravega.schemaregistry.contract.data.SchemaWithVersion decode(SchemaWithVersion schemaWithVersion) { + Preconditions.checkArgument(schemaWithVersion != null); + return new io.pravega.schemaregistry.contract.data.SchemaWithVersion(decode(schemaWithVersion.getSchemaInfo()), + decode(schemaWithVersion.getVersion())); + } + + public static io.pravega.schemaregistry.contract.data.GroupHistoryRecord decode(GroupHistoryRecord schemaEvolution) { + Preconditions.checkArgument(schemaEvolution != null); + + return new io.pravega.schemaregistry.contract.data.GroupHistoryRecord(decode(schemaEvolution.getSchemaInfo()), + decode(schemaEvolution.getVersion()), decode(schemaEvolution.getValidationRules()), schemaEvolution.getTimestamp(), + schemaEvolution.getSchemaString()); + } + + public static io.pravega.schemaregistry.contract.data.EncodingId decode(EncodingId encodingId) { + Preconditions.checkArgument(encodingId != null); + Preconditions.checkArgument(encodingId.getEncodingId() != null); + + return new 
io.pravega.schemaregistry.contract.data.EncodingId(encodingId.getEncodingId()); + } + + public static io.pravega.schemaregistry.contract.data.GroupProperties decode(GroupProperties groupProperties) { + Preconditions.checkArgument(groupProperties != null); + Preconditions.checkArgument(groupProperties.isAllowMultipleTypes() != null); + + return io.pravega.schemaregistry.contract.data.GroupProperties.builder().serializationFormat(decode(groupProperties.getSerializationFormat())) + .schemaValidationRules(decode(groupProperties.getSchemaValidationRules())).allowMultipleTypes(groupProperties.isAllowMultipleTypes()) + .properties(ImmutableMap.copyOf(groupProperties.getProperties())).build(); + } + // endregion + + // region encode + public static GroupHistoryRecord encode(io.pravega.schemaregistry.contract.data.GroupHistoryRecord groupHistoryRecord) { + return new GroupHistoryRecord().schemaInfo(encode(groupHistoryRecord.getSchema())) + .version(encode(groupHistoryRecord.getVersion())) + .validationRules(encode(groupHistoryRecord.getRules())) + .timestamp(groupHistoryRecord.getTimestamp()) + .schemaString(groupHistoryRecord.getSchemaString()); + } + + public static SchemaValidationRules encode(io.pravega.schemaregistry.contract.data.SchemaValidationRules rules) { + Map<String, SchemaValidationRule> map = rules.getRules().entrySet().stream().collect(Collectors.toMap(rule -> { + if (rule.getValue() instanceof io.pravega.schemaregistry.contract.data.Compatibility) { + return io.pravega.schemaregistry.contract.generated.rest.model.Compatibility.class.getSimpleName(); + } else { + throw new NotImplementedException("Rule not implemented"); + } + }, rule -> { + SchemaValidationRule schemaValidationRule; + if (rule.getValue() instanceof io.pravega.schemaregistry.contract.data.Compatibility) { + schemaValidationRule = new SchemaValidationRule().rule(encode((io.pravega.schemaregistry.contract.data.Compatibility) rule.getValue())); + } else { + throw new NotImplementedException("Rule not implemented"); + } + return schemaValidationRule; + })); + return new SchemaValidationRules().rules(map); + } + + public static Compatibility encode(io.pravega.schemaregistry.contract.data.Compatibility compatibility) { + Compatibility policy = new io.pravega.schemaregistry.contract.generated.rest.model.Compatibility() + .name(compatibility.getName()) + .policy(searchEnum(Compatibility.PolicyEnum.class, compatibility.getCompatibility().name())); + if (compatibility.getBackwardTill() != null) { + VersionInfo backwardTill = encode(compatibility.getBackwardTill()); + policy = policy.backwardTill(backwardTill); + } + if (compatibility.getForwardTill() != null) { + VersionInfo forwardTill = encode(compatibility.getForwardTill()); + policy = policy.forwardTill(forwardTill); + } + return policy; + } + + public static SchemaWithVersion encode(io.pravega.schemaregistry.contract.data.SchemaWithVersion schemaWithVersion) { + return new SchemaWithVersion().schemaInfo(encode(schemaWithVersion.getSchemaInfo())) + .version(encode(schemaWithVersion.getVersionInfo())); + } + + public static GroupProperties encode(io.pravega.schemaregistry.contract.data.GroupProperties groupProperties) { + return new GroupProperties() + .serializationFormat(encode(groupProperties.getSerializationFormat())) + .properties(groupProperties.getProperties()) + .allowMultipleTypes(groupProperties.isAllowMultipleTypes()) + .schemaValidationRules(encode(groupProperties.getSchemaValidationRules())); + } + + public static VersionInfo encode(io.pravega.schemaregistry.contract.data.VersionInfo
versionInfo) { + return new VersionInfo().type(versionInfo.getType()).version(versionInfo.getVersion()).ordinal(versionInfo.getOrdinal()); + } + + public static SchemaInfo encode(io.pravega.schemaregistry.contract.data.SchemaInfo schemaInfo) { + return new SchemaInfo().properties(schemaInfo.getProperties()).schemaData(schemaInfo.getSchemaData().array()) + .type(schemaInfo.getType()).serializationFormat(encode(schemaInfo.getSerializationFormat())); + } + + public static SerializationFormat encode(io.pravega.schemaregistry.contract.data.SerializationFormat serializationFormat) { + if (serializationFormat.equals(io.pravega.schemaregistry.contract.data.SerializationFormat.Custom)) { + Preconditions.checkArgument(serializationFormat.getCustomTypeName() != null); + SerializationFormat serializationFormatModel = new SerializationFormat().serializationFormat(SerializationFormat.SerializationFormatEnum.CUSTOM); + return serializationFormatModel.customTypeName(serializationFormat.getCustomTypeName()); + } else { + return new SerializationFormat().serializationFormat( + searchEnum(SerializationFormat.SerializationFormatEnum.class, serializationFormat.name())); + } + } + + public static EncodingId encode(io.pravega.schemaregistry.contract.data.EncodingId encodingId) { + return new EncodingId().encodingId(encodingId.getId()); + } + + public static EncodingInfo encode(io.pravega.schemaregistry.contract.data.EncodingInfo encodingInfo) { + return new EncodingInfo().codecType(encodingInfo.getCodecType()) + .versionInfo(encode(encodingInfo.getVersionInfo())) + .schemaInfo(encode(encodingInfo.getSchemaInfo())); + } + + // endregion + + private static <T extends Enum<T>> T searchEnum(Class<T> enumeration, String search) { + for (T each : enumeration.getEnumConstants()) { + if (each.name().compareToIgnoreCase(search) == 0) { + return each; + } + } + throw new IllegalArgumentException(); + } +} \ No newline at end of file diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/v1/ApiV1.java b/contract/src/main/java/io/pravega/schemaregistry/contract/v1/ApiV1.java new file mode 100644 index 000000000..7ce526198 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/v1/ApiV1.java @@ -0,0 +1,554 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + *

+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.contract.v1; + +import io.pravega.schemaregistry.contract.generated.rest.model.AddedTo; +import io.pravega.schemaregistry.contract.generated.rest.model.CanRead; +import io.pravega.schemaregistry.contract.generated.rest.model.CodecTypesList; +import io.pravega.schemaregistry.contract.generated.rest.model.CreateGroupRequest; +import io.pravega.schemaregistry.contract.generated.rest.model.EncodingId; +import io.pravega.schemaregistry.contract.generated.rest.model.EncodingInfo; +import io.pravega.schemaregistry.contract.generated.rest.model.GetEncodingIdRequest; +import io.pravega.schemaregistry.contract.generated.rest.model.GroupHistory; +import io.pravega.schemaregistry.contract.generated.rest.model.GroupProperties; +import io.pravega.schemaregistry.contract.generated.rest.model.ListGroupsResponse; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaVersionsList; +import io.pravega.schemaregistry.contract.generated.rest.model.UpdateValidationRulesRequest; +import io.pravega.schemaregistry.contract.generated.rest.model.Valid; +import io.pravega.schemaregistry.contract.generated.rest.model.ValidateRequest; +import io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo; +import io.pravega.schemaregistry.contract.generated.rest.server.api.NotFoundException; +import io.swagger.annotations.ApiParam; + +import javax.ws.rs.Consumes; +import javax.ws.rs.DELETE; +import javax.ws.rs.GET; +import javax.ws.rs.POST; +import javax.ws.rs.PUT; +import javax.ws.rs.Path; +import javax.ws.rs.PathParam; +import javax.ws.rs.Produces; +import javax.ws.rs.QueryParam; +import javax.ws.rs.container.AsyncResponse; +import javax.ws.rs.container.Suspended; +import javax.ws.rs.core.Response; + +public class ApiV1 { + @Path("/ping") + public interface Ping { + @GET + Response ping(); + } + + /** + * Sync Group apis. Identical to {@link GroupsApiAsync}. All methods in this interface are synchronous and return a {@link Response} object. + * The purpose of this interface is to be used by the proxy client.
+ */ + @Path("/v1/groups") + @io.swagger.annotations.Api(description = "the groups API") + public interface GroupsApi { + @POST + @Path("/{groupName}/codecTypes") + @Consumes({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Adds a new codecType to the group.", response = Void.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 201, message = "Successfully added codecType to group", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while creating a Group", response = Void.class)}) + Response addCodecType(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "The codec type", required = true) String codecType); + + @POST + @Path("/{groupName}/schemas/versions") + @Consumes({"application/json"}) + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Adds a new schema to the group", response = VersionInfo.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 201, message = "Successfully added schema to the group", response = VersionInfo.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 409, message = "Incompatible schema", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 417, message = "Invalid serialization format", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while creating a Group", response = Void.class)}) + Response addSchema(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "Add new schema to group", required = true) SchemaInfo schemaInfo); + + @POST + @Path("/{groupName}/schemas/versions/canRead") + @Consumes({"application/json"}) + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Checks if given schema can be used for reads subject to compatibility policy in the schema validation rules.", response = CanRead.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Response to tell whether schema can be used to read existing schemas", response = CanRead.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while checking schema for readability", response = Void.class)}) + Response canRead(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "Checks if schema can be used to read the data in the stream based on compatibility rules.", required = true) SchemaInfo schemaInfo); + + @POST + @Consumes({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Create a new Group", response = Void.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 201, message = "Successfully added group", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 409, message = "Group with given name already exists", 
response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while creating a Group", response = Void.class)}) + Response createGroup(@ApiParam(value = "The Group configuration", required = true) CreateGroupRequest createGroupRequest); + + @DELETE + @Path("/{groupName}") + @io.swagger.annotations.ApiOperation(value = "", notes = "Delete a Group", response = Void.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 204, message = "Successfully deleted the Group", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while deleting the Group", response = Void.class)}) + Response deleteGroup(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName); + + @GET + @Path("/{groupName}/codecTypes") + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Get codecTypes for the group.", response = CodecTypesList.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Found CodecTypes", response = CodecTypesList.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group or encoding id with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching codecTypes registered", response = Void.class)}) + Response getCodecTypesList(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName); + + @GET + @Path("/{groupName}/encodings/{encodingId}") + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Get the encoding information corresponding to the encoding id.", response = EncodingInfo.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Found Encoding", response = EncodingInfo.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group or encoding id with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while getting encoding info corresponding to encoding id", response = Void.class)}) + Response getEncodingInfo(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "Encoding id that identifies a unique combination of schema and codecType", required = true) @PathParam("encodingId") Integer encodingId); + + @GET + @Path("/{groupName}") + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Fetch the properties of an existing Group", response = GroupProperties.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Found Group properties", response = GroupProperties.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching Group details", response = Void.class)}) + Response getGroupProperties(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName); + + @GET + @Path("/{groupName}/history") + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = 
"Fetch the history of schema evolution of a Group", response = GroupHistory.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Found Group history", response = GroupHistory.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching Group history", response = Void.class)}) + Response getGroupHistory(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName); + + @GET + @Path("/{groupName}/schemas/versions") + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Get all schema versions for the group", response = SchemaVersionsList.class, tags = {"Schema", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Versioned history of schemas registered under the group", response = SchemaVersionsList.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching Group details", response = Void.class)}) + Response getSchemaVersions(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "Type") @QueryParam("type") String type); + + @GET + @Path("/{groupName}/schemas") + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Fetch latest schema versions for all objects identified by SchemaInfo#type under a Group. If query param type is specified then latest schema for the type is returned.", response = SchemaVersionsList.class, tags = {"Schema", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Latest schemas for all objects identified by SchemaInfo#type under the group", response = SchemaVersionsList.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching Group details", response = Void.class)}) + Response getSchemas(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "Type of object") @QueryParam("type") String type); + + @PUT + @Path("/{groupName}/encodings") + @Consumes({"application/json"}) + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Get an encoding id that uniquely identifies a schema version and codec type pair.", response = EncodingId.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Found Encoding", response = EncodingId.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name or version not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 412, message = "Codec type not registered", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while getting encoding id", response = Void.class)}) + Response getEncodingId(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "Get schema 
corresponding to the version", required = true) GetEncodingIdRequest getEncodingIdRequest); + + @DELETE + @Path("/{groupName}/schemas/versions/{versionOrdinal}") + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Delete schema version from the group.", response = Void.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 204, message = "Schema corresponding to the version deleted", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while deleting schema from group", response = Void.class)}) + Response deleteSchemaFromVersionOrdinal(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "version ordinal", required = true) @PathParam("versionOrdinal") Integer version); + + @GET + @Path("/{groupName}/schemas/versions/{versionOrdinal}") + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Get schema from the version ordinal that uniquely identifies the schema in the group.", response = SchemaInfo.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Schema corresponding to the version", response = SchemaInfo.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching schema from version", response = Void.class)}) + Response getSchemaFromVersionOrdinal(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "version ordinal", required = true) @PathParam("versionOrdinal") Integer version); + + @GET + @Path("/{groupName}/schemas/{type}/versions/{version}") + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Get schema from the version ordinal that uniquely identifies the schema in the group.", response = SchemaInfo.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Schema corresponding to the version", response = SchemaInfo.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching schema from version", response = Void.class)}) + public Response getSchemaFromVersion(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "Schema type from SchemaInfo#type or VersionInfo#type", required = true) @PathParam("type") String type, + @ApiParam(value = "Version number", required = true) @PathParam("version") Integer version); + + @DELETE + @Path("/{groupName}/schemas/{type}/versions/{version}") + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Delete schema version from the group.", response = Void.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 204, message = "Schema corresponding to the version", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 404, 
message = "Group with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while deleting schema from group", response = Void.class)}) + Response deleteSchemaVersion(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "Schema type from SchemaInfo#type or VersionInfo#type", required = true) @PathParam("type") String type, + @ApiParam(value = "Version number", required = true) @PathParam("version") Integer version); + + @POST + @Path("/{groupName}/schemas/versions/find") + @Consumes({"application/json"}) + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Get the version for the schema if it is registered.", response = VersionInfo.class, tags = {"Schema", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Schema version", response = VersionInfo.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching Group details", response = Void.class)}) + Response getSchemaVersion(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "Get schema corresponding to the version", required = true) SchemaInfo schemaInfo); + + @GET + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "List all groups", response = ListGroupsResponse.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "List of all groups", response = ListGroupsResponse.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching the list of Groups", response = Void.class)}) + Response listGroups(@ApiParam(value = "Continuation token") @QueryParam("continuationToken") String continuationToken, + @ApiParam(value = "The numbers of items to return") @QueryParam("limit") Integer limit); + + @PUT + @Path("/{groupName}/rules") + @Consumes({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "update schema validation rules of an existing Group", response = Void.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Updated schema validation policy", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 409, message = "Write conflict", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while updating Group's schema validation rules", response = Void.class)}) + Response updateSchemaValidationRules(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "update group policy", required = true) UpdateValidationRulesRequest updateValidationRulesRequest); + + @POST + @Path("/{groupName}/schemas/versions/validate") + @Consumes({"application/json"}) + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Checks if given schema is compatible with schemas in the registry for current policy setting.", response = Valid.class, tags = {"Group", }) + 
@io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Schema validation response", response = Valid.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while trying to validate schema", response = Void.class)}) + Response validate(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "Checks if schema is valid with respect to supplied validation rules", required = true) ValidateRequest validateRequest); + } + + /** + * ASync Group apis. Identical to {@link GroupsApi}. All methods in this interface are asynchronous and use + * {@link AsyncResponse}. This is used on service side so that all api implementation is asynchronous. + */ + @Path("/v1/groups") + @io.swagger.annotations.Api(description = "the groups API") + public interface GroupsApiAsync { + @POST + @Path("/{groupName}/codecTypes") + @Consumes({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Adds a new codecType to the group.", response = Void.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 201, message = "Successfully added codecType to group", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while registering codectype to a Group", response = Void.class)}) + void addCodecType(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "Add codec type", required = true) String codecType, @Suspended AsyncResponse asyncResponse) throws NotFoundException; + + @POST + @Path("/{groupName}/schemas/versions") + @Consumes({"application/json"}) + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Adds a new schema to the group", response = VersionInfo.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 201, message = "Successfully added schema to the group", response = VersionInfo.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 409, message = "Incompatible schema", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 417, message = "Invalid serialization format", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while adding a schema", response = Void.class)}) + void addSchema(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "Add new schema to group", required = true) SchemaInfo schemaInfo, @Suspended AsyncResponse asyncResponse) throws NotFoundException; + + @POST + @Path("/{groupName}/schemas/versions/canRead") + @Consumes({"application/json"}) + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Checks if given schema can be used for reads subject to compatibility policy in the schema validation rules.", response = CanRead.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Response to tell 
whether schema can be used to read existing schemas", response = CanRead.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while checking schema for readability", response = Void.class)}) + void canRead(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "Checks if schema can be used to read the data in the stream based on compatibility rules.", required = true) SchemaInfo schemaInfo, @Suspended AsyncResponse asyncResponse) throws NotFoundException; + + @POST + @Consumes({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Create a new Group", response = Void.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 201, message = "Successfully added group", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 409, message = "Group with given name already exists", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while creating a Group", response = Void.class)}) + void createGroup(@ApiParam(value = "The Group configuration", required = true) CreateGroupRequest createGroupRequest, @Suspended AsyncResponse asyncResponse) throws NotFoundException; + + @DELETE + @Path("/{groupName}") + @io.swagger.annotations.ApiOperation(value = "", notes = "Delete a Group", response = Void.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 204, message = "Successfully deleted the Group", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while deleting the Group", response = Void.class)}) + void deleteGroup(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, @Suspended AsyncResponse asyncResponse) throws NotFoundException; + + @GET + @Path("/{groupName}/codecTypes") + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Get codecTypes for the group.", response = CodecTypesList.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Found CodecTypes", response = CodecTypesList.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group or encoding id with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching codecTypes registered", response = Void.class)}) + void getCodecTypesList(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, @Suspended AsyncResponse asyncResponse) throws NotFoundException; + + @GET + @Path("/{groupName}/encodings/{encodingId}") + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Get the encoding information corresponding to the encoding id.", response = EncodingInfo.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Found Encoding", response = EncodingInfo.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group or encoding id with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = 
"Internal server error while getting encoding info corresponding to encoding id", response = Void.class)}) + void getEncodingInfo(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "Encoding id that identifies a unique combination of schema and codecType", required = true) @PathParam("encodingId") Integer encodingId, @Suspended AsyncResponse asyncResponse) throws NotFoundException; + + @GET + @Path("/{groupName}") + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Fetch the properties of an existing Group", response = GroupProperties.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Found Group properties", response = GroupProperties.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching Group details", response = Void.class)}) + void getGroupProperties(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, @Suspended AsyncResponse asyncResponse) throws NotFoundException; + + @GET + @Path("/{groupName}/history") + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Fetch the history of schema evolution of a Group", response = GroupHistory.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Found Group history", response = GroupHistory.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching Group history", response = Void.class)}) + void getGroupHistory(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, @Suspended AsyncResponse asyncResponse) throws NotFoundException; + + @GET + @Path("/{groupName}/schemas/versions") + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Get all schema versions for the group.", response = SchemaVersionsList.class, tags = {"Schema", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Versioned history of schemas registered under the group", response = SchemaVersionsList.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching Group details", response = Void.class)}) + void getSchemaVersions(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "Type") @QueryParam("type") String type, + @Suspended AsyncResponse asyncResponse) throws NotFoundException; + + @GET + @Path("/{groupName}/schemas") + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Fetch latest schema versions for all objects identified by SchemaInfo#type under a Group. 
If query param type is specified then latest schema for the type is returned.", response = SchemaVersionsList.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Latest schemas for all objects identified by SchemaInfo#type under the group", response = SchemaVersionsList.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching Group's latest schemas", response = Void.class)}) + void getSchemas(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "Type of object") @QueryParam("type") String type, + @Suspended AsyncResponse asyncResponse) throws NotFoundException; + + @PUT + @Path("/{groupName}/encodings") + @Consumes({"application/json"}) + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Get an encoding id that uniquely identifies a schema version and codec type pair.", response = EncodingId.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Found Encoding", response = EncodingId.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name or version not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 412, message = "Codec type not registered", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while getting encoding id", response = Void.class)}) + void getEncodingId(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "Get schema corresponding to the version", required = true) GetEncodingIdRequest getEncodingIdRequest, @Suspended AsyncResponse asyncResponse) throws NotFoundException; + + @GET + @Path("/{groupName}/schemas/versions/{versionOrdinal}") + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Get schema from the version ordinal that uniquely identifies the schema in the group.", response = SchemaInfo.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Schema corresponding to the version", response = SchemaInfo.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching schema from version", response = Void.class)}) + void getSchemaFromVersionOrdinal(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "version ordinal", required = true) @PathParam("versionOrdinal") Integer version, @Suspended AsyncResponse asyncResponse) throws NotFoundException; + + @DELETE + @Path("/{groupName}/schemas/versions/{versionOrdinal}") + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Delete schema version from the group.", response = Void.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 204, message = "Schema corresponding to the version", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given 
name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while deleting schema from group", response = Void.class)}) + void deleteSchemaFromVersionOrdinal(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "version ordinal", required = true) @PathParam("versionOrdinal") Integer version, @Suspended AsyncResponse asyncResponse) throws NotFoundException; + + @GET + @Path("/{groupName}/schemas/{type}/versions/{version}") + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Get schema from the version ordinal that uniquely identifies the schema in the group.", response = SchemaInfo.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Schema corresponding to the version", response = SchemaInfo.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching schema from version", response = Void.class)}) + void getSchemaFromVersion(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "Schema type from SchemaInfo#type or VersionInfo#type", required = true) @PathParam("type") String type, + @ApiParam(value = "Version number", required = true) @PathParam("version") Integer version, + @Suspended AsyncResponse asyncResponse); + + @DELETE + @Path("/{groupName}/schemas/{type}/versions/{version}") + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Delete schema version from the group.", response = Void.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 204, message = "Schema corresponding to the version", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while deleting schema from group", response = Void.class)}) + void deleteSchemaVersion(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "Schema type from SchemaInfo#type or VersionInfo#type", required = true) @PathParam("type") String type, + @ApiParam(value = "Version number", required = true) @PathParam("version") Integer version, + @Suspended AsyncResponse asyncResponse); + + @POST + @Path("/{groupName}/schemas/versions/find") + @Consumes({"application/json"}) + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Get the version for the schema if it is registered.", response = VersionInfo.class, tags = {"Schema", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Schema version", response = VersionInfo.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching Group details", response = Void.class)}) + void getSchemaVersion(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "Get schema corresponding to the version", required 
= true) SchemaInfo schemaInfo, + @Suspended AsyncResponse asyncResponse) throws NotFoundException; + + @GET + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "List all groups", response = ListGroupsResponse.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "List of all groups", response = ListGroupsResponse.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching the list of Groups", response = Void.class)}) + void listGroups(@ApiParam(value = "Continuation token") @QueryParam("continuationToken") String continuationToken, + @ApiParam(value = "The numbers of items to return") @QueryParam("limit") Integer limit, @Suspended AsyncResponse asyncResponse) throws NotFoundException; + + @PUT + @Path("/{groupName}/rules") + @Consumes({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "update schema validation rules of an existing Group", response = Void.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Updated schema validation policy", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 409, message = "Write conflict", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while updating Group's schema validation rules", response = Void.class)}) + void updateSchemaValidationRules(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "update group policy", required = true) UpdateValidationRulesRequest updateValidationRulesRequest, @Suspended AsyncResponse asyncResponse) throws NotFoundException; + + @POST + @Path("/{groupName}/schemas/versions/validate") + @Consumes({"application/json"}) + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Checks if given schema is compatible with schemas in the registry for current policy setting.", response = Valid.class, tags = {"Group", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Schema validation response", response = Valid.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while trying to validate schema", response = Void.class)}) + void validate(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "Checks if schema is valid with respect to supplied validation rules", required = true) ValidateRequest validateRequest, @Suspended AsyncResponse asyncResponse) throws NotFoundException; + } + + + /** + * Sync Schemas apis. Identical to {@link SchemasApiAsync}. All methods in this interface are synchronous and return {@link Response} object. + * The purposes of this interface is to be used by proxy-client. 
+ */ + @Path("/v1/schemas") + @io.swagger.annotations.Api(description = "the schemas API") + public interface SchemasApi { + @POST + @Path("/addedTo") + @Consumes({"application/json"}) + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Gets a map of groups to version info where the schema if it is registered. SchemaInfo#properties is ignored while comparing the schema.", response = AddedTo.class, tags = {"Schema", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Schema version", response = AddedTo.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Schema not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching Schema references", response = Void.class)}) + Response getSchemaReferences(@ApiParam(value = "Get schema references for the supplied schema", required = true) SchemaInfo schemaInfo); + + } + + /** + * Sync Schemas apis. Identical to {@link SchemasApi}. All methods in this interface are asynchronous. + */ + @Path("/v1/schemas") + @io.swagger.annotations.Api(description = "the schemas API") + public interface SchemasApiAsync { + @POST + @Path("/addedTo") + @Consumes({"application/json"}) + @Produces({"application/json"}) + @io.swagger.annotations.ApiOperation(value = "", notes = "Gets a map of groups to version info where the schema if it is registered. SchemaInfo#properties is ignored while comparing the schema.", response = AddedTo.class, tags = {"Schema", }) + @io.swagger.annotations.ApiResponses(value = { + @io.swagger.annotations.ApiResponse(code = 200, message = "Schema version", response = AddedTo.class), + @io.swagger.annotations.ApiResponse(code = 404, message = "Schema not found", response = Void.class), + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching Schema references", response = Void.class)}) + void getSchemaReferences(@ApiParam(value = "Get schema references for the supplied schema", required = true) SchemaInfo schemaInfo, + @Suspended AsyncResponse asyncResponse); + } + +} diff --git a/contract/src/main/swagger/README.md b/contract/src/main/swagger/README.md new file mode 100644 index 000000000..bc196acf3 --- /dev/null +++ b/contract/src/main/swagger/README.md @@ -0,0 +1,44 @@ + +Instructions to generate Server REST API stubs + +## Delete previously generated directory +``` +rm -Rf server/src/main/java/io/pravega/schemaregistry/server/io.pravega.rest/generated +``` + +## Update schemaregistry.yaml +All REST API modifications should be done by updating the swagger/schemaregistry.yaml specification file. +This can be done manually or by using the online editor at http://editor.swagger.io. + +## Download Swagger codegen +Download swagger-codegen-cli from maven - http://repo1.maven.org/maven2/io/swagger/swagger-codegen-cli/2.2.3/swagger-codegen-cli-2.2.3.jar + +## Generate the API stubs using Swagger Codegen +``` +java -jar swagger-codegen-cli.jar generate -i /contract/src/main/swagger/SchemaRegistry.yaml -l jaxrs -c /contract/src/main/swagger/server.config.json -o /contract/ +``` + +## Remove extra files created by codegen +All files that get generated outside of the contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest folder should be deleted and not committed to git. 
+ +## Update ApiV1.java +The JAXRS API stubs decorated with swagger annotations are generated in the .../contract/io/pravega/schemaregistry/contract/v1/ApiV1.java class. +Copy these API descriptions into interfaces in .../contract/io.pravega.schemaregistry/contract/v1/ApiV1.java. +Also make an asynchronous version of the APIs in .../contract/io.pravega.schemaregistry/server/rest/v1/ApiV1.java to use only Jersey async interfaces. + +## Generate documentation +### Download Swagger2Markup CLI +https://jcenter.bintray.com/io/github/swagger2markup/swagger2markup-cli/1.3.3/swagger2markup-cli-1.3.3.jar + +### Generate and save the markup documentation +``` +java -Dswagger2markup.markupLanguage=MARKDOWN -Dswagger2markup.generatedExamplesEnabled=true -jar swagger2markup-cli-1.3.3.jar convert -i /contract/src/main/swagger/SchemaRegistry.yaml -f /documentation/src/docs/io.pravega.rest/restapis +``` diff --git a/contract/src/main/swagger/SchemaRegistry.yaml b/contract/src/main/swagger/SchemaRegistry.yaml new file mode 100644 index 000000000..92dba5fd1 --- /dev/null +++ b/contract/src/main/swagger/SchemaRegistry.yaml @@ -0,0 +1,867 @@ +# +# Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Description of the Pravega Schema Registry APIs. + +swagger: "2.0" +info: + description: "REST APIs for Pravega Schema Registry." + version: "0.0.1" + title: Pravega Schema Registry APIs + license: + name: "Apache 2.0" + url: "http://www.apache.org/licenses/LICENSE-2.0" +basePath: "/v1" +tags: +- name: "Group" + description: "Group related APIs" +- name: "Schema" + description: "Schema related APIs" +schemes: + - http +paths: + /groups: + get: + tags: + - "Group" + operationId: listGroups + description: List all groups + produces: + - application/json + parameters: + - in: query + name: continuationToken + type: string + description: Continuation token + - in: query + name: limit + type: integer + description: The number of items to return + required: + - limit + responses: + 200: + description: List of all groups + schema: + $ref: "#/definitions/ListGroupsResponse" + 500: + description: Internal server error while fetching the list of Groups + post: + tags: + - "Group" + operationId: createGroup + description: Create a new Group + consumes: + - application/json + parameters: + - in: body + name: CreateGroupRequest + description: The Group configuration + required: true + schema: + type: object + properties: + groupName: + type: string + groupProperties: + $ref: "#/definitions/GroupProperties" + required: + - groupName + - groupProperties + responses: + 201: + description: Successfully added group + 409: + description: Group with given name already exists + 500: + description: Internal server error while creating a Group + /groups/{groupName}: + parameters: + - in: path + name: groupName + description: Group name + required: true + type: string + get: + tags: + - "Group" + operationId: getGroupProperties + description: Fetch the properties of an existing Group + produces: + - application/json + responses: + 200: + description: Found Group properties + schema: + $ref: "#/definitions/GroupProperties" + 404: + description: Group with given name not found + 500: + description: Internal server error while fetching Group details + delete: + tags: + - "Group" + operationId:
deleteGroup + description: Delete a Group + responses: + 204: + description: Successfully deleted the Group + 500: + description: Internal server error while deleting the Group + /groups/{groupName}/history: + parameters: + - in: path + name: groupName + description: Group name + required: true + type: string + get: + tags: + - "Group" + operationId: getGroupHistory + description: Fetch the history of schema evolution of a Group + produces: + - application/json + responses: + 200: + description: Found Group history + schema: + $ref: "#/definitions/GroupHistory" + 404: + description: Group with given name not found + 500: + description: Internal server error while fetching Group history + /groups/{groupName}/rules: + parameters: + - in: path + name: groupName + description: Group name + required: true + type: string + put: + tags: + - "Group" + operationId: updateSchemaValidationRules + description: update schema validation rules of an existing Group + consumes: + - application/json + parameters: + - in: body + name: UpdateValidationRulesRequest + description: update group policy + required: true + schema: + type: object + properties: + validationRules: + $ref: "#/definitions/SchemaValidationRules" + previousRules: + $ref: "#/definitions/SchemaValidationRules" + nullable: true + required: + - validationRules + responses: + 200: + description: Updated schema validation policy + 404: + description: Group with given name not found + 409: + description: Write conflict + 500: + description: Internal server error while updating Group's schema validation rules + /groups/{groupName}/schemas: + parameters: + - in: path + name: groupName + description: Group name + required: true + type: string + - in: query + name: type + type: string + description: Type of object + get: + tags: + - "Group" + operationId: getSchemas + description: Fetch latest schema versions for all objects identified by SchemaInfo#type under a Group. If query param type is specified then latest schema for the type is returned. + produces: + - application/json + responses: + 200: + description: Latest schemas for all objects identified by SchemaInfo#type under the group + schema: + $ref: "#/definitions/SchemaVersionsList" + 404: + description: Group with given name not found + 500: + description: Internal server error while fetching Group's latest schemas + /groups/{groupName}/schemas/versions: + parameters: + - in: path + name: groupName + description: Group name + required: true + type: string + get: + tags: + - "Group" + operationId: getSchemaVersions + description: Get all schema versions for the group + parameters: + - in: query + name: type + type: string + description: Type of object the schema describes. 
+ produces: + - application/json + responses: + 200: + description: Versioned history of schemas registered under the group + schema: + $ref: "#/definitions/SchemaVersionsList" + 404: + description: Group with given name not found + 500: + description: Internal server error while fetching Group schema versions + post: + tags: + - "Group" + operationId: addSchema + description: Adds a new schema to the group + consumes: + - application/json + parameters: + - in: body + name: schemaInfo + description: Add new schema to group + required: true + schema: + $ref: "#/definitions/SchemaInfo" + produces: + - application/json + responses: + 201: + description: Successfully added schema to the group + schema: + $ref: "#/definitions/VersionInfo" + 404: + description: Group not found + 409: + description: Incompatible schema + 417: + description: Invalid serialization format + 500: + description: Internal server error while adding schema to group + /groups/{groupName}/schemas/versions/find: + parameters: + - in: path + name: groupName + description: Group name + required: true + type: string + post: + tags: + - "Group" + operationId: getSchemaVersion + description: Get the version for the schema if it is registered. It does not automatically register the schema. To add a new schema, use addSchema. + consumes: + - application/json + parameters: + - in: body + name: schemaInfo + description: Get schema corresponding to the version + required: true + schema: + $ref: "#/definitions/SchemaInfo" + produces: + - application/json + responses: + 200: + description: Schema version + schema: + $ref: "#/definitions/VersionInfo" + 404: + description: Group with given name not found + 500: + description: Internal server error while fetching version for schema + /groups/{groupName}/schemas/versions/{versionOrdinal}: + parameters: + - in: path + name: groupName + description: Group name + required: true + type: string + - in: path + name: versionOrdinal + description: Version ordinal + required: true + type: integer + format: int32 + get: + tags: + - "Group" + operationId: getSchemaFromVersionOrdinal + description: Get schema from the version ordinal that uniquely identifies the schema in the group. + produces: + - application/json + responses: + 200: + description: Schema corresponding to the version + schema: + $ref: "#/definitions/SchemaInfo" + 404: + description: Group with given name not found + 500: + description: Internal server error while fetching schema from version + delete: + tags: + - "Group" + operationId: deleteSchemaVersionOrdinal + description: Delete schema identified by version from the group. + produces: + - application/json + responses: + 204: + description: Schema corresponding to the version + 404: + description: Group with given name not found + 500: + description: Internal server error while deleting schema from group + /groups/{groupName}/schemas/{type}/versions/{version}: + parameters: + - in: path + name: groupName + description: Group name + required: true + type: string + - in: path + name: type + description: Schema type from SchemaInfo#type or VersionInfo#type + required: true + type: string + - in: path + name: version + description: Version number + required: true + type: integer + format: int32 + get: + tags: + - "Group" + operationId: getSchemaFromVersion + description: Get schema from the version ordinal that uniquely identifies the schema in the group.
+ produces: + - application/json + responses: + 200: + description: Schema corresponding to the version + schema: + $ref: "#/definitions/SchemaInfo" + 404: + description: Group with given name not found + 500: + description: Internal server error while fetching schema from version + delete: + tags: + - "Group" + operationId: deleteSchemaVersion + description: Delete schema version from the group. + produces: + - application/json + responses: + 204: + description: Schema corresponding to the version + 404: + description: Group with given name not found + 500: + description: Internal server error while deleting schema from group + /groups/{groupName}/schemas/versions/validate: + parameters: + - in: path + name: groupName + description: Group name + required: true + type: string + post: + tags: + - "Group" + operationId: validate + description: Checks if given schema is compatible with schemas in the registry for current policy setting. + consumes: + - application/json + parameters: + - in: body + name: ValidateRequest + description: Checks if schema is valid with respect to supplied validation rules + required: true + schema: + type: object + properties: + schemaInfo: + $ref: "#/definitions/SchemaInfo" + validationRules: + $ref: "#/definitions/SchemaValidationRules" + required: + - schemaInfo + produces: + - application/json + responses: + 200: + description: Schema validation response + schema: + $ref: "#/definitions/Valid" + 404: + description: Group with given name not found + 500: + description: Internal server error while trying to validate schema + /groups/{groupName}/schemas/versions/canRead: + parameters: + - in: path + name: groupName + description: Group name + required: true + type: string + post: + tags: + - "Group" + operationId: canRead + description: Checks if given schema can be used for reads subject to compatibility policy in the schema validation rules. + consumes: + - application/json + parameters: + - in: body + name: schemaInfo + description: Checks if schema can be used to read the data in the stream based on compatibility rules. + required: true + schema: + $ref: "#/definitions/SchemaInfo" + produces: + - application/json + responses: + 200: + description: Response to tell whether schema can be used to read existing schemas + schema: + $ref: "#/definitions/CanRead" + 404: + description: Group with given name not found + 500: + description: Internal server error while checking schema for readability + /groups/{groupName}/encodings: + parameters: + - in: path + name: groupName + description: Group name + required: true + type: string + put: + tags: + - "Group" + operationId: getEncodingId + description: Get an encoding id that uniquely identifies a schema version and codec type pair. 
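The canRead operation above is the REST counterpart of the client's canReadUsing check, which the deserializers added later in this series call before accepting a caller-supplied reader schema. A small sketch of that pre-flight check:

import io.pravega.schemaregistry.client.SchemaRegistryClient;
import io.pravega.schemaregistry.contract.data.SchemaInfo;

public class CanReadSketch {
    // Fails fast if the supplied reader schema cannot read data written to the group
    // under its current compatibility settings.
    public static void checkReadable(SchemaRegistryClient client, String groupId, SchemaInfo readerSchema) {
        if (!client.canReadUsing(groupId, readerSchema)) {
            throw new IllegalArgumentException("Cannot read using schema " + readerSchema.getType());
        }
    }
}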
+ consumes: + - application/json + parameters: + - in: body + name: GetEncodingIdRequest + description: Request for an encoding id corresponding to a schema version and codec type pair + required: true + schema: + type: object + properties: + versionInfo: + $ref: "#/definitions/VersionInfo" + codecType: + type: string + required: + - versionInfo + - codecType + produces: + - application/json + responses: + 200: + description: Found Encoding + schema: + $ref: "#/definitions/EncodingId" + 404: + description: Group with given name or version not found + 412: + description: Codec type not registered + 500: + description: Internal server error while getting encoding id + /groups/{groupName}/encodings/{encodingId}: + parameters: + - in: path + name: groupName + description: Group name + required: true + type: string + - in: path + name: encodingId + description: Encoding id that identifies a unique combination of schema and codec type + required: true + type: integer + format: int32 + get: + tags: + - "Group" + operationId: getEncodingInfo + description: Get the encoding information corresponding to the encoding id. + produces: + - application/json + responses: + 200: + description: Found Encoding + schema: + $ref: "#/definitions/EncodingInfo" + 404: + description: Group or encoding id with given name not found + 500: + description: Internal server error while getting encoding info corresponding to encoding id + /groups/{groupName}/codecTypes: + parameters: + - in: path + name: groupName + description: Group name + required: true + type: string + get: + tags: + - "Group" + operationId: getCodecTypesList + description: Get codecTypes for the group. + produces: + - application/json + responses: + 200: + description: Found CodecTypes + schema: + $ref: "#/definitions/CodecTypesList" + 404: + description: Group with given name not found + 500: + description: Internal server error while fetching registered codecTypes + post: + tags: + - "Group" + operationId: addCodecType + description: Adds a new codecType to the group. + consumes: + - application/json + parameters: + - in: body + name: codecType + description: The codecType + required: true + schema: + type: string + responses: + 201: + description: Successfully added codecType to group + 404: + description: Group not found + 500: + description: Internal server error while registering codecType to a Group + /schemas/addedTo: + post: + tags: + - "Schema" + operationId: getSchemaReferences + description: Gets a map of group names to version info for all the groups where the schema is registered. SchemaInfo#properties is ignored while comparing the schema. + consumes: + - application/json + parameters: + - in: body + name: schemaInfo + description: Get schema references for the supplied schema + required: true + schema: + $ref: "#/definitions/SchemaInfo" + produces: + - application/json + responses: + 200: + description: Map of group names to the schema's version in each group + schema: + $ref: "#/definitions/AddedTo" + 404: + description: Schema not found + 500: + description: Internal server error while fetching Schema references +definitions: + ListGroupsResponse: + type: object + description: Map of Group names to group properties. For partially created groups, the group properties may be null. + properties: + groups: + type: object + additionalProperties: + $ref: "#/definitions/GroupProperties" + continuationToken: + description: Continuation token to identify the position of last group in the response.
+ type: string + required: + - continuationToken + GroupProperties: + type: object + description: Metadata for a group. + properties: + serializationFormat: + description: serialization format for the group. + $ref: "#/definitions/SerializationFormat" + schemaValidationRules: + description: Validation rules to apply while registering new schema. + $ref: "#/definitions/SchemaValidationRules" + allowMultipleTypes: + description: Flag to indicate whether to allow multiple schemas representing distinct objects to be registered in the group. + type: boolean + properties: + description: User defined Key value strings. + type: object + additionalProperties: + type: string + minLength: 0 + maxLength: 40 + required: + - serializationFormat + - allowMultipleTypes + - schemaValidationRules + SerializationFormat: + type: object + description: Serialization format enum that lists different serialization formats supported by the service. To use additional formats, use serializationFormat.Custom and supply customTypeName. + properties: + serializationFormat: + type: string + enum: + - Avro + - Protobuf + - Json + - Any + - Custom + customTypeName: + type: string + required: + - serializationFormat + SchemaInfo: + type: object + description: Schema information object that encapsulates various properties of a schema. + properties: + type: + description: Name of the schema. This identifies the type of object the schema payload represents. + type: string + serializationFormat: + description: Type of schema. + $ref: "#/definitions/SerializationFormat" + schemaData: + description: Base64 encoded string for binary data for schema. + type: string + format: binary + properties: + description: User defined key value strings. + type: object + additionalProperties: + type: string + minLength: 0 + maxLength: 40 + required: + - type + - serializationFormat + - schemaData + VersionInfo: + description: Version information object. + type: object + properties: + type: + description: Type of schema for this version. This is same value used in SchemaInfo#Type for the schema this version identifies. + type: string + version: + description: Version number that uniquely identifies the schema version among all schemas in the group that share the same Type. + type: integer + format: int32 + ordinal: + description: Version ordinal that uniquely identifies the position of the corresponding schema across all schemas in the group. + type: integer + format: int32 + required: + - type + - version + - ordinal + SchemaWithVersion: + type: object + description: Object that encapsulates SchemaInfo and its corresponding VersionInfo objects. + properties: + schemaInfo: + description: Schema information. + $ref: "#/definitions/SchemaInfo" + version: + description: Version information. + $ref: "#/definitions/VersionInfo" + required: + - schemaInfo + - version + SchemaVersionsList: + type: object + description: List of schemas with their versions. + properties: + schemas: + description: List of schemas with their versions. + type: array + items: + $ref: "#/definitions/SchemaWithVersion" + EncodingId: + type: object + description: Encoding id that uniquely identifies a schema version and codec type pair. + properties: + encodingId: + type: integer + format: int32 + description: encoding id generated by service. + required: + - encodingId + EncodingInfo: + type: object + description: Encoding information object that resolves the schema version and codec type used for corresponding encoding id. 
+ properties: + schemaInfo: + description: Schema information object. + $ref: "#/definitions/SchemaInfo" + versionInfo: + description: Version information object. + $ref: "#/definitions/VersionInfo" + codecType: + description: Codec type. + type: string + required: + - schemaInfo + - versionInfo + - codecType + Compatibility: + type: object + description: Schema Compatibility validation rule. + required: + - name + - policy + properties: + name: + type: string + description: Name is used to identify the type of SchemaValidationRule. For Compatibility rule the name should be "Compatibility". + policy: + description: Compatibility policy enum. + type: string + enum: + - AllowAny + - DenyAll + - Backward + - Forward + - ForwardTransitive + - BackwardTransitive + - BackwardTill + - ForwardTill + - BackwardAndForwardTill + - Full + - FullTransitive + backwardTill: + description: Version for backward till if policy is BackwardTill or BackwardAndForwardTill. + $ref: "#/definitions/VersionInfo" + forwardTill: + description: Version for forward till if policy is ForwardTill or BackwardAndForwardTill. + $ref: "#/definitions/VersionInfo" + SchemaValidationRules: + type: object + description: Schema validation rules to be applied for new schema addition. Currently only one rule is supported - Compatibility. + properties: + rules: + type: object + additionalProperties: + $ref: "#/definitions/SchemaValidationRule" + SchemaValidationRule: + type: object + description: Schema validation rule base class. + required: + - rule + properties: + rule: + description: Specific schema validation rule. The only rule we have presently is Compatibility. The "name" is used to identify specific Rule type. The only rule supported in this is Compatibility. + oneOf: + - $ref: '#/definitions/Compatibility' + discriminator: + propertyName: name + mapping: + Compatibility: '#/definitions/Compatibility' + CodecTypesList: + type: object + description: Response object for listCodecTypes. + properties: + codecTypes: + type: array + description: List of codecTypes. + items: + type: string + Valid: + type: object + description: Response object for validateSchema api. + properties: + valid: + description: Whether given schema is valid with respect to existing group schemas against the configured validation rules. + type: boolean + required: + - valid + CanRead: + type: object + description: Response object for canRead api. + properties: + compatible: + description: Whether given schema is compatible and can be used for reads. Compatibility is checked against existing group schemas subject to group's configured compatibility policy. + type: boolean + required: + - compatible + GroupHistoryRecord: + type: object + description: Group History Record that describes each schema evolution - schema information, version generated for the schema, time and rules used for schema validation. + properties: + schemaInfo: + description: Schema information object. + $ref: "#/definitions/SchemaInfo" + version: + description: Schema version information object. + $ref: "#/definitions/VersionInfo" + validationRules: + description: Schema validation rules applied. + $ref: "#/definitions/SchemaValidationRules" + timestamp: + description: Timestamp when the schema was added. + type: integer + format: int64 + schemaString: + description: Schema as json string for serialization formats that registry service understands. 
+ type: string + required: + - schemaInfo + - version + - validationRules + - timestamp + GroupHistory: + type: object + properties: + history: + type: array + description: Chronological list of Group History records. + items: + $ref: "#/definitions/GroupHistoryRecord" + AddedTo: + type: object + description: Map of Group names to versionInfos in the group. This is for all the groups where the schema is registered. + properties: + groups: + type: object + additionalProperties: + $ref: "#/definitions/VersionInfo" + required: + - groups diff --git a/contract/src/main/swagger/server.config.json b/contract/src/main/swagger/server.config.json new file mode 100644 index 000000000..f1ac7594b --- /dev/null +++ b/contract/src/main/swagger/server.config.json @@ -0,0 +1,8 @@ +{ +"sourceFolder" : "src/main/java", +"implFolder" : "src/main/java", +"modelPackage" : "io.pravega.schemaregistry.contract.generated.rest.model", +"apiPackage" : "io.pravega.schemaregistry.contract.generated.rest.server.api", +"library" : "jersey2", +"hideGenerationTimestamp" : true +} diff --git a/contract/src/test/java/io/pravega/schemaregistry/contract/transform/ModelHelperTest.java b/contract/src/test/java/io/pravega/schemaregistry/contract/transform/ModelHelperTest.java new file mode 100644 index 000000000..c724715db --- /dev/null +++ b/contract/src/test/java/io/pravega/schemaregistry/contract/transform/ModelHelperTest.java @@ -0,0 +1,138 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.contract.transform; + +import com.google.common.collect.ImmutableMap; +import io.pravega.schemaregistry.contract.data.GroupHistoryRecord; +import io.pravega.schemaregistry.contract.generated.rest.model.Compatibility; +import io.pravega.schemaregistry.contract.generated.rest.model.EncodingId; +import io.pravega.schemaregistry.contract.generated.rest.model.EncodingInfo; +import io.pravega.schemaregistry.contract.generated.rest.model.GroupProperties; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaValidationRules; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaWithVersion; +import io.pravega.schemaregistry.contract.generated.rest.model.SerializationFormat; +import io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo; +import org.junit.Test; + +import java.nio.ByteBuffer; +import java.util.Arrays; +import java.util.Collections; + +import static org.junit.Assert.*; + +public class ModelHelperTest { + @Test + public void testDecode() { + SerializationFormat type = new SerializationFormat().serializationFormat(SerializationFormat.SerializationFormatEnum.CUSTOM).customTypeName("a"); + SchemaValidationRules rules = new SchemaValidationRules().rules(Collections.emptyMap()); + SchemaInfo schema = new SchemaInfo() + .type("a").serializationFormat(type).schemaData(new byte[0]).properties(Collections.emptyMap()); + VersionInfo version = new VersionInfo().type("a").version(1).ordinal(1); + Compatibility compatibility = new Compatibility().name(Compatibility.class.getSimpleName()) + .policy(Compatibility.PolicyEnum.BACKWARDANDFORWARDTILL).backwardTill(version).forwardTill(version); + String codecType = "custom"; + + // 
decodes + io.pravega.schemaregistry.contract.data.SerializationFormat serializationFormat = ModelHelper.decode(type); + assertEquals(serializationFormat, io.pravega.schemaregistry.contract.data.SerializationFormat.Custom); + assertEquals(serializationFormat.getCustomTypeName(), "a"); + + io.pravega.schemaregistry.contract.data.SchemaInfo schemaInfo = ModelHelper.decode(schema); + assertEquals(schemaInfo.getType(), "a"); + assertEquals(schemaInfo.getSerializationFormat(), serializationFormat); + assertNotNull(schemaInfo.getSchemaData()); + assertNotNull(schemaInfo.getProperties()); + + io.pravega.schemaregistry.contract.data.Compatibility compatibilityDecoded = ModelHelper.decode(compatibility); + assertEquals(compatibilityDecoded.getCompatibility(), io.pravega.schemaregistry.contract.data.Compatibility.Type.BackwardAndForwardTill); + + io.pravega.schemaregistry.contract.data.SchemaValidationRules rulesDecoded = ModelHelper.decode(rules); + assertEquals(rulesDecoded.getRules().size(), 0); + + io.pravega.schemaregistry.contract.data.VersionInfo versionInfo = ModelHelper.decode(version); + assertEquals(versionInfo.getType(), version.getType()); + assertEquals(versionInfo.getVersion(), version.getVersion().intValue()); + + io.pravega.schemaregistry.contract.data.EncodingInfo encodingInfo = ModelHelper.decode(new EncodingInfo().schemaInfo(schema).versionInfo(version).codecType(codecType)); + assertEquals(encodingInfo.getCodecType(), "custom"); + assertEquals(encodingInfo.getVersionInfo(), versionInfo); + assertEquals(encodingInfo.getSchemaInfo(), schemaInfo); + io.pravega.schemaregistry.contract.data.SchemaWithVersion schemaWithVersion = ModelHelper.decode(new SchemaWithVersion().schemaInfo(schema).version(version)); + assertEquals(schemaWithVersion.getVersionInfo(), versionInfo); + assertEquals(schemaWithVersion.getSchemaInfo(), schemaInfo); + + io.pravega.schemaregistry.contract.data.EncodingId encodingId = ModelHelper.decode(new EncodingId().encodingId(1)); + assertEquals(encodingId.getId(), 1); + } + + @Test + public void testEncode() { + io.pravega.schemaregistry.contract.data.SerializationFormat serializationFormat = io.pravega.schemaregistry.contract.data.SerializationFormat.custom("custom"); + io.pravega.schemaregistry.contract.data.SchemaInfo schemaInfo = new io.pravega.schemaregistry.contract.data.SchemaInfo( + "name", serializationFormat, ByteBuffer.wrap(new byte[0]), ImmutableMap.of()); + io.pravega.schemaregistry.contract.data.VersionInfo versionInfo = new io.pravega.schemaregistry.contract.data.VersionInfo("a", 0, 1); + io.pravega.schemaregistry.contract.data.Compatibility rule = io.pravega.schemaregistry.contract.data.Compatibility.backwardTillAndForwardTill( + new io.pravega.schemaregistry.contract.data.VersionInfo("a", 0, 0), + new io.pravega.schemaregistry.contract.data.VersionInfo("a", 1, 1)); + io.pravega.schemaregistry.contract.data.SchemaValidationRules schemaValidationRules = io.pravega.schemaregistry.contract.data.SchemaValidationRules.of(rule); + io.pravega.schemaregistry.contract.data.GroupProperties prop = io.pravega.schemaregistry.contract.data.GroupProperties + .builder().serializationFormat(serializationFormat).schemaValidationRules(schemaValidationRules) + .allowMultipleTypes(true).properties(ImmutableMap.of()).build(); + String codecType = "codecType"; + + // encode test + VersionInfo version = ModelHelper.encode(versionInfo); + assertEquals(version.getVersion().intValue(), versionInfo.getVersion()); + assertEquals(version.getType(), versionInfo.getType()); + + 
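+ // The encode checks below mirror the decode checks above: each contract data object is mapped to its generated REST model and compared field by field.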
SerializationFormat type = ModelHelper.encode(serializationFormat); + assertEquals(type.getSerializationFormat(), SerializationFormat.SerializationFormatEnum.CUSTOM); + + SchemaInfo schema = ModelHelper.encode(schemaInfo); + assertEquals(schema.getType(), schemaInfo.getType()); + assertEquals(schema.getProperties(), schemaInfo.getProperties()); + assertTrue(Arrays.equals(schema.getSchemaData(), schemaInfo.getSchemaData().array())); + assertEquals(schema.getSerializationFormat(), type); + + EncodingId encodingId = ModelHelper.encode(new io.pravega.schemaregistry.contract.data.EncodingId(0)); + assertEquals(encodingId.getEncodingId().intValue(), 0); + + EncodingInfo encodingInfo = ModelHelper.encode(new io.pravega.schemaregistry.contract.data.EncodingInfo(versionInfo, schemaInfo, codecType)); + assertEquals(encodingInfo.getCodecType(), codecType); + assertEquals(encodingInfo.getVersionInfo(), version); + assertEquals(encodingInfo.getSchemaInfo(), schema); + + SchemaValidationRules rules = ModelHelper.encode(schemaValidationRules); + assertEquals(rules.getRules().size(), 1); + + io.pravega.schemaregistry.contract.generated.rest.model.GroupHistoryRecord schemaEvolution = ModelHelper.encode(new GroupHistoryRecord( + schemaInfo, versionInfo, schemaValidationRules, 100L, "")); + assertEquals(schemaEvolution.getSchemaInfo(), schema); + assertEquals(schemaEvolution.getValidationRules(), rules); + assertEquals(schemaEvolution.getVersion(), version); + assertEquals(schemaEvolution.getTimestamp().longValue(), 100L); + assertEquals(schemaEvolution.getSchemaString(), ""); + + Compatibility compatibility = ModelHelper.encode(rule); + assertEquals(compatibility.getPolicy(), Compatibility.PolicyEnum.BACKWARDANDFORWARDTILL); + + SchemaWithVersion schemaWithVersion = ModelHelper.encode(new io.pravega.schemaregistry.contract.data.SchemaWithVersion(schemaInfo, versionInfo)); + assertEquals(schemaWithVersion.getSchemaInfo(), schema); + assertEquals(schemaWithVersion.getVersion(), version); + + GroupProperties groupProperties = ModelHelper.encode(prop); + assertEquals(groupProperties.getSerializationFormat(), type); + assertEquals(groupProperties.getSchemaValidationRules(), rules); + assertEquals(groupProperties.isAllowMultipleTypes(), prop.isAllowMultipleTypes()); + assertEquals(groupProperties.getProperties(), prop.getProperties()); + } + +} diff --git a/settings.gradle b/settings.gradle index 600f3080a..dc820fbbc 100644 --- a/settings.gradle +++ b/settings.gradle @@ -10,3 +10,7 @@ */ rootProject.name = 'schema-registry' +include 'client', + 'common', + 'contract' + \ No newline at end of file From a0876cc5a185a7a4a93f9df84647b0654a7afb47 Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Sun, 7 Jun 2020 19:47:10 -0700 Subject: [PATCH 03/70] serializers copying over Signed-off-by: Shivesh Ranjan --- build.gradle | 27 + .../schemaregistry/GroupIdGenerator.java | 45 + .../schemaregistry/cache/EncodingCache.java | 65 + .../pravega/schemaregistry/codec/Codec.java | 23 + .../schemaregistry/codec/CodecFactory.java | 129 + .../schemaregistry/schemas/AvroSchema.java | 107 + .../schemaregistry/schemas/JSONSchema.java | 123 + .../schemas/ProtobufSchema.java | 122 + .../schemas/SchemaContainer.java | 21 + .../AbstractPravegaDeserializer.java | 133 + .../AbstractPravegaSerializer.java | 123 + .../serializers/AvroDeserlizer.java | 60 + .../serializers/AvroGenericDeserlizer.java | 56 + .../serializers/AvroSerializer.java | 62 + .../serializers/JSonGenericObject.java | 21 + .../serializers/JsonDeserlizer.java | 48 + 
.../serializers/JsonGenericDeserlizer.java | 52 + .../serializers/JsonSerializer.java | 37 + .../MultipleFormatGenericDeserializer.java | 37 + .../MultipleFormatJsonStringDeserializer.java | 58 + .../MultiplexedAndGenericDeserializer.java | 44 + .../serializers/MultiplexedDeserializer.java | 36 + .../serializers/MultiplexedSerializer.java | 36 + .../serializers/PravegaDeserializer.java | 18 + .../serializers/PravegaSerializer.java | 18 + .../serializers/ProtobufDeserlizer.java | 37 + .../ProtobufGenericDeserlizer.java | 77 + .../serializers/ProtobufSerializer.java | 32 + .../serializers/SerializerConfig.java | 182 ++ .../serializers/SerializerFactory.java | 675 ++++ .../pravega/schemaregistry/GroupIdTest.java | 29 + .../schemaregistry/cache/CacheTest.java | 41 + .../schemaregistry/codec/CodecTest.java | 47 + .../schemaregistry/schemas/TestSchemas.java | 103 + .../serializers/SerializerTest.java | 388 +++ .../schemaregistry/testobjs/Address.java | 22 + .../schemaregistry/testobjs/DerivedUser1.java | 28 + .../schemaregistry/testobjs/DerivedUser2.java | 28 + .../testobjs/SchemaDefinitions.java | 61 + .../pravega/schemaregistry/testobjs/User.java | 28 + .../testobjs/generated/ProtobufTest.java | 2836 +++++++++++++++++ .../testobjs/generated/Test1.java | 389 +++ .../testobjs/generated/Test2.java | 469 +++ .../testobjs/generated/Test3.java | 549 ++++ .../src/test/resources/avro/avroTest1.avsc | 9 + .../src/test/resources/avro/avroTest2.avsc | 10 + .../src/test/resources/avro/avroTest3.avsc | 11 + .../src/test/resources/proto/protobufTest.pb | Bin 0 -> 498 bytes .../test/resources/proto/protobufTest.proto | 28 + settings.gradle | 5 + 50 files changed, 7585 insertions(+) create mode 100644 serializers/src/main/java/io/pravega/schemaregistry/GroupIdGenerator.java create mode 100644 serializers/src/main/java/io/pravega/schemaregistry/cache/EncodingCache.java create mode 100644 serializers/src/main/java/io/pravega/schemaregistry/codec/Codec.java create mode 100644 serializers/src/main/java/io/pravega/schemaregistry/codec/CodecFactory.java create mode 100644 serializers/src/main/java/io/pravega/schemaregistry/schemas/AvroSchema.java create mode 100644 serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java create mode 100644 serializers/src/main/java/io/pravega/schemaregistry/schemas/ProtobufSchema.java create mode 100644 serializers/src/main/java/io/pravega/schemaregistry/schemas/SchemaContainer.java create mode 100644 serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractPravegaDeserializer.java create mode 100644 serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractPravegaSerializer.java create mode 100644 serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroDeserlizer.java create mode 100644 serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroGenericDeserlizer.java create mode 100644 serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroSerializer.java create mode 100644 serializers/src/main/java/io/pravega/schemaregistry/serializers/JSonGenericObject.java create mode 100644 serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonDeserlizer.java create mode 100644 serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericDeserlizer.java create mode 100644 serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializer.java create mode 100644 serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatGenericDeserializer.java create mode 
100644 serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatJsonStringDeserializer.java create mode 100644 serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedAndGenericDeserializer.java create mode 100644 serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedDeserializer.java create mode 100644 serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedSerializer.java create mode 100644 serializers/src/main/java/io/pravega/schemaregistry/serializers/PravegaDeserializer.java create mode 100644 serializers/src/main/java/io/pravega/schemaregistry/serializers/PravegaSerializer.java create mode 100644 serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufDeserlizer.java create mode 100644 serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufGenericDeserlizer.java create mode 100644 serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufSerializer.java create mode 100644 serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java create mode 100644 serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java create mode 100644 serializers/src/test/java/io/pravega/schemaregistry/GroupIdTest.java create mode 100644 serializers/src/test/java/io/pravega/schemaregistry/cache/CacheTest.java create mode 100644 serializers/src/test/java/io/pravega/schemaregistry/codec/CodecTest.java create mode 100644 serializers/src/test/java/io/pravega/schemaregistry/schemas/TestSchemas.java create mode 100644 serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java create mode 100644 serializers/src/test/java/io/pravega/schemaregistry/testobjs/Address.java create mode 100644 serializers/src/test/java/io/pravega/schemaregistry/testobjs/DerivedUser1.java create mode 100644 serializers/src/test/java/io/pravega/schemaregistry/testobjs/DerivedUser2.java create mode 100644 serializers/src/test/java/io/pravega/schemaregistry/testobjs/SchemaDefinitions.java create mode 100644 serializers/src/test/java/io/pravega/schemaregistry/testobjs/User.java create mode 100644 serializers/src/test/java/io/pravega/schemaregistry/testobjs/generated/ProtobufTest.java create mode 100644 serializers/src/test/java/io/pravega/schemaregistry/testobjs/generated/Test1.java create mode 100644 serializers/src/test/java/io/pravega/schemaregistry/testobjs/generated/Test2.java create mode 100644 serializers/src/test/java/io/pravega/schemaregistry/testobjs/generated/Test3.java create mode 100644 serializers/src/test/resources/avro/avroTest1.avsc create mode 100644 serializers/src/test/resources/avro/avroTest2.avsc create mode 100644 serializers/src/test/resources/avro/avroTest3.avsc create mode 100644 serializers/src/test/resources/proto/protobufTest.pb create mode 100644 serializers/src/test/resources/proto/protobufTest.proto diff --git a/build.gradle b/build.gradle index 52125793d..f7b2f6d7f 100644 --- a/build.gradle +++ b/build.gradle @@ -196,3 +196,30 @@ def getProjectVersion() { } return ver } + +project('serializers') { + dependencies { + compile project(':common') + compile project(':client') + compile group: 'org.apache.avro', name: 'avro', version: avroVersion + compile group: 'org.apache.avro', name: 'avro-protobuf', version: avroProtobufVersion + compile group: 'com.google.protobuf', name: 'protobuf-java', version: protobufProtocVersion + compile group: 'com.google.protobuf', name:'protobuf-gradle-plugin', version: 
protobufGradlePlugin + compile group: 'com.google.protobuf', name: 'protobuf-java-util', version: protobufUtilVersion + compile group: 'io.pravega', name: 'pravega-client', version: pravegaVersion + compile group: 'org.xerial.snappy', name: 'snappy-java', version: snappyVersion + compile group: 'com.fasterxml.jackson.module', name: 'jackson-module-jsonSchema', version: jacksonVersion + testCompile group: 'org.slf4j', name: 'log4j-over-slf4j', version: slf4jApiVersion + testCompile group: 'ch.qos.logback', name: 'logback-classic', version: qosLogbackVersion + testCompile group: 'io.pravega', name: 'pravega-test-testcommon', version: pravegaVersion + } + + javadoc { + title = "Serializers" + dependsOn delombok + source = delombok.outputDir + failOnError = false + exclude "**/impl/**"; + options.addBooleanOption("Xdoclint:all,-reference", true) + } +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/GroupIdGenerator.java b/serializers/src/main/java/io/pravega/schemaregistry/GroupIdGenerator.java new file mode 100644 index 000000000..36a1488fb --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/GroupIdGenerator.java @@ -0,0 +1,45 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry; + +import com.google.common.base.Preconditions; +import lombok.SneakyThrows; + +/** + * Defines strategies for generating groupId for stream. + * Currently there is only one naming strategy that uses streams fully qualified scoped stream name and encodes it using + * URL encoder. + */ +public class GroupIdGenerator { + private GroupIdGenerator() { + } + + @SneakyThrows + public static String getGroupId(Type type, String... args) { + switch (type) { + case QualifiedStreamName: + Preconditions.checkNotNull(args); + Preconditions.checkArgument(args.length == 2); + StringBuilder qualifiedNameBuilder = new StringBuilder(); + qualifiedNameBuilder.append("pravega://"); + for (String arg : args) { + qualifiedNameBuilder.append(arg); + qualifiedNameBuilder.append("/"); + } + return qualifiedNameBuilder.toString(); + default: + throw new IllegalArgumentException(); + } + } + + public enum Type { + QualifiedStreamName, + } +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/cache/EncodingCache.java b/serializers/src/main/java/io/pravega/schemaregistry/cache/EncodingCache.java new file mode 100644 index 000000000..b1a336a5f --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/cache/EncodingCache.java @@ -0,0 +1,65 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
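For quick reference, GroupIdGenerator above derives the group id by prefixing the scope and stream name with pravega:// and joining them with slashes. A short usage sketch (scope and stream names are placeholders):

import io.pravega.schemaregistry.GroupIdGenerator;

public class GroupIdSketch {
    public static void main(String[] args) {
        // Requires exactly two arguments (scope, stream); prints "pravega://myScope/myStream/".
        String groupId = GroupIdGenerator.getGroupId(
                GroupIdGenerator.Type.QualifiedStreamName, "myScope", "myStream");
        System.out.println(groupId);
    }
}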
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.cache; + +import com.google.common.cache.CacheBuilder; +import com.google.common.cache.CacheLoader; +import com.google.common.cache.LoadingCache; +import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.contract.data.EncodingId; +import io.pravega.schemaregistry.contract.data.EncodingInfo; +import lombok.Data; +import lombok.SneakyThrows; +import lombok.Synchronized; + +import java.util.HashMap; +import java.util.Map; +import java.util.concurrent.ExecutionException; + +/** + * Local cache for storing schemas that are retrieved from the registry service. + */ +public class EncodingCache { + private static final Map GROUP_CACHE_MAP = new HashMap<>(); + + private final LoadingCache encodingCache; + + private EncodingCache(String groupId, SchemaRegistryClient schemaRegistryClient) { + encodingCache = CacheBuilder.newBuilder().build(new CacheLoader() { + @Override + public EncodingInfo load(EncodingId key) { + return schemaRegistryClient.getEncodingInfo(groupId, key); + } + }); + } + + @SneakyThrows(ExecutionException.class) + public EncodingInfo getGroupEncodingInfo(EncodingId encodingId) { + return encodingCache.get(encodingId); + } + + @Synchronized + public static EncodingCache getEncodingCacheForGroup(String groupId, SchemaRegistryClient schemaRegistryClient) { + Key key = new Key(schemaRegistryClient, groupId); + if (GROUP_CACHE_MAP.containsKey(key)) { + return GROUP_CACHE_MAP.get(key); + } else { + EncodingCache value = new EncodingCache(groupId, schemaRegistryClient); + GROUP_CACHE_MAP.put(key, value); + return value; + } + } + + @Data + private static class Key { + private final SchemaRegistryClient client; + private final String groupId; + } +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/codec/Codec.java b/serializers/src/main/java/io/pravega/schemaregistry/codec/Codec.java new file mode 100644 index 000000000..4cfaf7052 --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/codec/Codec.java @@ -0,0 +1,23 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.codec; + +import java.nio.ByteBuffer; + +/** + * Codec interface that defines methods to encode and decoder data for a given codec type. + */ +public interface Codec { + String getCodecType(); + + ByteBuffer encode(ByteBuffer data); + + ByteBuffer decode(ByteBuffer data); +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/codec/CodecFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/codec/CodecFactory.java new file mode 100644 index 000000000..d3d74b572 --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/codec/CodecFactory.java @@ -0,0 +1,129 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
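EncodingCache above keeps one loading cache per (client, group) pair so that repeated encoding-id lookups avoid a registry round trip. A usage sketch built only from the methods shown above:

import io.pravega.schemaregistry.cache.EncodingCache;
import io.pravega.schemaregistry.client.SchemaRegistryClient;
import io.pravega.schemaregistry.contract.data.EncodingId;
import io.pravega.schemaregistry.contract.data.EncodingInfo;

public class EncodingCacheSketch {
    // First lookup of an id hits the registry via the cache loader; later lookups are served locally.
    public static EncodingInfo resolve(SchemaRegistryClient client, String groupId, int id) {
        EncodingCache cache = EncodingCache.getEncodingCacheForGroup(groupId, client);
        return cache.getGroupEncodingInfo(new EncodingId(id));
    }
}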
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.codec; + +import lombok.SneakyThrows; +import org.xerial.snappy.Snappy; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.zip.GZIPInputStream; +import java.util.zip.GZIPOutputStream; + +/** + * Factory class for creating codecs for codec types . + */ +public class CodecFactory { + public static final String NONE = ""; + public static final String MIME_GZIP = "application/x-gzip"; + public static final String MIME_SNAPPY = "application/x-snappy-framed"; + + private static final Noop NOOP = new Noop(); + private static final GZipCodec GZIP_COMPRESSOR = new GZipCodec(); + private static final SnappyCodec SNAPPY_COMPRESSOR = new SnappyCodec(); + + public static Codec none() { + return NOOP; + } + + public static Codec gzip() { + return GZIP_COMPRESSOR; + } + + public static Codec snappy() { + return SNAPPY_COMPRESSOR; + } + + private static class Noop implements Codec { + @Override + public String getCodecType() { + return NONE; + } + + @Override + public ByteBuffer encode(ByteBuffer data) { + return data; + } + + @Override + public ByteBuffer decode(ByteBuffer data) { + return data; + } + } + + private static class GZipCodec implements Codec { + @Override + public String getCodecType() { + return MIME_GZIP; + } + + @SneakyThrows(IOException.class) + @Override + public ByteBuffer encode(ByteBuffer data) { + byte[] array = new byte[data.remaining()]; + data.get(array); + + ByteArrayOutputStream bos = new ByteArrayOutputStream(array.length); + GZIPOutputStream gzipOS = new GZIPOutputStream(bos); + gzipOS.write(array); + gzipOS.close(); + byte[] compressed = bos.toByteArray(); + return ByteBuffer.wrap(compressed); + } + + @SneakyThrows(IOException.class) + @Override + public ByteBuffer decode(ByteBuffer data) { + byte[] array = new byte[data.remaining()]; + data.get(array); + + ByteArrayInputStream bis = new ByteArrayInputStream(array); + ByteArrayOutputStream bos = new ByteArrayOutputStream(); + GZIPInputStream gzipIS = new GZIPInputStream(bis); + byte[] buffer = new byte[1024]; + int len; + while ((len = gzipIS.read(buffer)) != -1) { + bos.write(buffer, 0, len); + } + byte[] uncompressed = bos.toByteArray(); + return ByteBuffer.wrap(uncompressed); + } + } + + private static class SnappyCodec implements Codec { + @Override + public String getCodecType() { + return MIME_SNAPPY; + } + + @SneakyThrows(IOException.class) + @Override + public ByteBuffer encode(ByteBuffer data) { + byte[] array = new byte[data.remaining()]; + data.get(array); + + byte[] compressed = Snappy.compress(array); + return ByteBuffer.wrap(compressed); + } + + @SneakyThrows(IOException.class) + @Override + public ByteBuffer decode(ByteBuffer data) { + byte[] array = new byte[data.remaining()]; + data.get(array); + + byte[] uncompressed = Snappy.uncompress(array); + return ByteBuffer.wrap(uncompressed); + } + } + +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/schemas/AvroSchema.java b/serializers/src/main/java/io/pravega/schemaregistry/schemas/AvroSchema.java new file mode 100644 index 000000000..5cb4e7ae1 --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/schemas/AvroSchema.java @@ -0,0 +1,107 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. 
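The Codec implementations above are symmetric encode/decode transforms identified by a codec-type string, which is the same string later stored against the encoding id. A runnable round-trip sketch using the gzip codec:

import io.pravega.schemaregistry.codec.Codec;
import io.pravega.schemaregistry.codec.CodecFactory;

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

public class CodecRoundTripSketch {
    public static void main(String[] args) {
        Codec gzip = CodecFactory.gzip();
        ByteBuffer compressed = gzip.encode(ByteBuffer.wrap("hello".getBytes(StandardCharsets.UTF_8)));
        ByteBuffer restored = gzip.decode(compressed);
        byte[] bytes = new byte[restored.remaining()];
        restored.get(bytes);
        // Prints: hello application/x-gzip
        System.out.println(new String(bytes, StandardCharsets.UTF_8) + " " + gzip.getCodecType());
    }
}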
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.schemas; + +import com.google.common.base.Charsets; +import com.google.common.base.Preconditions; +import com.google.common.collect.ImmutableMap; +import io.pravega.schemaregistry.contract.data.SchemaInfo; +import io.pravega.schemaregistry.contract.data.SerializationFormat; +import lombok.Getter; +import org.apache.avro.Schema; +import org.apache.avro.generic.GenericRecord; +import org.apache.avro.reflect.ReflectData; +import org.apache.avro.specific.SpecificData; +import org.apache.avro.specific.SpecificRecordBase; + +import java.nio.ByteBuffer; + +/** + * Container class for Avro Schema. + * + * @param Type of element. + */ +public class AvroSchema implements SchemaContainer { + @Getter + private final Schema schema; + private final SchemaInfo schemaInfo; + + private AvroSchema(Schema schema) { + this.schema = schema; + this.schemaInfo = new SchemaInfo(schema.getFullName(), + SerializationFormat.Avro, getSchemaBytes(), ImmutableMap.of()); + } + + /** + * Method to create a typed AvroSchema for the given class. It extracts the avro schema from the class. + * For Avro generated classes, the schema is retrieved from the class. + * For POJOs the schema is extracted using avro's {@link ReflectData}. + * + * @param tClass Class whose object's schema is used. + * @param Type of the Java class. + * @return {@link AvroSchema} with generic type T that extracts and captures the avro schema. + */ + public static AvroSchema of(Class tClass) { + Schema schema; + if (SpecificRecordBase.class.isAssignableFrom(tClass)) { + schema = SpecificData.get().getSchema(tClass); + } else { + schema = ReflectData.get().getSchema(tClass); + } + return new AvroSchema<>(schema); + } + + /** + * Method to create a typed AvroSchema of type {@link GenericRecord} from the given schema. + * + * @param schema Schema to use. + * @return Returns an AvroSchema with {@link GenericRecord} type. + */ + public static AvroSchema of(Schema schema) { + return new AvroSchema<>(schema); + } + + /** + * It is same as {@link #of(Class)} except that it generates an AvroSchema typed as {@link SpecificRecordBase}. + * + * This is useful for supplying a map of Avro schemas for multiplexed serializers and deserializers. + * + * @param tClass Class whose schema should be used. + * @param Type of class whose schema is to be used. + * @return Returns an AvroSchema with {@link SpecificRecordBase} type. + */ + public static AvroSchema ofBaseType(Class tClass) { + Preconditions.checkArgument(SpecificRecordBase.class.isAssignableFrom(tClass)); + + return new AvroSchema<>(SpecificData.get().getSchema(tClass)); + } + + /** + * Method to create a typed AvroSchema of type {@link GenericRecord} from schema info. + * + * @param schemainfo Schema info object that has schema data in binary form. + * @return Returns an AvroSchema with {@link GenericRecord} type. 
+ */ + public static AvroSchema from(SchemaInfo schemainfo) { + String schemaString = new String(schemainfo.getSchemaData().array(), Charsets.UTF_8); + Schema schema = new Schema.Parser().parse(schemaString); + + return new AvroSchema<>(schema); + } + + private ByteBuffer getSchemaBytes() { + return ByteBuffer.wrap(schema.toString().getBytes(Charsets.UTF_8)); + } + + @Override + public SchemaInfo getSchemaInfo() { + return schemaInfo; + } +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java b/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java new file mode 100644 index 000000000..958208e51 --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java @@ -0,0 +1,123 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.schemas; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonMappingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.module.jsonSchema.JsonSchema; +import com.fasterxml.jackson.module.jsonSchema.JsonSchemaGenerator; +import com.google.common.base.Charsets; +import com.google.common.collect.ImmutableMap; +import io.pravega.schemaregistry.contract.data.SchemaInfo; +import io.pravega.schemaregistry.contract.data.SerializationFormat; +import lombok.Getter; +import lombok.SneakyThrows; + +import java.nio.ByteBuffer; + +/** + * Container class for Json Schema. + * + * @param Type of element. + */ +public class JSONSchema implements SchemaContainer { + private final String schemaString; + @Getter + private final Class tClass; + @Getter + private final Class tDerivedClass; + + @Getter + private final JsonSchema schema; + + private final SchemaInfo schemaInfo; + + private JSONSchema(JsonSchema schema, String name, String schemaString, Class tClass) { + this(schema, name, schemaString, tClass, tClass); + } + + private JSONSchema(JsonSchema schema, String name, String schemaString, Class tClass, Class tDerivedClass) { + String type = name != null ? name : schema.getId(); + // Add empty name if the name is not supplied and cannot be extracted from the json schema id. + type = type != null ? type : ""; + this.schemaString = schemaString; + this.schemaInfo = new SchemaInfo(type, SerializationFormat.Json, getSchemaBytes(), ImmutableMap.of()); + this.tClass = tClass; + this.tDerivedClass = tDerivedClass; + this.schema = schema; + } + + /** + * Method to create a typed JSONSchema for the given class. It extracts the json schema from the class. + * For POJOs the schema is extracted using jacksons {@link JsonSchemaGenerator}. + * + * @param tClass Class whose object's schema is used. + * @param Type of the Java class. + * @return {@link JSONSchema} with generic type T that extracts and captures the json schema. 
+ */ + @SneakyThrows({JsonMappingException.class, JsonProcessingException.class}) + public static JSONSchema of(Class tClass) { + ObjectMapper objectMapper = new ObjectMapper(); + JsonSchemaGenerator schemaGen = new JsonSchemaGenerator(objectMapper); + JsonSchema schema = schemaGen.generateSchema(tClass); + String schemaString = objectMapper.writeValueAsString(schema); + + return new JSONSchema<>(schema, null, schemaString, tClass); + } + + /** + * Method to create a typed JSONSchema of type {@link Object} from the given schema. + * + * @param type type of object identified by {@link SchemaInfo#type}. + * @param schemaString Schema string to use. + * @return Returns an JSONSchema with {@link Object} type. + */ + @SneakyThrows({JsonMappingException.class, JsonProcessingException.class}) + public static JSONSchema of(String type, String schemaString) { + ObjectMapper objectMapper = new ObjectMapper(); + JsonSchema schema = objectMapper.readValue(schemaString, JsonSchema.class); + return new JSONSchema<>(schema, type, schemaString, Object.class); + } + + @SneakyThrows({JsonMappingException.class, JsonProcessingException.class}) + public static JSONSchema ofBaseType(Class tDerivedClass, Class tClass) { + ObjectMapper objectMapper = new ObjectMapper(); + JsonSchemaGenerator schemaGen = new JsonSchemaGenerator(objectMapper); + JsonSchema schema = schemaGen.generateSchema(tDerivedClass); + String schemaString = objectMapper.writeValueAsString(schema); + + return new JSONSchema<>(schema, null, schemaString, tClass, tDerivedClass); + } + + /** + * Method to create a typed JSONSchema of type {@link Object} from the given schema. + * + * @param schemaInfo Schema info to translate into json schema. + * @return Returns an JSONSchema with {@link Object} type. + */ + @SneakyThrows({JsonMappingException.class, JsonProcessingException.class}) + public static JSONSchema from(SchemaInfo schemaInfo) { + ObjectMapper objectMapper = new ObjectMapper(); + String schemaString = new String(schemaInfo.getSchemaData().array(), Charsets.UTF_8); + + JsonSchema schema = objectMapper.readValue(schemaString, JsonSchema.class); + return new JSONSchema<>(schema, schemaInfo.getType(), schemaString, Object.class); + } + + private ByteBuffer getSchemaBytes() { + return ByteBuffer.wrap(schemaString.getBytes(Charsets.UTF_8)); + } + + @Override + public SchemaInfo getSchemaInfo() { + return schemaInfo; + } +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/schemas/ProtobufSchema.java b/serializers/src/main/java/io/pravega/schemaregistry/schemas/ProtobufSchema.java new file mode 100644 index 000000000..e78e190e6 --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/schemas/ProtobufSchema.java @@ -0,0 +1,122 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.schemas; + +import com.google.common.collect.ImmutableMap; +import com.google.protobuf.DescriptorProtos; +import com.google.protobuf.DynamicMessage; +import com.google.protobuf.GeneratedMessageV3; +import com.google.protobuf.Message; +import com.google.protobuf.Parser; +import io.pravega.schemaregistry.contract.data.SchemaInfo; +import io.pravega.schemaregistry.contract.data.SerializationFormat; +import lombok.Data; +import lombok.Getter; +import lombok.SneakyThrows; + +import java.nio.ByteBuffer; + +/** + * Container class for protobuf schema. + * Protobuf schemas are represented using {@link com.google.protobuf.DescriptorProtos.FileDescriptorSet}. + * + * @param Type of element. + */ +@Data +public class ProtobufSchema implements SchemaContainer { + @Getter + private final Parser parser; + @Getter + private final DescriptorProtos.FileDescriptorSet descriptorProto; + + private final SchemaInfo schemaInfo; + + private ProtobufSchema(String name, Parser parser, DescriptorProtos.FileDescriptorSet fileDescriptorSet) { + this.parser = parser; + this.descriptorProto = fileDescriptorSet; + this.schemaInfo = new SchemaInfo(name, SerializationFormat.Protobuf, getSchemaBytes(), ImmutableMap.of()); + } + + private ByteBuffer getSchemaBytes() { + return ByteBuffer.wrap(descriptorProto.toByteArray()); + } + + @Override + public SchemaInfo getSchemaInfo() { + return schemaInfo; + } + + /** + * Method to generate protobuf schema from the supplied protobuf generated class and {@link DescriptorProtos.FileDescriptorSet}. + * + * @param tClass Class for code generated protobuf message. + * @param fileDescriptorSet file descriptor set representing a protobuf schema. + * @param Type of protobuf message + * @return {@link ProtobufSchema} with generic type T that captures protobuf schema and parser. + */ + @SneakyThrows + @SuppressWarnings("unchecked") + public static ProtobufSchema of(Class tClass, DescriptorProtos.FileDescriptorSet fileDescriptorSet) { + T defaultInstance = (T) tClass.getMethod("getDefaultInstance").invoke(null); + Parser tParser = (Parser) defaultInstance.getParserForType(); + return new ProtobufSchema<>(defaultInstance.getDescriptorForType().getFullName(), tParser, fileDescriptorSet); + } + + /** + * Method to generate protobuf schema of generic type {@link DynamicMessage} using the {@link DescriptorProtos.FileDescriptorSet}. + * It is for representing protobuf schemas to be used for generic deserialization of protobuf serialized payload into + * {@link DynamicMessage}. + * Note: this does not have a protobuf parser and can only be used during deserialization. + * + * @param name Name of protobuf message + * @param fileDescriptorSet file descriptor set representing a protobuf schema. + * @return {@link ProtobufSchema} with generic type {@link DynamicMessage} that captures protobuf schema. + */ + @SneakyThrows + @SuppressWarnings("unchecked") + public static ProtobufSchema of(String name, DescriptorProtos.FileDescriptorSet fileDescriptorSet) { + return new ProtobufSchema<>(name, null, fileDescriptorSet); + } + + /** + * Method to generate protobuf schema from the supplied protobuf generated class and {@link DescriptorProtos.FileDescriptorSet}. + * It is same as {@link #of(Class, DescriptorProtos.FileDescriptorSet)} except that it returns a Protobuf schema + * typed {@link GeneratedMessageV3}. 
+ * It is useful in multiplexed deserializer to pass all objects to deserialize into as base {@link GeneratedMessageV3} objects. + * + * @param tDerivedClass Class for code generated protobuf message. + * @param fileDescriptorSet file descriptor set representing a protobuf schema. + * @param Type of protobuf message + * @return {@link ProtobufSchema} with generic type {@link GeneratedMessageV3} that captures protobuf schema and parser of type T. + */ + @SneakyThrows + @SuppressWarnings("unchecked") + public static ProtobufSchema ofBaseType(Class tDerivedClass, DescriptorProtos.FileDescriptorSet fileDescriptorSet) { + T defaultInstance = (T) tDerivedClass.getMethod("getDefaultInstance").invoke(null); + Parser tParser = (Parser) defaultInstance.getParserForType(); + + return new ProtobufSchema<>(defaultInstance.getDescriptorForType().getFullName(), tParser, fileDescriptorSet); + } + + /** + * Method to generate protobuf schema of generic type {@link DynamicMessage} from schemaInfo {@link SchemaInfo}. + * + * @param schemaInfo Schema Info + * @return {@link ProtobufSchema} with generic type {@link DynamicMessage} that captures protobuf schema. + */ + @SneakyThrows + @SuppressWarnings("unchecked") + public static ProtobufSchema from(SchemaInfo schemaInfo) { + DescriptorProtos.FileDescriptorSet fileDescriptorSet = DescriptorProtos.FileDescriptorSet.parseFrom(schemaInfo.getSchemaData()); + + return new ProtobufSchema<>(schemaInfo.getType(), null, fileDescriptorSet); + } +} + diff --git a/serializers/src/main/java/io/pravega/schemaregistry/schemas/SchemaContainer.java b/serializers/src/main/java/io/pravega/schemaregistry/schemas/SchemaContainer.java new file mode 100644 index 000000000..f4f1a796e --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/schemas/SchemaContainer.java @@ -0,0 +1,21 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.schemas; + +import io.pravega.schemaregistry.contract.data.SchemaInfo; + +/** + * Interface for container class for schemas for different serialization formats. + * + * @param Type of object. + */ +public interface SchemaContainer { + SchemaInfo getSchemaInfo(); +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractPravegaDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractPravegaDeserializer.java new file mode 100644 index 000000000..be227486e --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractPravegaDeserializer.java @@ -0,0 +1,133 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
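The schema container classes above (AvroSchema, JSONSchema, ProtobufSchema) all implement SchemaContainer and reduce a language-level schema to the SchemaInfo contract object consumed by the registry client. A short sketch using a hypothetical POJO (MyEvent is not part of this patch):

import io.pravega.schemaregistry.contract.data.SchemaInfo;
import io.pravega.schemaregistry.schemas.AvroSchema;
import io.pravega.schemaregistry.schemas.JSONSchema;

public class SchemaContainerSketch {
    // Hypothetical POJO, used only to illustrate schema extraction.
    public static class MyEvent {
        public String name;
        public int count;
    }

    public static void main(String[] args) {
        AvroSchema<MyEvent> avro = AvroSchema.of(MyEvent.class);  // schema via Avro ReflectData
        JSONSchema<MyEvent> json = JSONSchema.of(MyEvent.class);  // schema via Jackson's JsonSchemaGenerator
        SchemaInfo avroInfo = avro.getSchemaInfo();
        SchemaInfo jsonInfo = json.getSchemaInfo();
        System.out.println(avroInfo.getSerializationFormat() + " / " + jsonInfo.getSerializationFormat());
    }
}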
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import io.pravega.client.stream.Serializer; +import io.pravega.schemaregistry.cache.EncodingCache; +import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.contract.data.EncodingId; +import io.pravega.schemaregistry.contract.data.EncodingInfo; +import io.pravega.schemaregistry.contract.data.GroupProperties; +import io.pravega.schemaregistry.contract.data.SchemaInfo; +import io.pravega.schemaregistry.schemas.SchemaContainer; +import lombok.Synchronized; +import lombok.extern.slf4j.Slf4j; + +import javax.annotation.Nullable; +import java.io.ByteArrayInputStream; +import java.io.InputStream; +import java.nio.ByteBuffer; +import java.util.Map; +import java.util.concurrent.atomic.AtomicBoolean; + +import static io.pravega.schemaregistry.codec.CodecFactory.NONE; + +@Slf4j +abstract class AbstractPravegaDeserializer implements Serializer { + private static final byte PROTOCOL = 0x0; + private static final int HEADER_SIZE = 1 + Integer.BYTES; + + private final String groupId; + private final SchemaRegistryClient client; + // This can be null. If no schema is supplied, it means the intent is to deserialize into writer schema. + // If headers are not encoded, then this will be the latest schema from the registry + private final SchemaInfo schemaInfo; + private final AtomicBoolean encodeHeader; + private final SerializerConfig.Decoder decoder; + private final boolean skipHeaders; + private final EncodingCache encodingCache; + + protected AbstractPravegaDeserializer(String groupId, + SchemaRegistryClient client, + @Nullable SchemaContainer schema, + boolean skipHeaders, + SerializerConfig.Decoder decoder, + EncodingCache encodingCache) { + this.groupId = groupId; + this.client = client; + this.encodingCache = encodingCache; + this.schemaInfo = schema == null ? 
null : schema.getSchemaInfo(); + this.encodeHeader = new AtomicBoolean(); + this.skipHeaders = skipHeaders; + this.decoder = decoder; + + initialize(); + } + + @Synchronized + private void initialize() { + GroupProperties groupProperties = client.getGroupProperties(groupId); + + Map properties = groupProperties.getProperties(); + boolean toEncodeHeader = !properties.containsKey(SerializerFactory.ENCODE) || + Boolean.parseBoolean(properties.get(SerializerFactory.ENCODE)); + this.encodeHeader.set(toEncodeHeader); + + if (schemaInfo != null) { + log.info("Validate caller supplied schema."); + if (!client.canReadUsing(groupId, schemaInfo)) { + throw new IllegalArgumentException("Cannot read using schema" + schemaInfo.getType()); + } + } else { + if (!this.encodeHeader.get()) { + log.warn("No reader schema is supplied and stream does not have encoding headers."); + } + } + } + + @Override + public ByteBuffer serialize(T obj) { + throw new IllegalStateException(); + } + + @Override + public T deserialize(ByteBuffer data) { + if (this.encodeHeader.get()) { + SchemaInfo writerSchema = null; + String codecType = NONE; + if (skipHeaders) { + int currentPos = data.position(); + data.position(currentPos + HEADER_SIZE); + } else { + byte protocol = data.get(); + EncodingId encodingId = new EncodingId(data.getInt()); + EncodingInfo encodingInfo = encodingCache.getGroupEncodingInfo(encodingId); + codecType = encodingInfo.getCodecType(); + writerSchema = encodingInfo.getSchemaInfo(); + } + + ByteBuffer uncompressed = decoder.decode(codecType, data); + byte[] array = new byte[uncompressed.remaining()]; + uncompressed.get(array); + + InputStream inputStream = new ByteArrayInputStream(array); + if (schemaInfo == null) { // deserialize into writer schema + // pass writer schema for schema to be read into + return deserialize(inputStream, writerSchema, writerSchema); + } else { + // pass reader schema for schema on read to the underlying implementation + return deserialize(inputStream, writerSchema, schemaInfo); + } + } else { + // pass reader schema for schema on read to the underlying implementation + byte[] array = new byte[data.remaining()]; + data.get(array); + InputStream inputStream = new ByteArrayInputStream(array); + + return deserialize(inputStream, null, schemaInfo); + } + } + + protected abstract T deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema); + + boolean isEncodeHeader() { + return encodeHeader.get(); + } +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractPravegaSerializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractPravegaSerializer.java new file mode 100644 index 000000000..6043a805f --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractPravegaSerializer.java @@ -0,0 +1,123 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import com.google.common.base.Preconditions; +import io.pravega.client.stream.Serializer; +import io.pravega.common.util.BitConverter; +import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.codec.Codec; +import io.pravega.schemaregistry.contract.data.EncodingId; +import io.pravega.schemaregistry.contract.data.GroupProperties; +import io.pravega.schemaregistry.contract.data.SchemaInfo; +import io.pravega.schemaregistry.contract.data.VersionInfo; +import io.pravega.schemaregistry.schemas.SchemaContainer; +import lombok.Getter; +import lombok.SneakyThrows; + +import java.io.ByteArrayOutputStream; +import java.io.OutputStream; +import java.nio.ByteBuffer; +import java.util.Map; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicReference; + +abstract class AbstractPravegaSerializer implements Serializer { + private static final byte PROTOCOL = 0x0; + + private final String groupId; + + private final SchemaInfo schemaInfo; + private final AtomicReference encodingId; + private final AtomicBoolean encodeHeader; + private final SchemaRegistryClient client; + @Getter + private final Codec codec; + private final boolean registerSchema; + + protected AbstractPravegaSerializer(String groupId, + SchemaRegistryClient client, + SchemaContainer schema, + Codec codec, + boolean registerSchema) { + Preconditions.checkNotNull(groupId); + Preconditions.checkNotNull(client); + Preconditions.checkNotNull(codec); + Preconditions.checkNotNull(schema); + + this.groupId = groupId; + this.client = client; + this.schemaInfo = schema.getSchemaInfo(); + this.registerSchema = registerSchema; + this.encodingId = new AtomicReference<>(); + this.codec = codec; + this.encodeHeader = new AtomicBoolean(); + initialize(); + } + + private void initialize() { + GroupProperties groupProperties = client.getGroupProperties(groupId); + + Map properties = groupProperties.getProperties(); + boolean toEncodeHeader = !properties.containsKey(SerializerFactory.ENCODE) || + Boolean.parseBoolean(properties.get(SerializerFactory.ENCODE)); + encodeHeader.set(toEncodeHeader); + VersionInfo version; + if (registerSchema) { + // register schema + version = client.addSchema(groupId, schemaInfo); + } else { + // get already registered schema version. If schema is not registered, this will throw an exception. 
+ version = client.getVersionForSchema(groupId, schemaInfo); + } + if (toEncodeHeader) { + encodingId.set(client.getEncodingId(groupId, version, codec.getCodecType())); + } + } + + @SneakyThrows + @Override + public ByteBuffer serialize(T obj) { + ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); + ByteArrayOutputStream dataStream = new ByteArrayOutputStream(); + + if (this.encodeHeader.get()) { + Preconditions.checkNotNull(schemaInfo); + + outputStream.write(PROTOCOL); + BitConverter.writeInt(outputStream, encodingId.get().getId()); + } + + // if schema is not null, pass the schema to the serializer implementation + if (schemaInfo != null) { + serialize(obj, schemaInfo, dataStream); + } else { + serialize(obj, null, dataStream); + } + + dataStream.flush(); + + byte[] array = dataStream.toByteArray(); + + ByteBuffer compressed = codec.encode(ByteBuffer.wrap(array)); + array = new byte[compressed.remaining()]; + compressed.get(array); + + outputStream.write(array); + return ByteBuffer.wrap(outputStream.toByteArray()); + } + + protected abstract void serialize(T var, SchemaInfo schema, OutputStream outputStream); + + @Override + public T deserialize(ByteBuffer bytes) { + throw new IllegalStateException(); + } +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroDeserlizer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroDeserlizer.java new file mode 100644 index 000000000..f27c712c8 --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroDeserlizer.java @@ -0,0 +1,60 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
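When header-encoding is enabled for the group, the serializer above lays each event out as a 1-byte protocol version, a 4-byte encoding id, and the codec-encoded payload, which is exactly what the deserializer reads back. A minimal sketch of inspecting that header by hand (the registry lookup that maps the encoding id to a schema and codecType is omitted):

import java.nio.ByteBuffer;

public class HeaderPeek {
    // Layout written by AbstractPravegaSerializer when encodeHeader is true:
    //   byte 0     -> protocol version (currently 0x0)
    //   bytes 1-4  -> encoding id, resolved via the registry to an EncodingInfo (schema + codecType)
    //   bytes 5+   -> event payload, possibly compressed by the configured codec
    public static void peek(ByteBuffer serialized) {
        byte protocol = serialized.get();     // mirrors data.get() in AbstractPravegaDeserializer
        int encodingId = serialized.getInt(); // mirrors data.getInt()
        System.out.println("protocol=" + protocol + ", encodingId=" + encodingId
                + ", payloadBytes=" + serialized.remaining());
    }
}

This is also why the skipHeaders path in the deserializer simply advances the buffer by HEADER_SIZE (1 + Integer.BYTES) when a format does not need the writer schema.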
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import com.google.common.base.Charsets; +import com.google.common.base.Preconditions; +import com.google.common.cache.CacheBuilder; +import com.google.common.cache.CacheLoader; +import com.google.common.cache.LoadingCache; +import io.pravega.schemaregistry.cache.EncodingCache; +import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.contract.data.SchemaInfo; +import io.pravega.schemaregistry.schemas.AvroSchema; +import lombok.SneakyThrows; +import org.apache.avro.Schema; +import org.apache.avro.generic.IndexedRecord; +import org.apache.avro.io.BinaryDecoder; +import org.apache.avro.io.DecoderFactory; +import org.apache.avro.specific.SpecificDatumReader; + +import java.io.InputStream; + +class AvroDeserlizer extends AbstractPravegaDeserializer { + private final AvroSchema avroSchema; + private final LoadingCache knownSchemas; + + AvroDeserlizer(String groupId, SchemaRegistryClient client, + AvroSchema schema, + SerializerConfig.Decoder decoder, EncodingCache encodingCache) { + super(groupId, client, schema, false, decoder, encodingCache); + Preconditions.checkNotNull(schema); + this.avroSchema = schema; + this.knownSchemas = CacheBuilder.newBuilder().build(new CacheLoader() { + @Override + public Schema load(byte[] schemaData) throws Exception { + String schemaString = new String(schemaData, Charsets.UTF_8); + return new Schema.Parser().parse(schemaString); + } + }); + } + + @SneakyThrows + @Override + protected T deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) { + Preconditions.checkNotNull(writerSchemaInfo); + Schema writerSchema = knownSchemas.get(writerSchemaInfo.getSchemaData().array()); + Schema readerSchema = avroSchema.getSchema(); + + SpecificDatumReader datumReader = new SpecificDatumReader<>(writerSchema, readerSchema); + BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(inputStream, null); + return datumReader.read(null, decoder); + } +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroGenericDeserlizer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroGenericDeserlizer.java new file mode 100644 index 000000000..6b6164ede --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroGenericDeserlizer.java @@ -0,0 +1,56 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import com.google.common.base.Preconditions; +import com.google.common.cache.CacheBuilder; +import com.google.common.cache.CacheLoader; +import com.google.common.cache.LoadingCache; +import io.pravega.schemaregistry.cache.EncodingCache; +import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.contract.data.SchemaInfo; +import io.pravega.schemaregistry.schemas.AvroSchema; +import lombok.SneakyThrows; +import org.apache.avro.Schema; +import org.apache.avro.generic.GenericDatumReader; +import org.apache.avro.generic.GenericRecord; +import org.apache.avro.io.BinaryDecoder; +import org.apache.avro.io.DecoderFactory; + +import javax.annotation.Nullable; +import java.io.InputStream; + +class AvroGenericDeserlizer extends AbstractPravegaDeserializer { + private final LoadingCache knownSchemas; + + AvroGenericDeserlizer(String groupId, SchemaRegistryClient client, @Nullable AvroSchema schema, + SerializerConfig.Decoder decoder, EncodingCache encodingCache) { + super(groupId, client, schema, false, decoder, encodingCache); + this.knownSchemas = CacheBuilder.newBuilder().build(new CacheLoader() { + @Override + public Schema load(SchemaInfo schemaInfo) throws Exception { + return AvroSchema.from(schemaInfo).getSchema(); + } + }); + } + + @SneakyThrows + @Override + protected GenericRecord deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) { + Preconditions.checkNotNull(writerSchemaInfo); + Schema writerSchema = knownSchemas.get(writerSchemaInfo); + Schema readerSchema = knownSchemas.get(readerSchemaInfo); + + GenericDatumReader genericDatumReader = new GenericDatumReader<>(writerSchema, readerSchema); + + BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(inputStream, null); + return genericDatumReader.read(null, decoder); + } +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroSerializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroSerializer.java new file mode 100644 index 000000000..c9789a8b6 --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroSerializer.java @@ -0,0 +1,62 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
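Events read through the generic Avro deserializer above surface as plain GenericRecord instances; a minimal sketch of consuming one (the field name is illustrative):

import org.apache.avro.generic.GenericRecord;

public class GenericRecordConsumer {
    // Fields are looked up by the names defined in the resolved reader schema.
    public static void print(GenericRecord record) {
        Object name = record.get("name"); // illustrative field name
        System.out.println("record of type " + record.getSchema().getFullName() + ", name=" + name);
    }
}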
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.codec.Codec; +import io.pravega.schemaregistry.contract.data.SchemaInfo; +import io.pravega.schemaregistry.schemas.AvroSchema; +import lombok.SneakyThrows; +import org.apache.avro.Schema; +import org.apache.avro.generic.GenericDatumWriter; +import org.apache.avro.generic.IndexedRecord; +import org.apache.avro.io.BinaryEncoder; +import org.apache.avro.io.EncoderFactory; +import org.apache.avro.reflect.ReflectDatumWriter; +import org.apache.avro.specific.SpecificDatumWriter; +import org.apache.avro.specific.SpecificRecord; + +import java.io.ByteArrayOutputStream; +import java.io.OutputStream; + +class AvroSerializer extends AbstractPravegaSerializer { + private final AvroSchema avroSchema; + AvroSerializer(String groupId, SchemaRegistryClient client, AvroSchema schema, + Codec codec, boolean registerSchema) { + super(groupId, client, schema, codec, registerSchema); + this.avroSchema = schema; + } + + @SneakyThrows + @Override + protected void serialize(T var, SchemaInfo schemaInfo, OutputStream outputStream) { + Schema schema = avroSchema.getSchema(); + + ByteArrayOutputStream out = new ByteArrayOutputStream(); + BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null); + + if (IndexedRecord.class.isAssignableFrom(var.getClass())) { + if (SpecificRecord.class.isAssignableFrom(var.getClass())) { + SpecificDatumWriter writer = new SpecificDatumWriter<>(schema); + writer.write(var, encoder); + } else { + GenericDatumWriter writer = new GenericDatumWriter<>(schema); + writer.write(var, encoder); + } + } else { + ReflectDatumWriter writer = new ReflectDatumWriter<>(schema); + writer.write(var, encoder); + } + + encoder.flush(); + outputStream.write(out.toByteArray()); + outputStream.flush(); + } +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JSonGenericObject.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JSonGenericObject.java new file mode 100644 index 000000000..dd12752eb --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JSonGenericObject.java @@ -0,0 +1,21 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import com.fasterxml.jackson.module.jsonSchema.JsonSchema; +import lombok.Data; + +import java.util.Map; + +@Data +public class JSonGenericObject { + private final Map object; + private final JsonSchema jsonSchema; +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonDeserlizer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonDeserlizer.java new file mode 100644 index 000000000..899f2cbb3 --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonDeserlizer.java @@ -0,0 +1,48 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
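For orientation, the writer selection in AvroSerializer above uses SpecificDatumWriter for Avro-generated SpecificRecord classes, GenericDatumWriter for other IndexedRecord implementations such as GenericRecord, and ReflectDatumWriter for plain POJOs. A minimal sketch of an event that would take the GenericDatumWriter branch (the inline schema string is illustrative):

import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericRecord;

public class AvroWriterDispatchExample {
    public static void main(String[] args) {
        // Illustrative schema; with the serializer above it would come from AvroSchema#getSchema().
        Schema schema = new Schema.Parser().parse(
                "{\"type\":\"record\",\"name\":\"User\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"}]}");

        // A GenericData.Record implements IndexedRecord but not SpecificRecord,
        // so AvroSerializer would write it with GenericDatumWriter.
        GenericRecord record = new GenericData.Record(schema);
        record.put("name", "test");
    }
}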
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import com.fasterxml.jackson.annotation.PropertyAccessor; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.base.Preconditions; +import io.pravega.schemaregistry.cache.EncodingCache; +import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.contract.data.SchemaInfo; +import io.pravega.schemaregistry.schemas.JSONSchema; +import lombok.SneakyThrows; + +import java.io.IOException; +import java.io.InputStream; + +import static com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility; + +class JsonDeserlizer extends AbstractPravegaDeserializer { + private final JSONSchema jsonSchema; + private final ObjectMapper objectMapper; + + JsonDeserlizer(String groupId, SchemaRegistryClient client, + JSONSchema schema, + SerializerConfig.Decoder decoder, EncodingCache encodingCache) { + super(groupId, client, schema, true, decoder, encodingCache); + Preconditions.checkNotNull(schema); + this.jsonSchema = schema; + this.objectMapper = new ObjectMapper(); + objectMapper.setVisibility(PropertyAccessor.ALL, Visibility.ANY); + objectMapper.setVisibility(PropertyAccessor.FIELD, Visibility.ANY); + objectMapper.setVisibility(PropertyAccessor.CREATOR, Visibility.ANY); + } + + @SneakyThrows({JsonProcessingException.class, IOException.class}) + @Override + protected T deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) { + return objectMapper.readValue(inputStream, jsonSchema.getTDerivedClass()); + } +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericDeserlizer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericDeserlizer.java new file mode 100644 index 000000000..bc9dfce51 --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericDeserlizer.java @@ -0,0 +1,52 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
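The visibility overrides in JsonDeserlizer above let Jackson bind JSON directly to private fields and non-public creators, so event classes do not need getters or setters. A small, self-contained sketch of the same ObjectMapper setup (the Person class is purely illustrative):

import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility;
import com.fasterxml.jackson.annotation.PropertyAccessor;
import com.fasterxml.jackson.databind.ObjectMapper;

public class JacksonVisibilityExample {
    // Illustrative POJO with private fields and no accessors.
    static class Person {
        private String name;
        private int age;
    }

    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        // Same settings as JsonDeserlizer: detect private fields and creators.
        mapper.setVisibility(PropertyAccessor.ALL, Visibility.ANY);
        mapper.setVisibility(PropertyAccessor.FIELD, Visibility.ANY);
        mapper.setVisibility(PropertyAccessor.CREATOR, Visibility.ANY);

        Person p = mapper.readValue("{\"name\":\"test\",\"age\":30}", Person.class);
        System.out.println(p.name + " " + p.age);
    }
}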
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.module.jsonSchema.JsonSchema; +import com.google.common.cache.CacheBuilder; +import com.google.common.cache.CacheLoader; +import com.google.common.cache.LoadingCache; +import io.pravega.schemaregistry.cache.EncodingCache; +import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.contract.data.SchemaInfo; +import io.pravega.schemaregistry.schemas.JSONSchema; +import lombok.SneakyThrows; + +import java.io.IOException; +import java.io.InputStream; +import java.util.Map; +import java.util.concurrent.ExecutionException; + +class JsonGenericDeserlizer extends AbstractPravegaDeserializer { + private final ObjectMapper objectMapper; + private final LoadingCache knownSchemas; + + JsonGenericDeserlizer(String groupId, SchemaRegistryClient client, + SerializerConfig.Decoder decoder, EncodingCache encodingCache) { + super(groupId, client, null, false, decoder, encodingCache); + this.objectMapper = new ObjectMapper(); + this.knownSchemas = CacheBuilder.newBuilder().build(new CacheLoader() { + @Override + public JsonSchema load(SchemaInfo schemaInfo) throws Exception { + return JSONSchema.from(schemaInfo).getSchema(); + } + }); + } + + @SneakyThrows({JsonProcessingException.class, ExecutionException.class, IOException.class}) + @Override + protected JSonGenericObject deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) { + Map obj = objectMapper.readValue(inputStream, Map.class); + JsonSchema schema = writerSchemaInfo == null ? null : knownSchemas.get(writerSchemaInfo); + return new JSonGenericObject(obj, schema); + } +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializer.java new file mode 100644 index 000000000..dbf1c0676 --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializer.java @@ -0,0 +1,37 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.base.Charsets; +import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.codec.Codec; +import io.pravega.schemaregistry.contract.data.SchemaInfo; +import io.pravega.schemaregistry.schemas.JSONSchema; +import lombok.SneakyThrows; + +import java.io.OutputStream; + +class JsonSerializer extends AbstractPravegaSerializer { + private final ObjectMapper objectMapper; + JsonSerializer(String groupId, SchemaRegistryClient client, JSONSchema schema, + Codec codec, boolean registerSchema) { + super(groupId, client, schema, codec, registerSchema); + objectMapper = new ObjectMapper(); + } + + @SneakyThrows + @Override + protected void serialize(T var, SchemaInfo schemaInfo, OutputStream outputStream) { + String json = objectMapper.writeValueAsString(var); + outputStream.write(json.getBytes(Charsets.UTF_8)); + outputStream.flush(); + } +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatGenericDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatGenericDeserializer.java new file mode 100644 index 000000000..cb71cf78e --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatGenericDeserializer.java @@ -0,0 +1,37 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import com.google.common.base.Preconditions; +import io.pravega.schemaregistry.cache.EncodingCache; +import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.contract.data.SchemaInfo; +import io.pravega.schemaregistry.contract.data.SerializationFormat; + +import java.io.InputStream; +import java.util.Map; + +class MultipleFormatGenericDeserializer extends AbstractPravegaDeserializer { + private final Map genericDeserializers; + + MultipleFormatGenericDeserializer(String groupId, SchemaRegistryClient client, + Map genericDeserializers, + SerializerConfig.Decoder decoder, + EncodingCache encodingCache) { + super(groupId, client, null, false, decoder, encodingCache); + this.genericDeserializers = genericDeserializers; + } + + @Override + protected Object deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) { + Preconditions.checkNotNull(writerSchema); + return genericDeserializers.get(writerSchema.getSerializationFormat()).deserialize(inputStream, writerSchema, readerSchema); + } +} \ No newline at end of file diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatJsonStringDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatJsonStringDeserializer.java new file mode 100644 index 000000000..f77a4a084 --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatJsonStringDeserializer.java @@ -0,0 +1,58 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.base.Preconditions; +import com.google.protobuf.DynamicMessage; +import com.google.protobuf.util.JsonFormat; +import io.pravega.schemaregistry.cache.EncodingCache; +import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.contract.data.SchemaInfo; +import io.pravega.schemaregistry.contract.data.SerializationFormat; +import lombok.SneakyThrows; +import org.apache.avro.generic.GenericRecord; + +import java.io.InputStream; +import java.util.Map; + +class MultipleFormatJsonStringDeserializer extends AbstractPravegaDeserializer { + private final Map genericDeserializers; + private final ObjectMapper objectMapper = new ObjectMapper(); + + MultipleFormatJsonStringDeserializer(String groupId, SchemaRegistryClient client, + Map genericDeserializers, + SerializerConfig.Decoder decoder, + EncodingCache encodingCache) { + super(groupId, client, null, false, decoder, encodingCache); + this.genericDeserializers = genericDeserializers; + } + + @Override + protected String deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) { + Preconditions.checkNotNull(writerSchema); + return toJsonString(genericDeserializers.get(writerSchema.getSerializationFormat()).deserialize(inputStream, writerSchema, readerSchema)); + } + + @SneakyThrows + private String toJsonString(Object deserialize) { + if (deserialize instanceof GenericRecord) { + return deserialize.toString(); + } else if (deserialize instanceof DynamicMessage) { + JsonFormat.Printer printer = JsonFormat.printer().preservingProtoFieldNames().usingTypeRegistry(JsonFormat.TypeRegistry.newBuilder().build()); + return printer.print((DynamicMessage) deserialize); + } else if (deserialize instanceof JSonGenericObject) { + Map myobject = ((JSonGenericObject) deserialize).getObject(); + return objectMapper.writeValueAsString(myobject); + } else { + return deserialize.toString(); + } + } +} \ No newline at end of file diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedAndGenericDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedAndGenericDeserializer.java new file mode 100644 index 000000000..28c3bdf67 --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedAndGenericDeserializer.java @@ -0,0 +1,44 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
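Events returned by the multi-format generic deserializer above come back as plain Object, so a caller usually branches on the runtime type, mirroring the toJsonString dispatch; a minimal sketch:

import com.google.protobuf.DynamicMessage;
import io.pravega.schemaregistry.serializers.JSonGenericObject;
import org.apache.avro.generic.GenericRecord;

public class MultiFormatHandler {
    // Branches mirror MultipleFormatJsonStringDeserializer#toJsonString.
    public static String describe(Object event) {
        if (event instanceof GenericRecord) {
            return "avro record: " + event;
        } else if (event instanceof DynamicMessage) {
            return "protobuf message: " + event;
        } else if (event instanceof JSonGenericObject) {
            return "json object: " + ((JSonGenericObject) event).getObject();
        } else {
            return "custom format: " + event;
        }
    }
}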
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import com.google.common.base.Preconditions; +import io.pravega.schemaregistry.cache.EncodingCache; +import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.common.Either; +import io.pravega.schemaregistry.contract.data.SchemaInfo; + +import java.io.InputStream; +import java.util.Map; + +class MultiplexedAndGenericDeserializer extends AbstractPravegaDeserializer> { + private final Map> deserializers; + private final AbstractPravegaDeserializer genericDeserializer; + + MultiplexedAndGenericDeserializer(String groupId, SchemaRegistryClient client, + Map> deserializers, + AbstractPravegaDeserializer genericDeserializer, + SerializerConfig.Decoder decoder, + EncodingCache encodingCache) { + super(groupId, client, null, false, decoder, encodingCache); + this.deserializers = deserializers; + this.genericDeserializer = genericDeserializer; + } + + @Override + protected Either deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) { + Preconditions.checkNotNull(writerSchema); + if (deserializers.containsKey(writerSchema.getType())) { + return Either.left(deserializers.get(writerSchema.getType()).deserialize(inputStream, writerSchema, readerSchema)); + } else { + return Either.right(genericDeserializer.deserialize(inputStream, writerSchema, readerSchema)); + } + } +} \ No newline at end of file diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedDeserializer.java new file mode 100644 index 000000000..f73558a43 --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedDeserializer.java @@ -0,0 +1,36 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
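A reader built on the combined deserializer above receives an Either, with known (typed) events on the left and the generic fallback on the right; a minimal sketch of unpacking it with the isLeft()/getLeft()/getRight() accessors used elsewhere in this patch:

import io.pravega.schemaregistry.common.Either;

public class EitherHandlingExample {
    // Typed events arrive on the left; events whose type has no registered deserializer
    // arrive on the right as the generic representation.
    public static <T> void handle(Either<T, Object> event) {
        if (event.isLeft()) {
            T typed = event.getLeft();
            System.out.println("typed event: " + typed);
        } else {
            Object generic = event.getRight();
            System.out.println("generic event: " + generic);
        }
    }
}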
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import com.google.common.base.Preconditions; +import io.pravega.schemaregistry.cache.EncodingCache; +import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.contract.data.SchemaInfo; + +import java.io.InputStream; +import java.util.Map; + +class MultiplexedDeserializer extends AbstractPravegaDeserializer { + private final Map> deserializers; + + MultiplexedDeserializer(String groupId, SchemaRegistryClient client, + Map> deserializers, + SerializerConfig.Decoder decoder, + EncodingCache encodingCache) { + super(groupId, client, null, false, decoder, encodingCache); + this.deserializers = deserializers; + } + + @Override + protected T deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) { + Preconditions.checkNotNull(writerSchema); + return deserializers.get(writerSchema.getType()).deserialize(inputStream, writerSchema, readerSchema); + } +} \ No newline at end of file diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedSerializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedSerializer.java new file mode 100644 index 000000000..4a74d6488 --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedSerializer.java @@ -0,0 +1,36 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import io.pravega.client.stream.Serializer; + +import java.nio.ByteBuffer; +import java.util.Map; + +class MultiplexedSerializer implements Serializer { + private final Map, AbstractPravegaSerializer> serializers; + + MultiplexedSerializer(Map, AbstractPravegaSerializer> serializers) { + this.serializers = serializers; + } + + @Override + @SuppressWarnings("unchecked") + public ByteBuffer serialize(T obj) { + Class tClass = (Class) obj.getClass(); + AbstractPravegaSerializer serializer = serializers.get(tClass); + return serializer.serialize(obj); + } + + @Override + public T deserialize(ByteBuffer serializedValue) { + throw new IllegalStateException(); + } +} \ No newline at end of file diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/PravegaDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/PravegaDeserializer.java new file mode 100644 index 000000000..96dbfd944 --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/PravegaDeserializer.java @@ -0,0 +1,18 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import io.pravega.schemaregistry.contract.data.SchemaInfo; + +import java.io.InputStream; + +public interface PravegaDeserializer { + T deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema); +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/PravegaSerializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/PravegaSerializer.java new file mode 100644 index 000000000..fa1b3fe38 --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/PravegaSerializer.java @@ -0,0 +1,18 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import io.pravega.schemaregistry.contract.data.SchemaInfo; + +import java.io.OutputStream; + +public interface PravegaSerializer { + void serialize(T var, SchemaInfo schema, OutputStream outputStream); +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufDeserlizer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufDeserlizer.java new file mode 100644 index 000000000..a2bd0c9ac --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufDeserlizer.java @@ -0,0 +1,37 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import com.google.common.base.Preconditions; +import com.google.protobuf.GeneratedMessageV3; +import io.pravega.schemaregistry.cache.EncodingCache; +import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.contract.data.SchemaInfo; +import io.pravega.schemaregistry.schemas.ProtobufSchema; +import lombok.SneakyThrows; + +import java.io.InputStream; + +public class ProtobufDeserlizer extends AbstractPravegaDeserializer { + private final ProtobufSchema protobufSchema; + ProtobufDeserlizer(String groupId, SchemaRegistryClient client, + ProtobufSchema schema, SerializerConfig.Decoder decoder, + EncodingCache encodingCache) { + super(groupId, client, schema, true, decoder, encodingCache); + Preconditions.checkNotNull(schema); + this.protobufSchema = schema; + } + + @SneakyThrows + @Override + protected T deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) { + return protobufSchema.getParser().parseFrom(inputStream); + } +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufGenericDeserlizer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufGenericDeserlizer.java new file mode 100644 index 000000000..d01b7a470 --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufGenericDeserlizer.java @@ -0,0 +1,77 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import com.google.common.base.Preconditions; +import com.google.common.cache.CacheBuilder; +import com.google.common.cache.CacheLoader; +import com.google.common.cache.LoadingCache; +import com.google.protobuf.DescriptorProtos; +import com.google.protobuf.Descriptors; +import com.google.protobuf.DynamicMessage; +import io.pravega.schemaregistry.cache.EncodingCache; +import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.contract.data.SchemaInfo; +import io.pravega.schemaregistry.schemas.ProtobufSchema; +import lombok.SneakyThrows; +import org.apache.commons.lang3.SerializationException; + +import javax.annotation.Nullable; +import java.io.InputStream; + +public class ProtobufGenericDeserlizer extends AbstractPravegaDeserializer { + private final LoadingCache knownSchemas; + + ProtobufGenericDeserlizer(String groupId, SchemaRegistryClient client, @Nullable ProtobufSchema schema, + SerializerConfig.Decoder decoder, EncodingCache encodingCache) { + super(groupId, client, schema, false, decoder, encodingCache); + Preconditions.checkArgument(isEncodeHeader() || schema != null); + + this.knownSchemas = CacheBuilder.newBuilder().build(new CacheLoader() { + @Override + public Descriptors.Descriptor load(SchemaInfo schemaToUse) throws Exception { + DescriptorProtos.FileDescriptorSet descriptorSet = ProtobufSchema.from(schemaToUse).getDescriptorProto(); + + int count = descriptorSet.getFileCount(); + int nameStart = schemaToUse.getType().lastIndexOf("."); + String name = schemaToUse.getType().substring(nameStart + 1); + String pckg = nameStart < 0 ? "" : schemaToUse.getType().substring(0, nameStart); + DescriptorProtos.FileDescriptorProto mainDescriptor = descriptorSet.getFileList().stream() + .filter(x -> x.getPackage().startsWith(pckg) && + x.getMessageTypeList().stream().anyMatch(y -> y.getName().equals(name))) + .findAny().orElseThrow(IllegalArgumentException::new); + + Descriptors.FileDescriptor[] dependencyArray = new Descriptors.FileDescriptor[count]; + for (int i = 0; i < count; i++) { + Descriptors.FileDescriptor fd = Descriptors.FileDescriptor.buildFrom( + descriptorSet.getFile(i), + new Descriptors.FileDescriptor[]{}); + dependencyArray[i] = fd; + } + + Descriptors.FileDescriptor fd = Descriptors.FileDescriptor.buildFrom(mainDescriptor, dependencyArray); + + return fd.getMessageTypes().stream().filter(x -> x.getName().equals(name)) + .findAny().orElseThrow(() -> new SerializationException(String.format("schema for %s not found", schemaToUse.getType()))); + } + }); + } + + @SneakyThrows + @Override + protected DynamicMessage deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) { + Preconditions.checkArgument(writerSchemaInfo != null || readerSchemaInfo != null); + + SchemaInfo schemaToUse = readerSchemaInfo == null ? 
writerSchemaInfo : readerSchemaInfo; + Descriptors.Descriptor messageType = knownSchemas.get(schemaToUse); + + return DynamicMessage.parseFrom(messageType, inputStream); + } +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufSerializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufSerializer.java new file mode 100644 index 000000000..38c294733 --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufSerializer.java @@ -0,0 +1,32 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import com.google.protobuf.Message; +import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.codec.Codec; +import io.pravega.schemaregistry.contract.data.SchemaInfo; +import io.pravega.schemaregistry.schemas.ProtobufSchema; +import lombok.SneakyThrows; + +import java.io.OutputStream; + +class ProtobufSerializer extends AbstractPravegaSerializer { + ProtobufSerializer(String groupId, SchemaRegistryClient client, ProtobufSchema schema, + Codec codec, boolean registerSchema) { + super(groupId, client, schema, codec, registerSchema); + } + + @SneakyThrows + @Override + protected void serialize(T var, SchemaInfo schemaInfo, OutputStream outputStream) { + var.writeTo(outputStream); + } +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java new file mode 100644 index 000000000..5d22e5f24 --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java @@ -0,0 +1,182 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import com.google.common.base.Preconditions; +import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.client.SchemaRegistryClientConfig; +import io.pravega.schemaregistry.codec.Codec; +import io.pravega.schemaregistry.codec.CodecFactory; +import io.pravega.schemaregistry.common.Either; +import io.pravega.schemaregistry.contract.data.Compatibility; +import io.pravega.schemaregistry.contract.data.EncodingInfo; +import io.pravega.schemaregistry.contract.data.GroupProperties; +import io.pravega.schemaregistry.contract.data.SchemaValidationRules; +import io.pravega.schemaregistry.contract.data.SerializationFormat; +import lombok.AccessLevel; +import lombok.Builder; +import lombok.Data; +import lombok.Getter; + +import java.nio.ByteBuffer; +import java.util.HashSet; +import java.util.Set; +import java.util.function.BiFunction; +import java.util.function.Function; + +import static io.pravega.schemaregistry.codec.CodecFactory.*; + +/** + * Serializer Config class that is passed to {@link SerializerFactory} for creating serializer. 
+ */ +@Data +@Builder +public class SerializerConfig { + private final static Codec NOOP = CodecFactory.none(); + private final static Codec GZIP = CodecFactory.gzip(); + private final static Codec SNAPPY = CodecFactory.snappy(); + + /** + * Name of the group. + */ + private final String groupId; + /** + * Either the registry client or the {@link SchemaRegistryClientConfig} that can be used for creating a new registry client. + * Exactly one of the two options has to be supplied. + */ + private final Either<SchemaRegistryClientConfig, SchemaRegistryClient> registryConfigOrClient; + /** + * Flag to tell the serializer if the schema should be automatically registered before using it in {@link io.pravega.client.stream.EventStreamWriter}. + * It is recommended to keep this flag as false in production systems and manage schema evolution explicitly, + * in lockstep with upgrades of existing Pravega client applications. + */ + private final boolean autoRegisterSchema; + /** + * Flag to tell the serializer if the codec should be automatically registered before using the serializer in + * {@link io.pravega.client.stream.EventStreamWriter}. + * It is recommended to keep this flag as false in production systems and manage the codecTypes used by writers explicitly, + * so that readers are aware of the encodings used. + */ + private final boolean autoRegisterCodec; + /** + * Codec to use for compressing events after serializing them. + */ + private final Codec codec; + /** + * Function that is applied to serialized data read from the stream. It is invoked after the codecType is read + * from {@link EncodingInfo}, and it should return the uncompressed data to the deserializer. + */ + private final Decoder decoder; + /** + * Tells the deserializer to fail upfront if the supplied decoder codecTypes do not match the group's codecTypes. + */ + private final boolean failOnCodecMismatch; + + /** + * Flag to tell the serializer if the group should be created automatically. + * It is recommended to keep this flag as false in production systems and to create groups and add schemas explicitly. + */ + private final boolean autoCreateGroup; + /** + * Group properties to use for creating the group if autoCreateGroup is set to true.
+ */ + private final GroupProperties groupProperties; + + public static final class SerializerConfigBuilder { + private Codec codec = NOOP; + + private Decoder decoder = new Decoder(); + + private boolean autoRegisterSchema = false; + private boolean autoRegisterCodec = false; + private boolean failOnCodecMismatch = true; + private Either registryConfigOrClient = null; + + private GroupProperties groupProperties = GroupProperties.builder().build(); + + public SerializerConfigBuilder decoder(String codecType, Function decoder) { + this.decoder = new Decoder(codecType, decoder); + return this; + } + + public SerializerConfigBuilder autoCreateGroup(SerializationFormat serializationFormat) { + return autoCreateGroup(serializationFormat, true); + } + + public SerializerConfigBuilder autoCreateGroup(SerializationFormat serializationFormat, boolean allowMultipleTypes) { + return autoCreateGroup(serializationFormat, SchemaValidationRules.of(Compatibility.fullTransitive()), allowMultipleTypes); + } + + public SerializerConfigBuilder autoCreateGroup(SerializationFormat serializationFormat, SchemaValidationRules rules, boolean allowMultipleTypes) { + this.autoCreateGroup = true; + this.groupProperties = new GroupProperties(serializationFormat, rules, allowMultipleTypes); + return this; + } + + public SerializerConfigBuilder registryClient(SchemaRegistryClient client) { + Preconditions.checkArgument(client != null); + this.registryConfigOrClient = Either.right(client); + return this; + } + + public SerializerConfigBuilder registryConfig(SchemaRegistryClientConfig config) { + Preconditions.checkArgument(config != null); + this.registryConfigOrClient = Either.left(config); + return this; + } + } + + static class Decoder { + private static final BiFunction DEFAULT = (x, y) -> { + switch (x) { + case NONE: + return NOOP.decode(y); + case MIME_GZIP: + return GZIP.decode(y); + case MIME_SNAPPY: + return SNAPPY.decode(y); + default: + throw new IllegalArgumentException(); + } + }; + + @Getter(AccessLevel.PACKAGE) + private final Set codecTypes; + private final BiFunction decoder; + + private Decoder(String codecType, Function decoder) { + this.decoder = (x, y) -> { + if (x.equals(codecType)) { + return decoder.apply(y); + } else { + return DEFAULT.apply(x, y); + } + }; + codecTypes = new HashSet<>(); + this.codecTypes.add(NONE); + this.codecTypes.add(MIME_GZIP); + this.codecTypes.add(MIME_SNAPPY); + this.codecTypes.add(codecType); + } + + private Decoder() { + this.decoder = DEFAULT; + codecTypes = new HashSet<>(); + this.codecTypes.add(NONE); + this.codecTypes.add(MIME_GZIP); + this.codecTypes.add(MIME_SNAPPY); + } + + ByteBuffer decode(String codecType, ByteBuffer bytes) { + return decoder.apply(codecType, bytes); + } + } +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java new file mode 100644 index 000000000..a4bfe0c75 --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java @@ -0,0 +1,675 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
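As a rough sketch of assembling a SerializerConfig with the builder above. The group name is a placeholder, the registry client is assumed to have been created beforehand (for example via SchemaRegistryClientFactory), the builder methods for plain fields are assumed to be generated by Lombok's @Builder, and the Avro constant on SerializationFormat is assumed:

import io.pravega.schemaregistry.client.SchemaRegistryClient;
import io.pravega.schemaregistry.contract.data.SerializationFormat;
import io.pravega.schemaregistry.serializers.SerializerConfig;

public class SerializerConfigExample {
    public static SerializerConfig buildConfig(SchemaRegistryClient registryClient) {
        return SerializerConfig.builder()
                .groupId("my-group")                       // placeholder group name
                .registryClient(registryClient)            // or .registryConfig(...) with a client config
                .autoRegisterSchema(true)                  // convenient for tests; keep false in production
                .autoCreateGroup(SerializationFormat.Avro) // create the group with default rules if it is missing
                // .decoder("my-codec", buf -> buf)        // optionally register a decoder for a custom codecType
                .build();
    }
}

The remaining defaults from the builder apply unchanged: the no-op codec, the built-in decoder for the none/gzip/snappy codecTypes, and failOnCodecMismatch set to true.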
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import com.google.common.base.Preconditions; +import com.google.protobuf.DynamicMessage; +import com.google.protobuf.GeneratedMessageV3; +import com.google.protobuf.Message; +import io.pravega.client.stream.Serializer; +import io.pravega.schemaregistry.cache.EncodingCache; +import io.pravega.schemaregistry.client.SchemaRegistryClientFactory; +import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.common.Either; +import io.pravega.schemaregistry.contract.data.EncodingInfo; +import io.pravega.schemaregistry.contract.data.SchemaInfo; +import io.pravega.schemaregistry.contract.data.SerializationFormat; +import io.pravega.schemaregistry.schemas.AvroSchema; +import io.pravega.schemaregistry.schemas.JSONSchema; +import io.pravega.schemaregistry.schemas.ProtobufSchema; +import io.pravega.schemaregistry.schemas.SchemaContainer; +import lombok.extern.slf4j.Slf4j; +import org.apache.avro.generic.GenericRecord; +import org.apache.avro.generic.IndexedRecord; +import org.apache.avro.specific.SpecificRecordBase; + +import javax.annotation.Nullable; +import java.io.InputStream; +import java.io.OutputStream; +import java.nio.ByteBuffer; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +@Slf4j +public class SerializerFactory { + public static final String ENCODE = "encode"; + + // region avro + + /** + * Creates a typed avro serializer for the Schema. The serializer implementation returned from this method is + * responsible for interacting with schema registry service and ensures that only valid registered schema can be used. + * + * Note: the returned serializer only implements {@link Serializer#serialize(Object)}. + * It does not implement {@link Serializer#deserialize(ByteBuffer)}. + * + * @param config Serializer Config used for instantiating a new serializer. + * @param schemaData Schema data that encapsulates an AvroSchema + * @param Type of event. It accepts either POJO or Avro generated classes and serializes them. + * @return A Serializer Implementation that can be used in {@link io.pravega.client.stream.EventStreamWriter} or + * {@link io.pravega.client.stream.TransactionalEventStreamWriter}. + */ + public static Serializer avroSerializer(SerializerConfig config, AvroSchema schemaData) { + Preconditions.checkNotNull(config); + Preconditions.checkNotNull(schemaData); + SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? + SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : + config.getRegistryConfigOrClient().getRight(); + autoCreateGroup(schemaRegistryClient, config); + registerCodec(schemaRegistryClient, config); + String groupId = config.getGroupId(); + return new AvroSerializer<>(groupId, schemaRegistryClient, schemaData, config.getCodec(), config.isAutoRegisterSchema()); + } + + /** + * Creates a typed avro deserializer for the Schema. The deserializer implementation returned from this method is + * responsible for interacting with schema registry service and validate the writer schema before using it. + * + * Note: the returned serializer only implements {@link Serializer#deserialize(ByteBuffer)}. + * It does not implement {@link Serializer#serialize(Object)}. + * + * @param config Serializer Config used for instantiating a new serializer. 
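A small sketch of wiring the avroSerializer factory above into an application; the AvroSchema wrapper for the event type is assumed to have been constructed elsewhere, and the returned Serializer is meant to be handed to an EventStreamWriter created through the regular Pravega client APIs:

import io.pravega.client.stream.Serializer;
import io.pravega.schemaregistry.schemas.AvroSchema;
import io.pravega.schemaregistry.serializers.SerializerConfig;
import io.pravega.schemaregistry.serializers.SerializerFactory;

public class AvroWriterWiring {
    // Writer-side serializer for a single Avro type; it only implements serialize().
    public static <T> Serializer<T> writerSerializer(SerializerConfig config, AvroSchema<T> schema) {
        return SerializerFactory.avroSerializer(config, schema);
    }
}

The matching reader side comes from avroDeserializer (typed) or avroGenericDeserializer (GenericRecord), described next.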
+ * @param schemaData Schema data that encapsulates an AvroSchema + * @param Type of event. The typed event should be an avro generated class. For generic type use {@link #avroGenericDeserializer} + * @return A deserializer Implementation that can be used in {@link io.pravega.client.stream.EventStreamReader}. + */ + public static Serializer avroDeserializer(SerializerConfig config, + AvroSchema schemaData) { + Preconditions.checkNotNull(config); + Preconditions.checkNotNull(schemaData); + SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? + SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : + config.getRegistryConfigOrClient().getRight(); + + String groupId = config.getGroupId(); + autoCreateGroup(schemaRegistryClient, config); + failOnCodecMismatch(schemaRegistryClient, config); + + EncodingCache encodingCache = EncodingCache.getEncodingCacheForGroup(groupId, schemaRegistryClient); + + return new AvroDeserlizer<>(groupId, schemaRegistryClient, schemaData, config.getDecoder(), encodingCache); + } + + /** + * Creates a generic avro deserializer. It has the optional parameter for schema. + * If the schema is not supplied, the writer schema is used for deserialization into {@link GenericRecord}. + * + * Note: the returned serializer only implements {@link Serializer#deserialize(ByteBuffer)}. + * It does not implement {@link Serializer#serialize(Object)}. + * + * @param config Serializer Config used for instantiating a new serializer. + * @param schemaData Schema data that encapsulates an AvroSchema. It can be null to indicate that writer schema should + * be used for deserialization. + * @return A deserializer Implementation that can be used in {@link io.pravega.client.stream.EventStreamReader}. + */ + public static Serializer avroGenericDeserializer(SerializerConfig config, + @Nullable AvroSchema schemaData) { + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? + SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : + config.getRegistryConfigOrClient().getRight(); + autoCreateGroup(schemaRegistryClient, config); + failOnCodecMismatch(schemaRegistryClient, config); + EncodingCache encodingCache = EncodingCache.getEncodingCacheForGroup(groupId, schemaRegistryClient); + + return new AvroGenericDeserlizer(groupId, schemaRegistryClient, schemaData, config.getDecoder(), encodingCache); + } + + /** + * A multiplexed Avro serializer that takes a map of schemas and validates them individually. + * + * @param config Serializer config. + * @param schemas map of avro schemas. + * @param Base Type of schemas. + * @return a Serializer which can serialize events of different types for which schemas are supplied. + */ + public static Serializer avroMultiTypeSerializer(SerializerConfig config, + Map, AvroSchema> schemas) { + Preconditions.checkNotNull(config); + Preconditions.checkNotNull(schemas); + + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? 
+ SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : + config.getRegistryConfigOrClient().getRight(); + autoCreateGroup(schemaRegistryClient, config); + registerCodec(schemaRegistryClient, config); + Map, AbstractPravegaSerializer> serializerMap = schemas + .entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, + x -> new AvroSerializer<>(groupId, schemaRegistryClient, x.getValue(), config.getCodec(), + config.isAutoRegisterSchema()))); + return new MultiplexedSerializer<>(serializerMap); + } + + /** + * A multiplexed Avro Deserializer that takes a map of schemas and deserializes events into those events depending + * on the object type information in {@link EncodingInfo}. + * + * @param config Serializer config. + * @param schemas map of avro schemas. + * @param Base type of schemas. + * @return a Deserializer which can deserialize events of different types in the stream into typed objects. + */ + public static Serializer avroMultiTypeDeserializer( + SerializerConfig config, Map, AvroSchema> schemas) { + Preconditions.checkNotNull(config); + Preconditions.checkNotNull(schemas); + + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? + SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : + config.getRegistryConfigOrClient().getRight(); + autoCreateGroup(schemaRegistryClient, config); + failOnCodecMismatch(schemaRegistryClient, config); + + EncodingCache encodingCache = EncodingCache.getEncodingCacheForGroup(groupId, schemaRegistryClient); + + Map> deserializerMap = schemas + .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), + x -> new AvroDeserlizer<>(groupId, schemaRegistryClient, x, config.getDecoder(), encodingCache))); + return new MultiplexedDeserializer<>(groupId, schemaRegistryClient, deserializerMap, config.getDecoder(), + encodingCache); + } + + /** + * A multiplexed Avro Deserializer that takes a map of schemas and deserializes events into those events depending + * on the object type information in {@link EncodingInfo}. + * + * @param config Serializer config. + * @param schemas map of avro schemas. + * @param Base type of schemas. + * @return a Deserializer which can deserialize events of different types in the stream into typed objects or a generic + * object + */ + public static Serializer> avroTypedOrGenericDeserializer( + SerializerConfig config, Map, AvroSchema> schemas) { + Preconditions.checkNotNull(config); + Preconditions.checkNotNull(schemas); + + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? 
+                SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) :
+                config.getRegistryConfigOrClient().getRight();
+        autoCreateGroup(schemaRegistryClient, config);
+        failOnCodecMismatch(schemaRegistryClient, config);
+
+        EncodingCache encodingCache = EncodingCache.getEncodingCacheForGroup(groupId, schemaRegistryClient);
+
+        Map> deserializerMap = schemas
+                .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(),
+                        x -> new AvroDeserlizer<>(groupId, schemaRegistryClient, x, config.getDecoder(), encodingCache)));
+        AbstractPravegaDeserializer genericDeserializer = new AvroGenericDeserlizer(groupId, schemaRegistryClient,
+                null, config.getDecoder(), encodingCache);
+        return new MultiplexedAndGenericDeserializer<>(groupId, schemaRegistryClient, deserializerMap, genericDeserializer,
+                config.getDecoder(), encodingCache);
+    }
+    // endregion
+
+    // region protobuf
+
+    /**
+     * Creates a typed protobuf serializer for the Schema. The serializer implementation returned from this method is
+     * responsible for interacting with schema registry service and ensures that only valid registered schema can be used.
+     *
+     * Note: the returned serializer only implements {@link Serializer#serialize(Object)}.
+     * It does not implement {@link Serializer#deserialize(ByteBuffer)}.
+     *
+     * @param config Serializer Config used for instantiating a new serializer.
+     * @param schemaData Schema data that encapsulates a Protobuf Schema.
+     * @param <T> Type of event.
+     * @return A Serializer Implementation that can be used in {@link io.pravega.client.stream.EventStreamWriter} or
+     * {@link io.pravega.client.stream.TransactionalEventStreamWriter}.
+     */
+    public static Serializer protobufSerializer(SerializerConfig config,
+                                                ProtobufSchema schemaData) {
+        String groupId = config.getGroupId();
+        SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ?
+                SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) :
+                config.getRegistryConfigOrClient().getRight();
+        autoCreateGroup(schemaRegistryClient, config);
+        registerCodec(schemaRegistryClient, config);
+        return new ProtobufSerializer<>(groupId, schemaRegistryClient, schemaData, config.getCodec(),
+                config.isAutoRegisterSchema());
+    }
+
+    /**
+     * Creates a typed protobuf deserializer for the Schema. The deserializer implementation returned from this method is
+     * responsible for interacting with schema registry service and validating the writer schema before using it.
+     *
+     * Note: the returned serializer only implements {@link Serializer#deserialize(ByteBuffer)}.
+     * It does not implement {@link Serializer#serialize(Object)}.
+     *
+     * @param config Serializer Config used for instantiating a new serializer.
+     * @param schemaData Schema data that encapsulates a ProtobufSchema
+     * @param <T> Type of event. The typed event should be a protobuf generated class. For generic type use {@link #protobufGenericDeserializer}
+     * @return A deserializer Implementation that can be used in {@link io.pravega.client.stream.EventStreamReader}.
+     */
+    public static Serializer protobufDeserializer(SerializerConfig config,
+                                                  ProtobufSchema schemaData) {
+        String groupId = config.getGroupId();
+        SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ?
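// Illustrative sketch (editor addition, not part of this patch): consuming the typed-or-generic avro
// deserializer described above. Left carries a known, typed event and Right carries a GenericRecord for
// writer types that are not in the supplied map, matching the assertions in SerializerTest. Reuses
// "config" and "bytes" from the earlier sketches; Either lives in io.pravega.schemaregistry.common and
// Either.getRight() is assumed by symmetry with getLeft().
Map<Class<? extends SpecificRecordBase>, AvroSchema<SpecificRecordBase>> known = new HashMap<>();
known.put(Test1.class, AvroSchema.ofBaseType(Test1.class));

Serializer<Either<SpecificRecordBase, GenericRecord>> deserializer =
        SerializerFactory.avroTypedOrGenericDeserializer(config, known);

Either<SpecificRecordBase, GenericRecord> result = deserializer.deserialize(bytes);
if (result.isLeft()) {
    SpecificRecordBase typed = result.getLeft();    // writer schema matched one of the registered classes
} else {
    GenericRecord generic = result.getRight();      // unknown writer type, deserialized generically
}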
+ SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : + config.getRegistryConfigOrClient().getRight(); + autoCreateGroup(schemaRegistryClient, config); + failOnCodecMismatch(schemaRegistryClient, config); + + EncodingCache encodingCache = EncodingCache.getEncodingCacheForGroup(groupId, schemaRegistryClient); + + // schema can be null in which case deserialization will happen into dynamic message + return new ProtobufDeserlizer<>(groupId, schemaRegistryClient, schemaData, config.getDecoder(), encodingCache); + } + + /** + * Creates a generic protobuf deserializer. It has the optional parameter for schema. + * If the schema is not supplied, the writer schema is used for deserialization into {@link DynamicMessage}. + * + * Note: the returned serializer only implements {@link Serializer#deserialize(ByteBuffer)}. + * It does not implement {@link Serializer#serialize(Object)}. + * + * @param config Serializer Config used for instantiating a new serializer. + * @param schema Schema data that encapsulates an ProtobufSchema. + * @return A deserializer Implementation that can be used in {@link io.pravega.client.stream.EventStreamReader}. + */ + public static Serializer protobufGenericDeserializer(SerializerConfig config, ProtobufSchema schema) { + SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? + SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : + config.getRegistryConfigOrClient().getRight(); + + String groupId = config.getGroupId(); + autoCreateGroup(schemaRegistryClient, config); + failOnCodecMismatch(schemaRegistryClient, config); + + EncodingCache encodingCache = EncodingCache.getEncodingCacheForGroup(groupId, schemaRegistryClient); + + return new ProtobufGenericDeserlizer(groupId, schemaRegistryClient, schema, config.getDecoder(), encodingCache); + } + + /** + * A multiplexed Protobuf serializer that takes a map of schemas and validates them individually. + * + * @param config Serializer config. + * @param schemas map of protobuf schemas. + * @param Base Type of schemas. + * @return a Serializer which can serialize events of different types for which schemas are supplied. + */ + public static Serializer protobufMultiTypeSerializer( + SerializerConfig config, Map, ProtobufSchema> schemas) { + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? + SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : + config.getRegistryConfigOrClient().getRight(); + autoCreateGroup(schemaRegistryClient, config); + registerCodec(schemaRegistryClient, config); + Map, AbstractPravegaSerializer> serializerMap = schemas + .entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, + x -> new ProtobufSerializer<>(groupId, schemaRegistryClient, x.getValue(), config.getCodec(), + config.isAutoRegisterSchema()))); + return new MultiplexedSerializer<>(serializerMap); + } + + /** + * A multiplexed protobuf Deserializer that takes a map of schemas and deserializes events into those events depending + * on the object type information in {@link EncodingInfo}. + * + * @param config Serializer config. + * @param schemas map of protobuf schemas. + * @param Base type of schemas. + * @return a Deserializer which can deserialize events of different types in the stream into typed objects. 
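// Illustrative sketch (editor addition, not part of this patch): serializing and deserializing protobuf
// events with the factory methods above. The descriptor set path mirrors the one used by SerializerTest;
// exception handling for the file and parse calls is omitted (assume a throws IOException context), and
// the generic shapes are reconstructed from the tests, so they may differ slightly from the actual API.
Path path = Paths.get("src/test/resources/proto/protobufTest.pb");
DescriptorProtos.FileDescriptorSet descriptorSet =
        DescriptorProtos.FileDescriptorSet.parseFrom(Files.readAllBytes(path));

ProtobufSchema<ProtobufTest.Message2> schema = ProtobufSchema.of(ProtobufTest.Message2.class, descriptorSet);

Serializer<ProtobufTest.Message2> serializer = SerializerFactory.protobufSerializer(config, schema);
ByteBuffer bytes = serializer.serialize(ProtobufTest.Message2.newBuilder().setName("name").setField1(1).build());

Serializer<ProtobufTest.Message2> deserializer = SerializerFactory.protobufDeserializer(config, schema);
ProtobufTest.Message2 event = deserializer.deserialize(bytes);

// A reader that does not have the generated class can fall back to DynamicMessage.
Serializer<DynamicMessage> generic = SerializerFactory.protobufGenericDeserializer(config, null);
DynamicMessage dynamic = generic.deserialize(bytes);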
+ */ + public static Serializer protobufMultiTypeDeserializer( + SerializerConfig config, Map, ProtobufSchema> schemas) { + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? + SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : + config.getRegistryConfigOrClient().getRight(); + autoCreateGroup(schemaRegistryClient, config); + failOnCodecMismatch(schemaRegistryClient, config); + + EncodingCache encodingCache = EncodingCache.getEncodingCacheForGroup(groupId, schemaRegistryClient); + + Map> deserializerMap = schemas + .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), + x -> new ProtobufDeserlizer<>(groupId, schemaRegistryClient, x, config.getDecoder(), encodingCache))); + return new MultiplexedDeserializer<>(groupId, schemaRegistryClient, deserializerMap, config.getDecoder(), encodingCache); + } + + /** + * A multiplexed protobuf Deserializer that takes a map of schemas and deserializes events into those events depending + * on the object type information in {@link EncodingInfo}. + * + * @param config Serializer config. + * @param schemas map of protobuf schemas. + * @param Base type of schemas. + * @return a Deserializer which can deserialize events of different types in the stream into typed objects. + */ + public static Serializer> protobufTypedOrGenericDeserializer( + SerializerConfig config, Map, ProtobufSchema> schemas) { + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? + SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : + config.getRegistryConfigOrClient().getRight(); + autoCreateGroup(schemaRegistryClient, config); + failOnCodecMismatch(schemaRegistryClient, config); + + EncodingCache encodingCache = EncodingCache.getEncodingCacheForGroup(groupId, schemaRegistryClient); + + Map> deserializerMap = schemas + .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), + x -> new ProtobufDeserlizer<>(groupId, schemaRegistryClient, x, config.getDecoder(), encodingCache))); + ProtobufGenericDeserlizer genericDeserializer = new ProtobufGenericDeserlizer(groupId, schemaRegistryClient, null, + config.getDecoder(), encodingCache); + return new MultiplexedAndGenericDeserializer<>(groupId, schemaRegistryClient, deserializerMap, genericDeserializer, + config.getDecoder(), encodingCache); + } + //endregion + + // region json + + /** + * Creates a typed json serializer for the Schema. The serializer implementation returned from this method is + * responsible for interacting with schema registry service and ensures that only valid registered schema can be used. + * + * Note: the returned serializer only implements {@link Serializer#serialize(Object)}. + * It does not implement {@link Serializer#deserialize(ByteBuffer)}. + * + * @param config Serializer Config used for instantiating a new serializer. + * @param schemaData Schema data that encapsulates an Json Schema. + * @param Type of event. + * @return A Serializer Implementation that can be used in {@link io.pravega.client.stream.EventStreamWriter} or + * {@link io.pravega.client.stream.TransactionalEventStreamWriter}. + */ + public static Serializer jsonSerializer(SerializerConfig config, JSONSchema schemaData) { + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? 
+                SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) :
+                config.getRegistryConfigOrClient().getRight();
+        autoCreateGroup(schemaRegistryClient, config);
+        registerCodec(schemaRegistryClient, config);
+        return new JsonSerializer<>(groupId, schemaRegistryClient, schemaData, config.getCodec(),
+                config.isAutoRegisterSchema());
+    }
+
+    /**
+     * Creates a typed json deserializer for the Schema. The deserializer implementation returned from this method is
+     * responsible for interacting with schema registry service and validating the writer schema before using it.
+     *
+     * Note: the returned serializer only implements {@link Serializer#deserialize(ByteBuffer)}.
+     * It does not implement {@link Serializer#serialize(Object)}.
+     *
+     * @param config Serializer Config used for instantiating a new serializer.
+     * @param schemaData Schema data that encapsulates a JSONSchema
+     * @param <T> Type of event. The typed event should be a java class (POJO) corresponding to the supplied JSONSchema. For generic deserialization use {@link #jsonGenericDeserializer}
+     * @return A deserializer Implementation that can be used in {@link io.pravega.client.stream.EventStreamReader}.
+     */
+    public static Serializer jsonDeserializer(SerializerConfig config, JSONSchema schemaData) {
+        String groupId = config.getGroupId();
+        SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ?
+                SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) :
+                config.getRegistryConfigOrClient().getRight();
+        autoCreateGroup(schemaRegistryClient, config);
+        failOnCodecMismatch(schemaRegistryClient, config);
+
+        EncodingCache encodingCache = EncodingCache.getEncodingCacheForGroup(groupId, schemaRegistryClient);
+
+        // deserializes into the typed java object described by the supplied schema
+        return new JsonDeserlizer<>(groupId, schemaRegistryClient, schemaData, config.getDecoder(), encodingCache);
+    }
+
+    /**
+     * Creates a generic json deserializer.
+     *
+     * Note: the returned serializer only implements {@link Serializer#deserialize(ByteBuffer)}.
+     * It does not implement {@link Serializer#serialize(Object)}.
+     *
+     * @param config Serializer Config used for instantiating a new serializer.
+     * @return A deserializer Implementation that can be used in {@link io.pravega.client.stream.EventStreamReader}.
+     */
+    public static Serializer jsonGenericDeserializer(SerializerConfig config) {
+        SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ?
+                SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) :
+                config.getRegistryConfigOrClient().getRight();
+
+        String groupId = config.getGroupId();
+
+        EncodingCache encodingCache = EncodingCache.getEncodingCacheForGroup(groupId, schemaRegistryClient);
+
+        return new JsonGenericDeserlizer(groupId, schemaRegistryClient, config.getDecoder(),
+                encodingCache);
+    }
+
+    /**
+     * A multiplexed Json serializer that takes a map of schemas and validates them individually.
+     *
+     * @param config Serializer config.
+     * @param schemas map of json schemas.
+     * @param <T> Base Type of schemas.
+     * @return a Serializer which can serialize events of different types for which schemas are supplied.
+     */
+    public static Serializer jsonMultiTypeSerializer(
+            SerializerConfig config, Map, JSONSchema> schemas) {
+        String groupId = config.getGroupId();
+        SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ?
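// Illustrative sketch (editor addition, not part of this patch): a json round trip using the factory
// methods above, with the DerivedUser1/Address test objects from this change as the payload. Reuses
// "config" from the earlier sketches; the generic signatures are reconstructed from SerializerTest.
JSONSchema<DerivedUser1> schema = JSONSchema.of(DerivedUser1.class);

Serializer<DerivedUser1> serializer = SerializerFactory.jsonSerializer(config, schema);
ByteBuffer bytes = serializer.serialize(new DerivedUser1("user", new Address("street", "city"), 2, "user1"));

Serializer<DerivedUser1> deserializer = SerializerFactory.jsonDeserializer(config, schema);
DerivedUser1 event = deserializer.deserialize(bytes);

// Readers without the compiled class get the parsed document plus its schema.
Serializer<JSonGenericObject> genericDeserializer = SerializerFactory.jsonGenericDeserializer(config);
JSonGenericObject generic = genericDeserializer.deserialize(bytes);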
+ SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : + config.getRegistryConfigOrClient().getRight(); + autoCreateGroup(schemaRegistryClient, config); + registerCodec(schemaRegistryClient, config); + Map, AbstractPravegaSerializer> serializerMap = schemas + .entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, + x -> new JsonSerializer<>(groupId, schemaRegistryClient, x.getValue(), config.getCodec(), + config.isAutoRegisterSchema()))); + return new MultiplexedSerializer<>(serializerMap); + } + + /** + * A multiplexed json Deserializer that takes a map of schemas and deserializes events into those events depending + * on the object type information in {@link EncodingInfo}. + * + * @param config Serializer config. + * @param schemas map of json schemas. + * @param Base type of schemas. + * @return a Deserializer which can deserialize events of different types in the stream into typed objects. + */ + public static Serializer jsonMultiTypeDeserializer( + SerializerConfig config, Map, JSONSchema> schemas) { + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? + SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : + config.getRegistryConfigOrClient().getRight(); + + EncodingCache encodingCache = EncodingCache.getEncodingCacheForGroup(groupId, schemaRegistryClient); + + Map> deserializerMap = schemas + .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), + x -> new JsonDeserlizer<>(groupId, schemaRegistryClient, x, config.getDecoder(), + encodingCache))); + return new MultiplexedDeserializer<>(groupId, schemaRegistryClient, + deserializerMap, config.getDecoder(), encodingCache); + } + + /** + * A multiplexed json Deserializer that takes a map of schemas and deserializes events into those events depending + * on the object type information in {@link EncodingInfo}. + * + * @param config Serializer config. + * @param schemas map of json schemas. + * @param Base type of schemas. + * @return a Deserializer which can deserialize events of different types in the stream into typed objects. + */ + public static Serializer> jsonTypedOrGenericDeserializer( + SerializerConfig config, Map, JSONSchema> schemas) { + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? + SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : + config.getRegistryConfigOrClient().getRight(); + autoCreateGroup(schemaRegistryClient, config); + failOnCodecMismatch(schemaRegistryClient, config); + + EncodingCache encodingCache = EncodingCache.getEncodingCacheForGroup(groupId, schemaRegistryClient); + + Map> deserializerMap = schemas + .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), + x -> new JsonDeserlizer<>(groupId, schemaRegistryClient, x, config.getDecoder(), encodingCache))); + JsonGenericDeserlizer genericDeserializer = new JsonGenericDeserlizer(groupId, schemaRegistryClient, config.getDecoder(), + encodingCache); + + return new MultiplexedAndGenericDeserializer<>(groupId, schemaRegistryClient, + deserializerMap, genericDeserializer, config.getDecoder(), encodingCache); + } + //endregion + + // region custom + + /** + * A serializer that uses user supplied implementation of {@link PravegaSerializer} for serializing the objects. 
+ * It also takes user supplied schema and registers/validates it against the registry. + * + * @param config Serializer config. + * @param schema Schema for the object to serialize + * @param serializer user supplied serializer + * @param Type of object to serialize + * @return Serializer that uses user supplied serialization function for serializing events. + */ + public static Serializer customSerializer(SerializerConfig config, SchemaContainer schema, PravegaSerializer serializer) { + + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? + SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : + config.getRegistryConfigOrClient().getRight(); + autoCreateGroup(schemaRegistryClient, config); + registerCodec(schemaRegistryClient, config); + return new AbstractPravegaSerializer(groupId, schemaRegistryClient, + schema, config.getCodec(), config.isAutoRegisterSchema()) { + @Override + protected void serialize(T var, SchemaInfo schema, OutputStream outputStream) { + serializer.serialize(var, schema, outputStream); + } + }; + } + + /** + * A deserializer that uses user supplied implementation of {@link PravegaDeserializer} for deserializing the data into + * typed java objects. + * + * @param config Serializer config. + * @param schema optional Schema for the object to deserialize + * @param deserializer user supplied deserializer + * @param Type of object to deserialize + * @return Deserializer that uses user supplied deserialization function for deserializing payload into typed events. + */ + public static Serializer customDeserializer(SerializerConfig config, @Nullable SchemaContainer schema, + PravegaDeserializer deserializer) { + + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? + SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : + config.getRegistryConfigOrClient().getRight(); + autoCreateGroup(schemaRegistryClient, config); + failOnCodecMismatch(schemaRegistryClient, config); + + EncodingCache encodingCache = EncodingCache.getEncodingCacheForGroup(groupId, schemaRegistryClient); + + return new AbstractPravegaDeserializer(groupId, schemaRegistryClient, schema, false, + config.getDecoder(), encodingCache) { + @Override + protected T deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) { + return deserializer.deserialize(inputStream, writerSchema, readerSchema); + } + }; + } + // endregion + + // region multi format deserializer + + /** + * A deserializer that can read data where each event could be written with different serialization formats. + * + * @param config serializer config + * @return a deserializer that can deserialize protobuf, json or avro events into java objects. + */ + public static Serializer multiFormatGenericDeserializer(SerializerConfig config) { + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? 
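// Illustrative sketch (editor addition, not part of this patch): plugging a hand-rolled format into the
// custom serializer/deserializer hooks above. It assumes PravegaSerializer and PravegaDeserializer are
// single-method interfaces (so lambdas apply) and that "stringSchema" is some SchemaContainer describing
// the payload; both are assumptions, as is the use of Guava's ByteStreams for draining the InputStream.
Serializer<String> serializer = SerializerFactory.customSerializer(config, stringSchema,
        (value, schemaInfo, out) -> {
            try {
                out.write(value.getBytes(StandardCharsets.UTF_8));
            } catch (IOException e) {
                throw new UncheckedIOException(e);
            }
        });

Serializer<String> deserializer = SerializerFactory.customDeserializer(config, stringSchema,
        (in, writerSchema, readerSchema) -> {
            try {
                return new String(ByteStreams.toByteArray(in), StandardCharsets.UTF_8);
            } catch (IOException e) {
                throw new UncheckedIOException(e);
            }
        });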
+ SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : + config.getRegistryConfigOrClient().getRight(); + autoCreateGroup(schemaRegistryClient, config); + failOnCodecMismatch(schemaRegistryClient, config); + EncodingCache encodingCache = EncodingCache.getEncodingCacheForGroup(groupId, schemaRegistryClient); + + AbstractPravegaDeserializer json = new JsonGenericDeserlizer(config.getGroupId(), schemaRegistryClient, + config.getDecoder(), encodingCache); + AbstractPravegaDeserializer protobuf = new ProtobufGenericDeserlizer(groupId, schemaRegistryClient, null, config.getDecoder(), + encodingCache); + AbstractPravegaDeserializer avro = new AvroGenericDeserlizer(groupId, schemaRegistryClient, null, config.getDecoder(), + encodingCache); + + Map map = new HashMap<>(); + map.put(SerializationFormat.Json, json); + map.put(SerializationFormat.Avro, avro); + map.put(SerializationFormat.Protobuf, protobuf); + return new MultipleFormatGenericDeserializer(groupId, schemaRegistryClient, map, config.getDecoder(), + encodingCache); + } + + /** + * A deserializer that can read data where each event could be written with different serialization formats and + * deserializes and converts them to a json string. + * + * @param config serializer config + * @return a deserializer that can deserialize protobuf, json or avro events into java objects. + */ + public static Serializer deserializerAsJsonString(SerializerConfig config) { + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? + SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : + config.getRegistryConfigOrClient().getRight(); + autoCreateGroup(schemaRegistryClient, config); + failOnCodecMismatch(schemaRegistryClient, config); + EncodingCache encodingCache = EncodingCache.getEncodingCacheForGroup(groupId, schemaRegistryClient); + + AbstractPravegaDeserializer json = new JsonGenericDeserlizer(config.getGroupId(), schemaRegistryClient, + config.getDecoder(), encodingCache); + AbstractPravegaDeserializer protobuf = new ProtobufGenericDeserlizer(groupId, schemaRegistryClient, null, config.getDecoder(), + encodingCache); + AbstractPravegaDeserializer avro = new AvroGenericDeserlizer(groupId, schemaRegistryClient, null, config.getDecoder(), + encodingCache); + + Map map = new HashMap<>(); + map.put(SerializationFormat.Json, json); + map.put(SerializationFormat.Avro, avro); + map.put(SerializationFormat.Protobuf, protobuf); + return new MultipleFormatJsonStringDeserializer(groupId, schemaRegistryClient, map, config.getDecoder(), + encodingCache); + } + // endregion + + private static void autoCreateGroup(SchemaRegistryClient client, SerializerConfig config) { + if (config.isAutoCreateGroup()) { + client.addGroup(config.getGroupId(), config.getGroupProperties()); + } + } + + private static void registerCodec(SchemaRegistryClient client, SerializerConfig config) { + if (config.isAutoRegisterCodec()) { + client.addCodecType(config.getGroupId(), config.getCodec().getCodecType()); + } + } + + private static void failOnCodecMismatch(SchemaRegistryClient client, SerializerConfig config) { + if (config.isFailOnCodecMismatch()) { + List codecTypesInGroup = client.getCodecTypes(config.getGroupId()); + if (!config.getDecoder().getCodecTypes().containsAll(codecTypesInGroup)) { + log.warn("Not all CodecTypes are supported by reader. 
Required codecTypes = {}", codecTypesInGroup); + throw new RuntimeException(String.format("Need all codecTypes in %s", codecTypesInGroup.toString())); + } + } + } +} diff --git a/serializers/src/test/java/io/pravega/schemaregistry/GroupIdTest.java b/serializers/src/test/java/io/pravega/schemaregistry/GroupIdTest.java new file mode 100644 index 000000000..ef0c7524c --- /dev/null +++ b/serializers/src/test/java/io/pravega/schemaregistry/GroupIdTest.java @@ -0,0 +1,29 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry; + +import com.google.common.base.Charsets; +import org.junit.Test; + +import java.io.UnsupportedEncodingException; +import java.net.URLDecoder; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +public class GroupIdTest { + @Test + public void testGroupId() throws UnsupportedEncodingException { + String groupId = GroupIdGenerator.getGroupId(GroupIdGenerator.Type.QualifiedStreamName, "scope", "stream"); + + assertTrue(groupId.startsWith("pravega")); + assertEquals(URLDecoder.decode(groupId, Charsets.UTF_8.toString()), "pravega://scope/stream/"); + } +} diff --git a/serializers/src/test/java/io/pravega/schemaregistry/cache/CacheTest.java b/serializers/src/test/java/io/pravega/schemaregistry/cache/CacheTest.java new file mode 100644 index 000000000..bdb84cf30 --- /dev/null +++ b/serializers/src/test/java/io/pravega/schemaregistry/cache/CacheTest.java @@ -0,0 +1,41 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
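// Illustrative sketch (editor addition, not part of this patch): reading a stream whose events may have
// been written in any of the supported formats, using the multi-format factory methods defined in
// SerializerFactory above. Reuses "config" and "bytes" from the earlier sketches.
Serializer<Object> anyFormat = SerializerFactory.multiFormatGenericDeserializer(config);
Object event = anyFormat.deserialize(bytes);    // GenericRecord, DynamicMessage or JSonGenericObject

// Alternatively, render whatever was written as a JSON string, e.g. for logging or ad-hoc inspection.
Serializer<String> asJson = SerializerFactory.deserializerAsJsonString(config);
String json = asJson.deserialize(bytes);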
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.cache; + +import com.google.common.collect.ImmutableMap; +import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.codec.CodecFactory; +import io.pravega.schemaregistry.contract.data.EncodingId; +import io.pravega.schemaregistry.contract.data.EncodingInfo; +import io.pravega.schemaregistry.contract.data.SchemaInfo; +import io.pravega.schemaregistry.contract.data.SerializationFormat; +import io.pravega.schemaregistry.contract.data.VersionInfo; +import org.junit.Test; + +import java.nio.ByteBuffer; + +import static org.junit.Assert.assertEquals; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; + +public class CacheTest { + @Test + public void testCache() { + SchemaRegistryClient client = mock(SchemaRegistryClient.class); + String groupId = "groupId"; + EncodingId encodingId = new EncodingId(0); + EncodingInfo encodingInfo = new EncodingInfo(new VersionInfo("name", 0, 0), + new SchemaInfo("name", SerializationFormat.Avro, ByteBuffer.wrap(new byte[0]), ImmutableMap.of()), CodecFactory.snappy().getCodecType()); + doAnswer(x -> encodingInfo).when(client).getEncodingInfo(eq(groupId), eq(encodingId)); + EncodingCache cache = EncodingCache.getEncodingCacheForGroup(groupId, client); + assertEquals(encodingInfo, cache.getGroupEncodingInfo(encodingId)); + } +} diff --git a/serializers/src/test/java/io/pravega/schemaregistry/codec/CodecTest.java b/serializers/src/test/java/io/pravega/schemaregistry/codec/CodecTest.java new file mode 100644 index 000000000..969eebb7e --- /dev/null +++ b/serializers/src/test/java/io/pravega/schemaregistry/codec/CodecTest.java @@ -0,0 +1,47 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.codec; + +import com.google.common.base.Charsets; +import org.junit.Test; + +import java.nio.ByteBuffer; +import java.util.Arrays; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +public class CodecTest { + @Test + public void testCodec() { + byte[] testStringBytes = "this is a test string".getBytes(Charsets.UTF_8); + Codec snappy = CodecFactory.snappy(); + assertEquals(snappy.getCodecType(), CodecFactory.MIME_SNAPPY); + ByteBuffer encoded = snappy.encode(ByteBuffer.wrap(testStringBytes)); + assertFalse(Arrays.equals(encoded.array(), testStringBytes)); + ByteBuffer decoded = snappy.decode(encoded); + assertTrue(Arrays.equals(decoded.array(), testStringBytes)); + + Codec gzip = CodecFactory.gzip(); + assertEquals(gzip.getCodecType(), CodecFactory.MIME_GZIP); + encoded = gzip.encode(ByteBuffer.wrap(testStringBytes)); + assertFalse(Arrays.equals(encoded.array(), testStringBytes)); + decoded = gzip.decode(encoded); + assertTrue(Arrays.equals(decoded.array(), testStringBytes)); + + Codec none = CodecFactory.none(); + assertEquals(none.getCodecType(), CodecFactory.NONE); + encoded = none.encode(ByteBuffer.wrap(testStringBytes)); + assertTrue(Arrays.equals(encoded.array(), testStringBytes)); + decoded = none.decode(encoded); + assertTrue(Arrays.equals(decoded.array(), testStringBytes)); + } +} diff --git a/serializers/src/test/java/io/pravega/schemaregistry/schemas/TestSchemas.java b/serializers/src/test/java/io/pravega/schemaregistry/schemas/TestSchemas.java new file mode 100644 index 000000000..ea6b3ef95 --- /dev/null +++ b/serializers/src/test/java/io/pravega/schemaregistry/schemas/TestSchemas.java @@ -0,0 +1,103 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.schemas; + +import com.google.protobuf.DescriptorProtos; +import com.google.protobuf.DynamicMessage; +import com.google.protobuf.GeneratedMessageV3; +import io.pravega.schemaregistry.contract.data.SerializationFormat; +import io.pravega.schemaregistry.testobjs.DerivedUser1; +import io.pravega.schemaregistry.testobjs.DerivedUser2; +import io.pravega.schemaregistry.testobjs.SchemaDefinitions; +import io.pravega.schemaregistry.testobjs.User; +import io.pravega.schemaregistry.testobjs.generated.ProtobufTest; +import io.pravega.schemaregistry.testobjs.generated.Test1; +import io.pravega.schemaregistry.testobjs.generated.Test2; +import org.apache.avro.generic.GenericRecord; +import org.apache.avro.specific.SpecificRecordBase; +import org.junit.Test; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; + +import static io.pravega.schemaregistry.testobjs.SchemaDefinitions.JSON_SCHEMA_STRING; +import static org.junit.Assert.*; + +public class TestSchemas { + @Test + public void testAvroSchema() { + AvroSchema schema = AvroSchema.of(SchemaDefinitions.SCHEMA1); + assertNotNull(schema.getSchema()); + assertEquals(schema.getSchemaInfo().getSerializationFormat(), SerializationFormat.Avro); + + AvroSchema schema2 = AvroSchema.of(User.class); + assertNotNull(schema2.getSchema()); + assertEquals(schema2.getSchemaInfo().getSerializationFormat(), SerializationFormat.Avro); + + AvroSchema schema3 = AvroSchema.of(Test1.class); + assertNotNull(schema3.getSchema()); + assertEquals(schema3.getSchemaInfo().getSerializationFormat(), SerializationFormat.Avro); + + AvroSchema schemabase1 = AvroSchema.ofBaseType(Test1.class); + assertNotNull(schemabase1.getSchema()); + assertEquals(schemabase1.getSchemaInfo().getSerializationFormat(), SerializationFormat.Avro); + + AvroSchema schemabase2 = AvroSchema.ofBaseType(Test2.class); + assertNotNull(schemabase2.getSchema()); + assertEquals(schemabase2.getSchemaInfo().getSerializationFormat(), SerializationFormat.Avro); + } + + @Test + public void testProtobufSchema() throws IOException { + Path path = Paths.get("src/test/resources/proto/protobufTest.pb"); + byte[] schemaBytes = Files.readAllBytes(path); + DescriptorProtos.FileDescriptorSet descriptorSet = DescriptorProtos.FileDescriptorSet.parseFrom(schemaBytes); + + ProtobufSchema schema = ProtobufSchema.of(ProtobufTest.Message1.class.getName(), descriptorSet); + assertNull(schema.getParser()); + assertNotNull(schema.getDescriptorProto()); + assertEquals(schema.getSchemaInfo().getSerializationFormat(), SerializationFormat.Protobuf); + + ProtobufSchema schema2 = ProtobufSchema.of(ProtobufTest.Message1.class, descriptorSet); + assertNotNull(schema2.getParser()); + assertNotNull(schema2.getDescriptorProto()); + assertEquals(schema2.getSchemaInfo().getSerializationFormat(), SerializationFormat.Protobuf); + + ProtobufSchema baseSchema1 = ProtobufSchema.ofBaseType(ProtobufTest.Message1.class, descriptorSet); + assertNotNull(baseSchema1.getParser()); + assertNotNull(baseSchema1.getDescriptorProto()); + assertEquals(baseSchema1.getSchemaInfo().getSerializationFormat(), SerializationFormat.Protobuf); + + ProtobufSchema baseSchema2 = ProtobufSchema.ofBaseType(ProtobufTest.Message2.class, descriptorSet); + assertNotNull(baseSchema2.getParser()); + assertNotNull(baseSchema2.getDescriptorProto()); + 
assertEquals(baseSchema2.getSchemaInfo().getSerializationFormat(), SerializationFormat.Protobuf); + } + + @Test + public void testJsonSchema() { + JSONSchema schema = JSONSchema.of(User.class); + assertNotNull(schema.getSchema()); + assertEquals(schema.getSchemaInfo().getSerializationFormat(), SerializationFormat.Json); + + JSONSchema schema2 = JSONSchema.of("Person", JSON_SCHEMA_STRING); + assertNotNull(schema2.getSchema()); + assertEquals(schema2.getSchemaInfo().getSerializationFormat(), SerializationFormat.Json); + + JSONSchema baseSchema1 = JSONSchema.ofBaseType(DerivedUser1.class, User.class); + assertNotNull(baseSchema1.getSchema()); + assertEquals(baseSchema1.getSchemaInfo().getSerializationFormat(), SerializationFormat.Json); + JSONSchema baseSchema2 = JSONSchema.ofBaseType(DerivedUser2.class, User.class); + assertNotNull(baseSchema2.getSchema()); + assertEquals(baseSchema2.getSchemaInfo().getSerializationFormat(), SerializationFormat.Json); + } +} diff --git a/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java b/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java new file mode 100644 index 000000000..79abb783e --- /dev/null +++ b/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java @@ -0,0 +1,388 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import com.google.common.collect.ImmutableMap; +import com.google.protobuf.DescriptorProtos; +import com.google.protobuf.DynamicMessage; +import com.google.protobuf.GeneratedMessageV3; +import io.pravega.client.stream.Serializer; +import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.codec.CodecFactory; +import io.pravega.schemaregistry.common.Either; +import io.pravega.schemaregistry.contract.data.EncodingId; +import io.pravega.schemaregistry.contract.data.EncodingInfo; +import io.pravega.schemaregistry.contract.data.GroupProperties; +import io.pravega.schemaregistry.contract.data.SchemaInfo; +import io.pravega.schemaregistry.contract.data.SchemaWithVersion; +import io.pravega.schemaregistry.contract.data.SerializationFormat; +import io.pravega.schemaregistry.contract.data.VersionInfo; +import io.pravega.schemaregistry.schemas.AvroSchema; +import io.pravega.schemaregistry.schemas.JSONSchema; +import io.pravega.schemaregistry.schemas.ProtobufSchema; +import io.pravega.schemaregistry.testobjs.Address; +import io.pravega.schemaregistry.testobjs.DerivedUser1; +import io.pravega.schemaregistry.testobjs.DerivedUser2; +import io.pravega.schemaregistry.testobjs.generated.ProtobufTest; +import io.pravega.schemaregistry.testobjs.generated.Test1; +import io.pravega.schemaregistry.testobjs.generated.Test2; +import io.pravega.test.common.AssertExtensions; +import org.apache.avro.generic.GenericRecord; +import org.apache.avro.specific.SpecificRecordBase; +import org.junit.Test; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.HashMap; +import java.util.Map; + +import static org.junit.Assert.*; +import static org.mockito.ArgumentMatchers.any; +import static 
org.mockito.ArgumentMatchers.anyString; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.*; + +public class SerializerTest { + @Test + public void testAvroSerializers() { + SchemaRegistryClient client = mock(SchemaRegistryClient.class); + + SerializerConfig config = SerializerConfig.builder().registryClient(client).groupId("groupId").build(); + AvroSchema schema1 = AvroSchema.of(Test1.class); + AvroSchema schema2 = AvroSchema.of(Test2.class); + + VersionInfo versionInfo1 = new VersionInfo("name", 0, 0); + VersionInfo versionInfo2 = new VersionInfo("name", 1, 1); + doAnswer(x -> GroupProperties.builder().serializationFormat(SerializationFormat.Any).build()) + .when(client).getGroupProperties(anyString()); + doAnswer(x -> versionInfo1).when(client).getVersionForSchema(anyString(), eq(schema1.getSchemaInfo())); + doAnswer(x -> versionInfo2).when(client).getVersionForSchema(anyString(), eq(schema2.getSchemaInfo())); + doAnswer(x -> new EncodingId(0)).when(client).getEncodingId(anyString(), eq(versionInfo1), any()); + doAnswer(x -> new EncodingId(1)).when(client).getEncodingId(anyString(), eq(versionInfo2), any()); + doAnswer(x -> new EncodingInfo(versionInfo1, schema1.getSchemaInfo(), CodecFactory.NONE)).when(client).getEncodingInfo(anyString(), eq(new EncodingId(0))); + doAnswer(x -> new EncodingInfo(versionInfo2, schema2.getSchemaInfo(), CodecFactory.NONE)).when(client).getEncodingInfo(anyString(), eq(new EncodingId(1))); + doAnswer(x -> true).when(client).canReadUsing(anyString(), any()); + + Serializer serializer = SerializerFactory.avroSerializer(config, schema1); + Test1 test1 = new Test1("name", 1); + ByteBuffer serialized = serializer.serialize(test1); + + Serializer deserializer = SerializerFactory.avroDeserializer(config, schema1); + Test1 deserialized = deserializer.deserialize(serialized); + assertEquals(deserialized, test1); + + serialized = serializer.serialize(test1); + Serializer genericDeserializer = SerializerFactory.avroGenericDeserializer(config, null); + GenericRecord genericDeserialized = genericDeserializer.deserialize(serialized); + assertEquals(genericDeserialized.get("name").toString(), "name"); + assertEquals(genericDeserialized.get("field1"), 1); + + // multi type + Test2 test2 = new Test2("name", 1, "2"); + + AvroSchema schema1Base = AvroSchema.ofBaseType(Test1.class); + AvroSchema schema2Base = AvroSchema.ofBaseType(Test2.class); + Map, AvroSchema> map = new HashMap<>(); + map.put(Test1.class, schema1Base); + map.put(Test2.class, schema2Base); + Serializer multiSerializer = SerializerFactory.avroMultiTypeSerializer(config, map); + serialized = multiSerializer.serialize(test1); + Serializer multiDeserializer = SerializerFactory.avroMultiTypeDeserializer(config, map); + SpecificRecordBase deserialized2 = multiDeserializer.deserialize(serialized); + assertEquals(deserialized2, test1); + + serialized = multiSerializer.serialize(test2); + deserialized2 = multiDeserializer.deserialize(serialized); + assertEquals(deserialized2, test2); + + Map, AvroSchema> map2 = new HashMap<>(); + map2.put(Test1.class, schema1Base); + Serializer> fallbackDeserializer = SerializerFactory.avroTypedOrGenericDeserializer(config, map2); + + serialized = multiSerializer.serialize(test1); + Either fallback = fallbackDeserializer.deserialize(serialized); + assertTrue(fallback.isLeft()); + assertEquals(fallback.getLeft(), test1); + + serialized = multiSerializer.serialize(test2); + + fallback = fallbackDeserializer.deserialize(serialized); + 
assertTrue(fallback.isRight()); + } + + @Test + public void testProtobufSerializers() throws IOException { + SchemaRegistryClient client = mock(SchemaRegistryClient.class); + SerializerConfig config = SerializerConfig.builder().registryClient(client).groupId("groupId").build(); + Path path = Paths.get("src/test/resources/proto/protobufTest.pb"); + byte[] schemaBytes = Files.readAllBytes(path); + DescriptorProtos.FileDescriptorSet descriptorSet = DescriptorProtos.FileDescriptorSet.parseFrom(schemaBytes); + ProtobufSchema schema1 = ProtobufSchema.of(ProtobufTest.Message2.class, descriptorSet); + ProtobufSchema schema2 = ProtobufSchema.of(ProtobufTest.Message3.class, descriptorSet); + + VersionInfo versionInfo1 = new VersionInfo("name", 0, 0); + VersionInfo versionInfo2 = new VersionInfo("name", 1, 1); + doAnswer(x -> GroupProperties.builder().serializationFormat(SerializationFormat.Any).build()) + .when(client).getGroupProperties(anyString()); + doAnswer(x -> versionInfo1).when(client).getVersionForSchema(anyString(), eq(schema1.getSchemaInfo())); + doAnswer(x -> versionInfo2).when(client).getVersionForSchema(anyString(), eq(schema2.getSchemaInfo())); + doAnswer(x -> new EncodingId(0)).when(client).getEncodingId(anyString(), eq(versionInfo1), any()); + doAnswer(x -> new EncodingId(1)).when(client).getEncodingId(anyString(), eq(versionInfo2), any()); + doAnswer(x -> new EncodingInfo(versionInfo1, schema1.getSchemaInfo(), CodecFactory.NONE)).when(client).getEncodingInfo(anyString(), eq(new EncodingId(0))); + doAnswer(x -> new EncodingInfo(versionInfo2, schema2.getSchemaInfo(), CodecFactory.NONE)).when(client).getEncodingInfo(anyString(), eq(new EncodingId(1))); + doAnswer(x -> true).when(client).canReadUsing(anyString(), any()); + + Serializer serializer = SerializerFactory.protobufSerializer(config, schema1); + ProtobufTest.Message2 message = ProtobufTest.Message2.newBuilder().setName("name").setField1(1).build(); + ByteBuffer serialized = serializer.serialize(message); + + Serializer deserializer = SerializerFactory.protobufDeserializer(config, schema1); + ProtobufTest.Message2 deserialized = deserializer.deserialize(serialized); + assertEquals(deserialized, message); + + serialized = serializer.serialize(message); + Serializer genericDeserializer = SerializerFactory.protobufGenericDeserializer(config, null); + DynamicMessage generic = genericDeserializer.deserialize(serialized); + assertEquals(generic.getAllFields().size(), 2); + + // multi type + ProtobufTest.Message3 message2 = ProtobufTest.Message3.newBuilder().setName("name").setField1(1).setField2(2).build(); + + ProtobufSchema schema1Base = ProtobufSchema.ofBaseType(ProtobufTest.Message2.class, descriptorSet); + ProtobufSchema schema2Base = ProtobufSchema.ofBaseType(ProtobufTest.Message3.class, descriptorSet); + Map, ProtobufSchema> map = new HashMap<>(); + map.put(ProtobufTest.Message2.class, schema1Base); + map.put(ProtobufTest.Message3.class, schema2Base); + Serializer multiSerializer = SerializerFactory.protobufMultiTypeSerializer(config, map); + serialized = multiSerializer.serialize(message); + Serializer multiDeserializer = SerializerFactory.protobufMultiTypeDeserializer(config, map); + GeneratedMessageV3 deserialized2 = multiDeserializer.deserialize(serialized); + assertEquals(deserialized2, message); + + serialized = multiSerializer.serialize(message2); + deserialized2 = multiDeserializer.deserialize(serialized); + assertEquals(deserialized2, message2); + + Map, ProtobufSchema> map2 = new HashMap<>(); + 
map2.put(ProtobufTest.Message2.class, schema1Base); + Serializer> fallbackDeserializer = SerializerFactory.protobufTypedOrGenericDeserializer(config, map2); + serialized = multiSerializer.serialize(message); + Either fallback = fallbackDeserializer.deserialize(serialized); + assertTrue(fallback.isLeft()); + assertEquals(fallback.getLeft(), message); + + serialized = multiSerializer.serialize(message2); + + fallback = fallbackDeserializer.deserialize(serialized); + assertTrue(fallback.isRight()); + } + + @Test + public void testJsonSerializers() { + SchemaRegistryClient client = mock(SchemaRegistryClient.class); + SerializerConfig config = SerializerConfig.builder().registryClient(client).groupId("groupId").build(); + JSONSchema schema1 = JSONSchema.of(DerivedUser1.class); + JSONSchema schema2 = JSONSchema.of(DerivedUser2.class); + + VersionInfo versionInfo1 = new VersionInfo("name", 0, 0); + VersionInfo versionInfo2 = new VersionInfo("name", 1, 1); + doAnswer(x -> GroupProperties.builder().serializationFormat(SerializationFormat.Any).build()) + .when(client).getGroupProperties(anyString()); + doAnswer(x -> versionInfo1).when(client).getVersionForSchema(anyString(), eq(schema1.getSchemaInfo())); + doAnswer(x -> versionInfo2).when(client).getVersionForSchema(anyString(), eq(schema2.getSchemaInfo())); + doAnswer(x -> new EncodingId(0)).when(client).getEncodingId(anyString(), eq(versionInfo1), any()); + doAnswer(x -> new EncodingId(1)).when(client).getEncodingId(anyString(), eq(versionInfo2), any()); + doAnswer(x -> new EncodingInfo(versionInfo1, schema1.getSchemaInfo(), CodecFactory.NONE)).when(client).getEncodingInfo(anyString(), eq(new EncodingId(0))); + doAnswer(x -> new EncodingInfo(versionInfo2, schema2.getSchemaInfo(), CodecFactory.NONE)).when(client).getEncodingInfo(anyString(), eq(new EncodingId(1))); + doAnswer(x -> true).when(client).canReadUsing(anyString(), any()); + + Serializer serializer = SerializerFactory.jsonSerializer(config, schema1); + DerivedUser1 user1 = new DerivedUser1("user", new Address("street", "city"), 2, "user1"); + ByteBuffer serialized = serializer.serialize(user1); + + Serializer deserializer = SerializerFactory.jsonDeserializer(config, schema1); + DerivedUser1 deserialized = deserializer.deserialize(serialized); + assertEquals(deserialized, user1); + + serialized = serializer.serialize(user1); + Serializer genericDeserializer = SerializerFactory.jsonGenericDeserializer(config); + JSonGenericObject generic = genericDeserializer.deserialize(serialized); + assertEquals(generic.getJsonSchema(), schema1.getSchema()); + assertEquals(generic.getObject().size(), 4); + + // multi type + DerivedUser2 user2 = new DerivedUser2("user", new Address("street", "city"), 2, "user2"); + + JSONSchema schema1Base = JSONSchema.ofBaseType(DerivedUser1.class, Object.class); + JSONSchema schema2Base = JSONSchema.ofBaseType(DerivedUser2.class, Object.class); + Map, JSONSchema> map = new HashMap<>(); + map.put(DerivedUser1.class, schema1Base); + map.put(DerivedUser2.class, schema2Base); + Serializer multiSerializer = SerializerFactory.jsonMultiTypeSerializer(config, map); + serialized = multiSerializer.serialize(user1); + Serializer multiDeserializer = SerializerFactory.jsonMultiTypeDeserializer(config, map); + Object deserialized2 = multiDeserializer.deserialize(serialized); + assertEquals(deserialized2, user1); + + serialized = multiSerializer.serialize(user2); + deserialized2 = multiDeserializer.deserialize(serialized); + assertEquals(deserialized2, user2); + + Map, JSONSchema> 
map2 = new HashMap<>(); + map2.put(DerivedUser1.class, schema1Base); + Serializer> fallbackDeserializer = SerializerFactory.jsonTypedOrGenericDeserializer(config, map2); + serialized = multiSerializer.serialize(user1); + Either fallback = fallbackDeserializer.deserialize(serialized); + assertTrue(fallback.isLeft()); + assertEquals(fallback.getLeft(), user1); + + serialized = multiSerializer.serialize(user2); + + fallback = fallbackDeserializer.deserialize(serialized); + assertTrue(fallback.isRight()); + } + + @Test + public void testMultiformatDeserializers() throws IOException { + SchemaRegistryClient client = mock(SchemaRegistryClient.class); + Path path = Paths.get("src/test/resources/proto/protobufTest.pb"); + byte[] schemaBytes = Files.readAllBytes(path); + DescriptorProtos.FileDescriptorSet descriptorSet = DescriptorProtos.FileDescriptorSet.parseFrom(schemaBytes); + + SerializerConfig config = SerializerConfig.builder().registryClient(client).groupId("groupId").build(); + AvroSchema schema1 = AvroSchema.of(Test1.class); + ProtobufSchema schema2 = ProtobufSchema.of(ProtobufTest.Message2.class, descriptorSet); + JSONSchema schema3 = JSONSchema.of(DerivedUser1.class); + + VersionInfo versionInfo1 = new VersionInfo("avro", 0, 0); + VersionInfo versionInfo2 = new VersionInfo("proto", 1, 1); + VersionInfo versionInfo3 = new VersionInfo("json", 2, 2); + + doAnswer(x -> GroupProperties.builder().serializationFormat(SerializationFormat.Any).build()) + .when(client).getGroupProperties(anyString()); + doAnswer(x -> versionInfo1).when(client).getVersionForSchema(anyString(), eq(schema1.getSchemaInfo())); + doAnswer(x -> versionInfo2).when(client).getVersionForSchema(anyString(), eq(schema2.getSchemaInfo())); + doAnswer(x -> versionInfo3).when(client).getVersionForSchema(anyString(), eq(schema3.getSchemaInfo())); + doAnswer(x -> new EncodingId(0)).when(client).getEncodingId(anyString(), eq(versionInfo1), any()); + doAnswer(x -> new EncodingId(1)).when(client).getEncodingId(anyString(), eq(versionInfo2), any()); + doAnswer(x -> new EncodingId(2)).when(client).getEncodingId(anyString(), eq(versionInfo3), any()); + doAnswer(x -> new EncodingInfo(versionInfo1, schema1.getSchemaInfo(), CodecFactory.NONE)).when(client).getEncodingInfo(anyString(), eq(new EncodingId(0))); + doAnswer(x -> new EncodingInfo(versionInfo2, schema2.getSchemaInfo(), CodecFactory.NONE)).when(client).getEncodingInfo(anyString(), eq(new EncodingId(1))); + doAnswer(x -> new EncodingInfo(versionInfo3, schema3.getSchemaInfo(), CodecFactory.NONE)).when(client).getEncodingInfo(anyString(), eq(new EncodingId(2))); + doAnswer(x -> true).when(client).canReadUsing(anyString(), any()); + + Serializer avroSerializer = SerializerFactory.avroSerializer(config, schema1); + Test1 test1 = new Test1("name", 1); + ByteBuffer serializedAvro = avroSerializer.serialize(test1); + + Serializer protobufSerializer = SerializerFactory.protobufSerializer(config, schema2); + ProtobufTest.Message2 message = ProtobufTest.Message2.newBuilder().setName("name").setField1(1).build(); + ByteBuffer serializedProto = protobufSerializer.serialize(message); + + Serializer jsonSerializer = SerializerFactory.jsonSerializer(config, schema3); + DerivedUser1 user1 = new DerivedUser1("user", new Address("street", "city"), 2, "user1"); + ByteBuffer serializedJson = jsonSerializer.serialize(user1); + + Serializer deserializer = SerializerFactory.multiFormatGenericDeserializer(config); + Object deserialized = deserializer.deserialize(serializedAvro); + assertTrue(deserialized 
instanceof GenericRecord); + deserialized = deserializer.deserialize(serializedProto); + assertTrue(deserialized instanceof DynamicMessage); + deserialized = deserializer.deserialize(serializedJson); + assertTrue(deserialized instanceof JSonGenericObject); + + Serializer jsonStringDeserializer = SerializerFactory.deserializerAsJsonString(config); + serializedAvro.position(0); + String jsonString = jsonStringDeserializer.deserialize(serializedAvro); + assertNotNull(jsonString); + serializedProto.position(0); + jsonString = jsonStringDeserializer.deserialize(serializedProto); + assertNotNull(jsonString); + serializedJson.position(0); + jsonString = jsonStringDeserializer.deserialize(serializedJson); + assertNotNull(jsonString); + } + + @Test + public void testNoEncodingProto() throws IOException { + SchemaRegistryClient client = mock(SchemaRegistryClient.class); + SerializerConfig config = SerializerConfig.builder().registryClient(client).groupId("groupId").build(); + Path path = Paths.get("src/test/resources/proto/protobufTest.pb"); + byte[] schemaBytes = Files.readAllBytes(path); + DescriptorProtos.FileDescriptorSet descriptorSet = DescriptorProtos.FileDescriptorSet.parseFrom(schemaBytes); + ProtobufSchema schema1 = ProtobufSchema.of(ProtobufTest.Message2.class, descriptorSet); + + VersionInfo versionInfo1 = new VersionInfo("name", 0, 0); + doAnswer(x -> GroupProperties.builder().serializationFormat(SerializationFormat.Any) + .properties(ImmutableMap.of(SerializerFactory.ENCODE, Boolean.toString(false))).build()) + .when(client).getGroupProperties(anyString()); + doAnswer(x -> versionInfo1).when(client).getVersionForSchema(anyString(), eq(schema1.getSchemaInfo())); + doAnswer(x -> new SchemaWithVersion(schema1.getSchemaInfo(), versionInfo1)).when(client).getLatestSchemaVersion(anyString(), any()); + doAnswer(x -> true).when(client).canReadUsing(anyString(), any()); + + Serializer serializer = SerializerFactory.protobufSerializer(config, schema1); + verify(client, never()).getEncodingId(anyString(), any(), any()); + + ProtobufTest.Message2 message = ProtobufTest.Message2.newBuilder().setName("name").setField1(1).build(); + ByteBuffer serialized = serializer.serialize(message); + + Serializer deserializer = SerializerFactory.protobufDeserializer(config, schema1); + verify(client, never()).getEncodingInfo(anyString(), any()); + + ProtobufTest.Message2 deserialized = deserializer.deserialize(serialized); + assertEquals(deserialized, message); + + serialized = serializer.serialize(message); + AssertExtensions.assertThrows(IllegalArgumentException.class, () -> SerializerFactory.protobufGenericDeserializer(config, null)); + + SchemaInfo latestSchema = client.getLatestSchemaVersion("groupId", null).getSchemaInfo(); + ProtobufSchema schemaDynamic = ProtobufSchema.of(latestSchema.getType(), descriptorSet); + Serializer genericDeserializer = SerializerFactory.protobufGenericDeserializer(config, schemaDynamic); + + DynamicMessage generic = genericDeserializer.deserialize(serialized); + assertEquals(generic.getAllFields().size(), 2); + } + + @Test + public void testNoEncodingJson() throws IOException { + SchemaRegistryClient client = mock(SchemaRegistryClient.class); + SerializerConfig config = SerializerConfig.builder().registryClient(client).groupId("groupId").build(); + JSONSchema schema1 = JSONSchema.of(DerivedUser1.class); + + VersionInfo versionInfo1 = new VersionInfo("name", 0, 0); + doAnswer(x -> GroupProperties.builder().serializationFormat(SerializationFormat.Any) + 
.properties(ImmutableMap.of(SerializerFactory.ENCODE, Boolean.toString(false))).build()) + .when(client).getGroupProperties(anyString()); + doAnswer(x -> versionInfo1).when(client).getVersionForSchema(anyString(), eq(schema1.getSchemaInfo())); + doAnswer(x -> new SchemaWithVersion(schema1.getSchemaInfo(), versionInfo1)).when(client).getLatestSchemaVersion(anyString(), any()); + doAnswer(x -> true).when(client).canReadUsing(anyString(), any()); + + Serializer serializer = SerializerFactory.jsonSerializer(config, schema1); + verify(client, never()).getEncodingId(anyString(), any(), any()); + DerivedUser1 user1 = new DerivedUser1("user", new Address("street", "city"), 2, "user1"); + ByteBuffer serialized = serializer.serialize(user1); + + Serializer deserializer = SerializerFactory.jsonDeserializer(config, schema1); + verify(client, never()).getEncodingInfo(anyString(), any()); + DerivedUser1 deserialized = deserializer.deserialize(serialized); + assertEquals(deserialized, user1); + + serialized = serializer.serialize(user1); + + Serializer genericDeserializer = SerializerFactory.jsonGenericDeserializer(config); + + JSonGenericObject generic = genericDeserializer.deserialize(serialized); + assertNotNull(generic.getObject()); + assertNull(generic.getJsonSchema()); + } +} diff --git a/serializers/src/test/java/io/pravega/schemaregistry/testobjs/Address.java b/serializers/src/test/java/io/pravega/schemaregistry/testobjs/Address.java new file mode 100644 index 000000000..fb5113a4d --- /dev/null +++ b/serializers/src/test/java/io/pravega/schemaregistry/testobjs/Address.java @@ -0,0 +1,22 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.testobjs; + +import lombok.AllArgsConstructor; +import lombok.Data; +import lombok.NoArgsConstructor; + +@Data +@AllArgsConstructor +@NoArgsConstructor +public class Address { + private String streetAddress; + private String city; +} diff --git a/serializers/src/test/java/io/pravega/schemaregistry/testobjs/DerivedUser1.java b/serializers/src/test/java/io/pravega/schemaregistry/testobjs/DerivedUser1.java new file mode 100644 index 000000000..3e448b694 --- /dev/null +++ b/serializers/src/test/java/io/pravega/schemaregistry/testobjs/DerivedUser1.java @@ -0,0 +1,28 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.testobjs; + +import lombok.Data; +import lombok.EqualsAndHashCode; +import lombok.Getter; +import lombok.NoArgsConstructor; + +@Data +@NoArgsConstructor +@EqualsAndHashCode(callSuper = true) +public class DerivedUser1 extends User { + @Getter + private String user1; + + public DerivedUser1(String name, Address address, int age, String user1) { + super(name, address, age); + this.user1 = user1; + } +} diff --git a/serializers/src/test/java/io/pravega/schemaregistry/testobjs/DerivedUser2.java b/serializers/src/test/java/io/pravega/schemaregistry/testobjs/DerivedUser2.java new file mode 100644 index 000000000..5d8a7aef2 --- /dev/null +++ b/serializers/src/test/java/io/pravega/schemaregistry/testobjs/DerivedUser2.java @@ -0,0 +1,28 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.testobjs; + +import lombok.Data; +import lombok.EqualsAndHashCode; +import lombok.Getter; +import lombok.NoArgsConstructor; + +@Data +@NoArgsConstructor +@EqualsAndHashCode(callSuper = true) +public class DerivedUser2 extends User { + @Getter + private String user2; + + public DerivedUser2(String name, Address address, int age, String user2) { + super(name, address, age); + this.user2 = user2; + } +} diff --git a/serializers/src/test/java/io/pravega/schemaregistry/testobjs/SchemaDefinitions.java b/serializers/src/test/java/io/pravega/schemaregistry/testobjs/SchemaDefinitions.java new file mode 100644 index 000000000..ba4fa50fe --- /dev/null +++ b/serializers/src/test/java/io/pravega/schemaregistry/testobjs/SchemaDefinitions.java @@ -0,0 +1,61 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.testobjs; + +import org.apache.avro.Schema; +import org.apache.avro.SchemaBuilder; + +public class SchemaDefinitions { + public static final Schema SCHEMA1 = SchemaBuilder + .record("MyTest") + .fields() + .name("a") + .type(Schema.create(Schema.Type.STRING)) + .noDefault() + .endRecord(); + + public static final Schema SCHEMA2 = SchemaBuilder + .record("MyTest") + .fields() + .name("a") + .type(Schema.create(Schema.Type.STRING)) + .noDefault() + .name("b") + .type(Schema.create(Schema.Type.STRING)) + .withDefault("backward compatible with schema1") + .endRecord(); + + public static final Schema SCHEMA3 = SchemaBuilder + .record("MyTest") + .fields() + .name("a") + .type(Schema.create(Schema.Type.STRING)) + .noDefault() + .name("b") + .type(Schema.create(Schema.Type.STRING)) + .noDefault() + .name("c") + .type(Schema.create(Schema.Type.STRING)) + .noDefault() + .endRecord(); + + public static final String JSON_SCHEMA_STRING = "{\"id\": \"person.json\", " + + "\"title\": \"Person\", " + + "\"type\": \"object\", " + + "\"properties\": { " + + "\"name\": {" + + "\"type\": \"string\"" + + "}," + + "\"age\": {" + + "\"type\": \"integer\", \"minimum\": 0" + + "}" + + "}" + + "}"; +} diff --git a/serializers/src/test/java/io/pravega/schemaregistry/testobjs/User.java b/serializers/src/test/java/io/pravega/schemaregistry/testobjs/User.java new file mode 100644 index 000000000..6ff4f2322 --- /dev/null +++ b/serializers/src/test/java/io/pravega/schemaregistry/testobjs/User.java @@ -0,0 +1,28 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.testobjs; + +import lombok.AllArgsConstructor; +import lombok.Data; +import lombok.Getter; +import lombok.NoArgsConstructor; + +@Data +@AllArgsConstructor +@NoArgsConstructor +public class User { + @Getter + private String name; + @Getter + private Address address; + @Getter + private int age; + +} diff --git a/serializers/src/test/java/io/pravega/schemaregistry/testobjs/generated/ProtobufTest.java b/serializers/src/test/java/io/pravega/schemaregistry/testobjs/generated/ProtobufTest.java new file mode 100644 index 000000000..96a34c85e --- /dev/null +++ b/serializers/src/test/java/io/pravega/schemaregistry/testobjs/generated/ProtobufTest.java @@ -0,0 +1,2836 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: protobufTest.proto + +package io.pravega.schemaregistry.testobjs.generated; + +public final class ProtobufTest { + private ProtobufTest() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); + } + public interface InternalMessageOrBuilder extends + // @@protoc_insertion_point(interface_extends:io.pravega.schemaregistry.testobjs.generated.InternalMessage) + com.google.protobuf.MessageOrBuilder { + + /** + * .io.pravega.schemaregistry.testobjs.generated.InternalMessage.Values value = 1; + * @return The enum numeric value on the wire for value. 
+ */ + int getValueValue(); + /** + * .io.pravega.schemaregistry.testobjs.generated.InternalMessage.Values value = 1; + * @return The value. + */ + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Values getValue(); + } + /** + * Protobuf type {@code io.pravega.schemaregistry.testobjs.generated.InternalMessage} + */ + public static final class InternalMessage extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:io.pravega.schemaregistry.testobjs.generated.InternalMessage) + InternalMessageOrBuilder { + private static final long serialVersionUID = 0L; + // Use InternalMessage.newBuilder() to construct. + private InternalMessage(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private InternalMessage() { + value_ = 0; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new InternalMessage(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private InternalMessage( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 8: { + int rawValue = input.readEnum(); + + value_ = rawValue; + break; + } + default: { + if (!parseUnknownField( + input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_InternalMessage_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_InternalMessage_fieldAccessorTable + .ensureFieldAccessorsInitialized( + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.class, io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Builder.class); + } + + /** + * Protobuf enum {@code io.pravega.schemaregistry.testobjs.generated.InternalMessage.Values} + */ + public enum Values + implements com.google.protobuf.ProtocolMessageEnum { + /** + * val1 = 0; + */ + val1(0), + /** + * val2 = 1; + */ + val2(1), + /** + * val3 = 2; + */ + val3(2), + /** + * val4 = 3; + */ + val4(3), + UNRECOGNIZED(-1), + ; + + /** + * val1 = 0; + */ + public static final int val1_VALUE = 0; + /** + * val2 = 1; + */ + public static final int val2_VALUE = 1; + /** + * val3 = 2; + */ + public static final int val3_VALUE = 2; + /** + * val4 = 3; 
+ */ + public static final int val4_VALUE = 3; + + + public final int getNumber() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static Values valueOf(int value) { + return forNumber(value); + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + */ + public static Values forNumber(int value) { + switch (value) { + case 0: return val1; + case 1: return val2; + case 2: return val3; + case 3: return val4; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static final com.google.protobuf.Internal.EnumLiteMap< + Values> internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public Values findValueByNumber(int number) { + return Values.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(ordinal()); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.getDescriptor().getEnumTypes().get(0); + } + + private static final Values[] VALUES = values(); + + public static Values valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private Values(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:io.pravega.schemaregistry.testobjs.generated.InternalMessage.Values) + } + + public static final int VALUE_FIELD_NUMBER = 1; + private int value_; + /** + * .io.pravega.schemaregistry.testobjs.generated.InternalMessage.Values value = 1; + * @return The enum numeric value on the wire for value. + */ + public int getValueValue() { + return value_; + } + /** + * .io.pravega.schemaregistry.testobjs.generated.InternalMessage.Values value = 1; + * @return The value. + */ + public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Values getValue() { + @SuppressWarnings("deprecation") + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Values result = io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Values.valueOf(value_); + return result == null ? 
io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Values.UNRECOGNIZED : result; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (value_ != io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Values.val1.getNumber()) { + output.writeEnum(1, value_); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (value_ != io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Values.val1.getNumber()) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(1, value_); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage)) { + return super.equals(obj); + } + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage other = (io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage) obj; + + if (value_ != other.value_) return false; + if (!unknownFields.equals(other.unknownFields)) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + VALUE_FIELD_NUMBER; + hash = (53 * hash) + value_; + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code io.pravega.schemaregistry.testobjs.generated.InternalMessage} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:io.pravega.schemaregistry.testobjs.generated.InternalMessage) + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessageOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_InternalMessage_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_InternalMessage_fieldAccessorTable + .ensureFieldAccessorsInitialized( + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.class, io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Builder.class); + } + + // Construct using io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + value_ = 0; + + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_InternalMessage_descriptor; + } + + @java.lang.Override + public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage getDefaultInstanceForType() { + return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.getDefaultInstance(); + } + + @java.lang.Override + public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage build() { + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage buildPartial() { + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage result = new io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage(this); + result.value_ = value_; + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return super.setField(field, value); + } + @java.lang.Override + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + 
@java.lang.Override + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return super.addRepeatedField(field, value); + } + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage) { + return mergeFrom((io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage other) { + if (other == io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.getDefaultInstance()) return this; + if (other.value_ != 0) { + setValueValue(other.getValueValue()); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private int value_ = 0; + /** + * .io.pravega.schemaregistry.testobjs.generated.InternalMessage.Values value = 1; + * @return The enum numeric value on the wire for value. + */ + public int getValueValue() { + return value_; + } + /** + * .io.pravega.schemaregistry.testobjs.generated.InternalMessage.Values value = 1; + * @param value The enum numeric value on the wire for value to set. + * @return This builder for chaining. + */ + public Builder setValueValue(int value) { + value_ = value; + onChanged(); + return this; + } + /** + * .io.pravega.schemaregistry.testobjs.generated.InternalMessage.Values value = 1; + * @return The value. + */ + public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Values getValue() { + @SuppressWarnings("deprecation") + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Values result = io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Values.valueOf(value_); + return result == null ? io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Values.UNRECOGNIZED : result; + } + /** + * .io.pravega.schemaregistry.testobjs.generated.InternalMessage.Values value = 1; + * @param value The value to set. + * @return This builder for chaining. 
+ */ + public Builder setValue(io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Values value) { + if (value == null) { + throw new NullPointerException(); + } + + value_ = value.getNumber(); + onChanged(); + return this; + } + /** + * .io.pravega.schemaregistry.testobjs.generated.InternalMessage.Values value = 1; + * @return This builder for chaining. + */ + public Builder clearValue() { + + value_ = 0; + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:io.pravega.schemaregistry.testobjs.generated.InternalMessage) + } + + // @@protoc_insertion_point(class_scope:io.pravega.schemaregistry.testobjs.generated.InternalMessage) + private static final io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage(); + } + + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public InternalMessage parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new InternalMessage(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface Message1OrBuilder extends + // @@protoc_insertion_point(interface_extends:io.pravega.schemaregistry.testobjs.generated.Message1) + com.google.protobuf.MessageOrBuilder { + + /** + * .io.pravega.schemaregistry.testobjs.generated.InternalMessage internal = 1; + * @return Whether the internal field is set. + */ + boolean hasInternal(); + /** + * .io.pravega.schemaregistry.testobjs.generated.InternalMessage internal = 1; + * @return The internal. + */ + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage getInternal(); + /** + * .io.pravega.schemaregistry.testobjs.generated.InternalMessage internal = 1; + */ + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessageOrBuilder getInternalOrBuilder(); + + /** + * string name = 2; + * @return The name. + */ + java.lang.String getName(); + /** + * string name = 2; + * @return The bytes for name. 
+ */ + com.google.protobuf.ByteString + getNameBytes(); + } + /** + * Protobuf type {@code io.pravega.schemaregistry.testobjs.generated.Message1} + */ + public static final class Message1 extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:io.pravega.schemaregistry.testobjs.generated.Message1) + Message1OrBuilder { + private static final long serialVersionUID = 0L; + // Use Message1.newBuilder() to construct. + private Message1(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private Message1() { + name_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new Message1(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private Message1( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Builder subBuilder = null; + if (internal_ != null) { + subBuilder = internal_.toBuilder(); + } + internal_ = input.readMessage(io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(internal_); + internal_ = subBuilder.buildPartial(); + } + + break; + } + case 18: { + java.lang.String s = input.readStringRequireUtf8(); + + name_ = s; + break; + } + default: { + if (!parseUnknownField( + input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message1_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message1_fieldAccessorTable + .ensureFieldAccessorsInitialized( + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1.class, io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1.Builder.class); + } + + public static final int INTERNAL_FIELD_NUMBER = 1; + private io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage internal_; + /** + * .io.pravega.schemaregistry.testobjs.generated.InternalMessage internal = 1; + * @return Whether the internal field is set. 
+ */ + public boolean hasInternal() { + return internal_ != null; + } + /** + * .io.pravega.schemaregistry.testobjs.generated.InternalMessage internal = 1; + * @return The internal. + */ + public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage getInternal() { + return internal_ == null ? io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.getDefaultInstance() : internal_; + } + /** + * .io.pravega.schemaregistry.testobjs.generated.InternalMessage internal = 1; + */ + public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessageOrBuilder getInternalOrBuilder() { + return getInternal(); + } + + public static final int NAME_FIELD_NUMBER = 2; + private volatile java.lang.Object name_; + /** + * string name = 2; + * @return The name. + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } + } + /** + * string name = 2; + * @return The bytes for name. + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (internal_ != null) { + output.writeMessage(1, getInternal()); + } + if (!getNameBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, name_); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (internal_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, getInternal()); + } + if (!getNameBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, name_); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1)) { + return super.equals(obj); + } + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 other = (io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1) obj; + + if (hasInternal() != other.hasInternal()) return false; + if (hasInternal()) { + if (!getInternal() + .equals(other.getInternal())) return false; + } + if (!getName() + .equals(other.getName())) return false; + if (!unknownFields.equals(other.unknownFields)) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasInternal()) { + hash = (37 * hash) + INTERNAL_FIELD_NUMBER; + hash = (53 * 
hash) + getInternal().hashCode(); + } + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, 
extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code io.pravega.schemaregistry.testobjs.generated.Message1} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:io.pravega.schemaregistry.testobjs.generated.Message1) + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1OrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message1_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message1_fieldAccessorTable + .ensureFieldAccessorsInitialized( + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1.class, io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1.Builder.class); + } + + // Construct using io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + if (internalBuilder_ == null) { + internal_ = null; + } else { + internal_ = null; + internalBuilder_ = null; + } + name_ = ""; + + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message1_descriptor; + } + + @java.lang.Override + public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 getDefaultInstanceForType() { + return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1.getDefaultInstance(); + } + + @java.lang.Override + public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 build() { + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 buildPartial() { + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 result = new io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1(this); + if 
(internalBuilder_ == null) { + result.internal_ = internal_; + } else { + result.internal_ = internalBuilder_.build(); + } + result.name_ = name_; + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return super.setField(field, value); + } + @java.lang.Override + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + @java.lang.Override + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return super.addRepeatedField(field, value); + } + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1) { + return mergeFrom((io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 other) { + if (other == io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1.getDefaultInstance()) return this; + if (other.hasInternal()) { + mergeInternal(other.getInternal()); + } + if (!other.getName().isEmpty()) { + name_ = other.name_; + onChanged(); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage internal_; + private com.google.protobuf.SingleFieldBuilderV3< + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage, io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Builder, io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessageOrBuilder> internalBuilder_; + /** + * .io.pravega.schemaregistry.testobjs.generated.InternalMessage internal = 1; + * @return Whether the internal field is set. + */ + public boolean hasInternal() { + return internalBuilder_ != null || internal_ != null; + } + /** + * .io.pravega.schemaregistry.testobjs.generated.InternalMessage internal = 1; + * @return The internal. 
+ */ + public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage getInternal() { + if (internalBuilder_ == null) { + return internal_ == null ? io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.getDefaultInstance() : internal_; + } else { + return internalBuilder_.getMessage(); + } + } + /** + * .io.pravega.schemaregistry.testobjs.generated.InternalMessage internal = 1; + */ + public Builder setInternal(io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage value) { + if (internalBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + internal_ = value; + onChanged(); + } else { + internalBuilder_.setMessage(value); + } + + return this; + } + /** + * .io.pravega.schemaregistry.testobjs.generated.InternalMessage internal = 1; + */ + public Builder setInternal( + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Builder builderForValue) { + if (internalBuilder_ == null) { + internal_ = builderForValue.build(); + onChanged(); + } else { + internalBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + * .io.pravega.schemaregistry.testobjs.generated.InternalMessage internal = 1; + */ + public Builder mergeInternal(io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage value) { + if (internalBuilder_ == null) { + if (internal_ != null) { + internal_ = + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.newBuilder(internal_).mergeFrom(value).buildPartial(); + } else { + internal_ = value; + } + onChanged(); + } else { + internalBuilder_.mergeFrom(value); + } + + return this; + } + /** + * .io.pravega.schemaregistry.testobjs.generated.InternalMessage internal = 1; + */ + public Builder clearInternal() { + if (internalBuilder_ == null) { + internal_ = null; + onChanged(); + } else { + internal_ = null; + internalBuilder_ = null; + } + + return this; + } + /** + * .io.pravega.schemaregistry.testobjs.generated.InternalMessage internal = 1; + */ + public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Builder getInternalBuilder() { + + onChanged(); + return getInternalFieldBuilder().getBuilder(); + } + /** + * .io.pravega.schemaregistry.testobjs.generated.InternalMessage internal = 1; + */ + public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessageOrBuilder getInternalOrBuilder() { + if (internalBuilder_ != null) { + return internalBuilder_.getMessageOrBuilder(); + } else { + return internal_ == null ? 
+ io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.getDefaultInstance() : internal_; + } + } + /** + * .io.pravega.schemaregistry.testobjs.generated.InternalMessage internal = 1; + */ + private com.google.protobuf.SingleFieldBuilderV3< + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage, io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Builder, io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessageOrBuilder> + getInternalFieldBuilder() { + if (internalBuilder_ == null) { + internalBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage, io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessage.Builder, io.pravega.schemaregistry.testobjs.generated.ProtobufTest.InternalMessageOrBuilder>( + getInternal(), + getParentForChildren(), + isClean()); + internal_ = null; + } + return internalBuilder_; + } + + private java.lang.Object name_ = ""; + /** + * string name = 2; + * @return The name. + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * string name = 2; + * @return The bytes for name. + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * string name = 2; + * @param value The name to set. + * @return This builder for chaining. + */ + public Builder setName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + name_ = value; + onChanged(); + return this; + } + /** + * string name = 2; + * @return This builder for chaining. + */ + public Builder clearName() { + + name_ = getDefaultInstance().getName(); + onChanged(); + return this; + } + /** + * string name = 2; + * @param value The bytes for name to set. + * @return This builder for chaining. 
+ */ + public Builder setNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + name_ = value; + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:io.pravega.schemaregistry.testobjs.generated.Message1) + } + + // @@protoc_insertion_point(class_scope:io.pravega.schemaregistry.testobjs.generated.Message1) + private static final io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1(); + } + + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public Message1 parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new Message1(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message1 getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface Message2OrBuilder extends + // @@protoc_insertion_point(interface_extends:io.pravega.schemaregistry.testobjs.generated.Message2) + com.google.protobuf.MessageOrBuilder { + + /** + * string name = 1; + * @return The name. + */ + java.lang.String getName(); + /** + * string name = 1; + * @return The bytes for name. + */ + com.google.protobuf.ByteString + getNameBytes(); + + /** + * int32 field1 = 2; + * @return The field1. + */ + int getField1(); + } + /** + * Protobuf type {@code io.pravega.schemaregistry.testobjs.generated.Message2} + */ + public static final class Message2 extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:io.pravega.schemaregistry.testobjs.generated.Message2) + Message2OrBuilder { + private static final long serialVersionUID = 0L; + // Use Message2.newBuilder() to construct. 
+ private Message2(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private Message2() { + name_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new Message2(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private Message2( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + java.lang.String s = input.readStringRequireUtf8(); + + name_ = s; + break; + } + case 16: { + + field1_ = input.readInt32(); + break; + } + default: { + if (!parseUnknownField( + input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message2_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message2_fieldAccessorTable + .ensureFieldAccessorsInitialized( + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2.class, io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2.Builder.class); + } + + public static final int NAME_FIELD_NUMBER = 1; + private volatile java.lang.Object name_; + /** + * string name = 1; + * @return The name. + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } + } + /** + * string name = 1; + * @return The bytes for name. + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int FIELD1_FIELD_NUMBER = 2; + private int field1_; + /** + * int32 field1 = 2; + * @return The field1. 
+ */ + public int getField1() { + return field1_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!getNameBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); + } + if (field1_ != 0) { + output.writeInt32(2, field1_); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getNameBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); + } + if (field1_ != 0) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(2, field1_); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2)) { + return super.equals(obj); + } + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 other = (io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2) obj; + + if (!getName() + .equals(other.getName())) return false; + if (getField1() + != other.getField1()) return false; + if (!unknownFields.equals(other.unknownFields)) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + hash = (37 * hash) + FIELD1_FIELD_NUMBER; + hash = (53 * hash) + getField1(); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 parseFrom( + byte[] data, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code io.pravega.schemaregistry.testobjs.generated.Message2} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:io.pravega.schemaregistry.testobjs.generated.Message2) + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2OrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message2_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message2_fieldAccessorTable + .ensureFieldAccessorsInitialized( + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2.class, io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2.Builder.class); + } + + // Construct using io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + name_ = ""; + + field1_ = 0; + + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message2_descriptor; + } + + @java.lang.Override + public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 getDefaultInstanceForType() { + return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2.getDefaultInstance(); + } + + @java.lang.Override + public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 build() { + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 buildPartial() { + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 result = new io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2(this); + result.name_ = name_; + result.field1_ = field1_; + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return super.setField(field, value); + } + @java.lang.Override + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + @java.lang.Override + public Builder clearOneof( + 
com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return super.addRepeatedField(field, value); + } + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2) { + return mergeFrom((io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 other) { + if (other == io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2.getDefaultInstance()) return this; + if (!other.getName().isEmpty()) { + name_ = other.name_; + onChanged(); + } + if (other.getField1() != 0) { + setField1(other.getField1()); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private java.lang.Object name_ = ""; + /** + * string name = 1; + * @return The name. + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * string name = 1; + * @return The bytes for name. + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * string name = 1; + * @param value The name to set. + * @return This builder for chaining. + */ + public Builder setName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + name_ = value; + onChanged(); + return this; + } + /** + * string name = 1; + * @return This builder for chaining. + */ + public Builder clearName() { + + name_ = getDefaultInstance().getName(); + onChanged(); + return this; + } + /** + * string name = 1; + * @param value The bytes for name to set. + * @return This builder for chaining. 
+ */ + public Builder setNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + name_ = value; + onChanged(); + return this; + } + + private int field1_ ; + /** + * int32 field1 = 2; + * @return The field1. + */ + public int getField1() { + return field1_; + } + /** + * int32 field1 = 2; + * @param value The field1 to set. + * @return This builder for chaining. + */ + public Builder setField1(int value) { + + field1_ = value; + onChanged(); + return this; + } + /** + * int32 field1 = 2; + * @return This builder for chaining. + */ + public Builder clearField1() { + + field1_ = 0; + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:io.pravega.schemaregistry.testobjs.generated.Message2) + } + + // @@protoc_insertion_point(class_scope:io.pravega.schemaregistry.testobjs.generated.Message2) + private static final io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2(); + } + + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public Message2 parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new Message2(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message2 getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface Message3OrBuilder extends + // @@protoc_insertion_point(interface_extends:io.pravega.schemaregistry.testobjs.generated.Message3) + com.google.protobuf.MessageOrBuilder { + + /** + * string name = 1; + * @return The name. + */ + java.lang.String getName(); + /** + * string name = 1; + * @return The bytes for name. + */ + com.google.protobuf.ByteString + getNameBytes(); + + /** + * int32 field1 = 2; + * @return The field1. + */ + int getField1(); + + /** + * int32 field2 = 3; + * @return The field2. + */ + int getField2(); + } + /** + * Protobuf type {@code io.pravega.schemaregistry.testobjs.generated.Message3} + */ + public static final class Message3 extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:io.pravega.schemaregistry.testobjs.generated.Message3) + Message3OrBuilder { + private static final long serialVersionUID = 0L; + // Use Message3.newBuilder() to construct. 
+ private Message3(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private Message3() { + name_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new Message3(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private Message3( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + java.lang.String s = input.readStringRequireUtf8(); + + name_ = s; + break; + } + case 16: { + + field1_ = input.readInt32(); + break; + } + case 24: { + + field2_ = input.readInt32(); + break; + } + default: { + if (!parseUnknownField( + input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message3_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message3_fieldAccessorTable + .ensureFieldAccessorsInitialized( + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3.class, io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3.Builder.class); + } + + public static final int NAME_FIELD_NUMBER = 1; + private volatile java.lang.Object name_; + /** + * string name = 1; + * @return The name. + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } + } + /** + * string name = 1; + * @return The bytes for name. + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int FIELD1_FIELD_NUMBER = 2; + private int field1_; + /** + * int32 field1 = 2; + * @return The field1. + */ + public int getField1() { + return field1_; + } + + public static final int FIELD2_FIELD_NUMBER = 3; + private int field2_; + /** + * int32 field2 = 3; + * @return The field2. 
+ */ + public int getField2() { + return field2_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!getNameBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); + } + if (field1_ != 0) { + output.writeInt32(2, field1_); + } + if (field2_ != 0) { + output.writeInt32(3, field2_); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getNameBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); + } + if (field1_ != 0) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(2, field1_); + } + if (field2_ != 0) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(3, field2_); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3)) { + return super.equals(obj); + } + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 other = (io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3) obj; + + if (!getName() + .equals(other.getName())) return false; + if (getField1() + != other.getField1()) return false; + if (getField2() + != other.getField2()) return false; + if (!unknownFields.equals(other.unknownFields)) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + hash = (37 * hash) + FIELD1_FIELD_NUMBER; + hash = (53 * hash) + getField1(); + hash = (37 * hash) + FIELD2_FIELD_NUMBER; + hash = (53 * hash) + getField2(); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static 
io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code io.pravega.schemaregistry.testobjs.generated.Message3} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:io.pravega.schemaregistry.testobjs.generated.Message3) + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3OrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message3_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message3_fieldAccessorTable + .ensureFieldAccessorsInitialized( + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3.class, io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3.Builder.class); + } + + // Construct using io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + name_ = ""; + + field1_ = 0; + + field2_ = 0; + + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.internal_static_io_pravega_schemaregistry_testobjs_generated_Message3_descriptor; + } + + @java.lang.Override + public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 getDefaultInstanceForType() { + return io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3.getDefaultInstance(); + } + + @java.lang.Override + public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 build() { + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 buildPartial() { + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 result = new io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3(this); + result.name_ = name_; + result.field1_ = field1_; + result.field2_ = field2_; + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return super.setField(field, value); + } + @java.lang.Override + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + @java.lang.Override + public Builder 
clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return super.addRepeatedField(field, value); + } + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3) { + return mergeFrom((io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 other) { + if (other == io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3.getDefaultInstance()) return this; + if (!other.getName().isEmpty()) { + name_ = other.name_; + onChanged(); + } + if (other.getField1() != 0) { + setField1(other.getField1()); + } + if (other.getField2() != 0) { + setField2(other.getField2()); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private java.lang.Object name_ = ""; + /** + * string name = 1; + * @return The name. + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * string name = 1; + * @return The bytes for name. + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * string name = 1; + * @param value The name to set. + * @return This builder for chaining. + */ + public Builder setName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + name_ = value; + onChanged(); + return this; + } + /** + * string name = 1; + * @return This builder for chaining. + */ + public Builder clearName() { + + name_ = getDefaultInstance().getName(); + onChanged(); + return this; + } + /** + * string name = 1; + * @param value The bytes for name to set. + * @return This builder for chaining. 
+ */ + public Builder setNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + name_ = value; + onChanged(); + return this; + } + + private int field1_ ; + /** + * int32 field1 = 2; + * @return The field1. + */ + public int getField1() { + return field1_; + } + /** + * int32 field1 = 2; + * @param value The field1 to set. + * @return This builder for chaining. + */ + public Builder setField1(int value) { + + field1_ = value; + onChanged(); + return this; + } + /** + * int32 field1 = 2; + * @return This builder for chaining. + */ + public Builder clearField1() { + + field1_ = 0; + onChanged(); + return this; + } + + private int field2_ ; + /** + * int32 field2 = 3; + * @return The field2. + */ + public int getField2() { + return field2_; + } + /** + * int32 field2 = 3; + * @param value The field2 to set. + * @return This builder for chaining. + */ + public Builder setField2(int value) { + + field2_ = value; + onChanged(); + return this; + } + /** + * int32 field2 = 3; + * @return This builder for chaining. + */ + public Builder clearField2() { + + field2_ = 0; + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:io.pravega.schemaregistry.testobjs.generated.Message3) + } + + // @@protoc_insertion_point(class_scope:io.pravega.schemaregistry.testobjs.generated.Message3) + private static final io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3(); + } + + public static io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public Message3 parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new Message3(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public io.pravega.schemaregistry.testobjs.generated.ProtobufTest.Message3 getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_io_pravega_schemaregistry_testobjs_generated_InternalMessage_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_io_pravega_schemaregistry_testobjs_generated_InternalMessage_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_io_pravega_schemaregistry_testobjs_generated_Message1_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_io_pravega_schemaregistry_testobjs_generated_Message1_fieldAccessorTable; + private static 
final com.google.protobuf.Descriptors.Descriptor + internal_static_io_pravega_schemaregistry_testobjs_generated_Message2_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_io_pravega_schemaregistry_testobjs_generated_Message2_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_io_pravega_schemaregistry_testobjs_generated_Message3_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_io_pravega_schemaregistry_testobjs_generated_Message3_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\022protobufTest.proto\022,io.pravega.schemar" + + "egistry.testobjs.generated\"\230\001\n\017InternalM" + + "essage\022S\n\005value\030\001 \001(\0162D.io.pravega.schem" + + "aregistry.testobjs.generated.InternalMes" + + "sage.Values\"0\n\006Values\022\010\n\004val1\020\000\022\010\n\004val2\020" + + "\001\022\010\n\004val3\020\002\022\010\n\004val4\020\003\"i\n\010Message1\022O\n\010int" + + "ernal\030\001 \001(\0132=.io.pravega.schemaregistry." + + "testobjs.generated.InternalMessage\022\014\n\004na" + + "me\030\002 \001(\t\"(\n\010Message2\022\014\n\004name\030\001 \001(\t\022\016\n\006fi" + + "eld1\030\002 \001(\005\"8\n\010Message3\022\014\n\004name\030\001 \001(\t\022\016\n\006" + + "field1\030\002 \001(\005\022\016\n\006field2\030\003 \001(\005b\006proto3" + }; + descriptor = com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + }); + internal_static_io_pravega_schemaregistry_testobjs_generated_InternalMessage_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_io_pravega_schemaregistry_testobjs_generated_InternalMessage_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_io_pravega_schemaregistry_testobjs_generated_InternalMessage_descriptor, + new java.lang.String[] { "Value", }); + internal_static_io_pravega_schemaregistry_testobjs_generated_Message1_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_io_pravega_schemaregistry_testobjs_generated_Message1_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_io_pravega_schemaregistry_testobjs_generated_Message1_descriptor, + new java.lang.String[] { "Internal", "Name", }); + internal_static_io_pravega_schemaregistry_testobjs_generated_Message2_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_io_pravega_schemaregistry_testobjs_generated_Message2_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_io_pravega_schemaregistry_testobjs_generated_Message2_descriptor, + new java.lang.String[] { "Name", "Field1", }); + internal_static_io_pravega_schemaregistry_testobjs_generated_Message3_descriptor = + getDescriptor().getMessageTypes().get(3); + internal_static_io_pravega_schemaregistry_testobjs_generated_Message3_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_io_pravega_schemaregistry_testobjs_generated_Message3_descriptor, + new java.lang.String[] { "Name", "Field1", "Field2", }); + } + + // 
@@protoc_insertion_point(outer_class_scope)
+}
diff --git a/serializers/src/test/java/io/pravega/schemaregistry/testobjs/generated/Test1.java b/serializers/src/test/java/io/pravega/schemaregistry/testobjs/generated/Test1.java
new file mode 100644
index 000000000..3cbe85040
--- /dev/null
+++ b/serializers/src/test/java/io/pravega/schemaregistry/testobjs/generated/Test1.java
@@ -0,0 +1,389 @@
+/**
+ * Autogenerated by Avro
+ *
+ * DO NOT EDIT DIRECTLY
+ */
+package io.pravega.schemaregistry.testobjs.generated;
+
+import org.apache.avro.generic.GenericArray;
+import org.apache.avro.specific.SpecificData;
+import org.apache.avro.util.Utf8;
+import org.apache.avro.message.BinaryMessageEncoder;
+import org.apache.avro.message.BinaryMessageDecoder;
+import org.apache.avro.message.SchemaStore;
+
+@org.apache.avro.specific.AvroGenerated
+public class Test1 extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
+  private static final long serialVersionUID = -7987201165438288421L;
+  public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"Test1\",\"namespace\":\"io.pravega.schemaregistry.testobjs.generated\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"field1\",\"type\":\"int\"}]}");
+  public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; }
+
+  private static SpecificData MODEL$ = new SpecificData();
+
+  private static final BinaryMessageEncoder<Test1> ENCODER =
+      new BinaryMessageEncoder<Test1>(MODEL$, SCHEMA$);
+
+  private static final BinaryMessageDecoder<Test1> DECODER =
+      new BinaryMessageDecoder<Test1>(MODEL$, SCHEMA$);
+
+  /**
+   * Return the BinaryMessageEncoder instance used by this class.
+   * @return the message encoder used by this class
+   */
+  public static BinaryMessageEncoder<Test1> getEncoder() {
+    return ENCODER;
+  }
+
+  /**
+   * Return the BinaryMessageDecoder instance used by this class.
+   * @return the message decoder used by this class
+   */
+  public static BinaryMessageDecoder<Test1> getDecoder() {
+    return DECODER;
+  }
+
+  /**
+   * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link SchemaStore}.
+   * @param resolver a {@link SchemaStore} used to find schemas by fingerprint
+   * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore
+   */
+  public static BinaryMessageDecoder<Test1> createDecoder(SchemaStore resolver) {
+    return new BinaryMessageDecoder<Test1>(MODEL$, SCHEMA$, resolver);
+  }
+
+  /**
+   * Serializes this Test1 to a ByteBuffer.
+   * @return a buffer holding the serialized data for this instance
+   * @throws java.io.IOException if this instance could not be serialized
+   */
+  public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException {
+    return ENCODER.encode(this);
+  }
+
+  /**
+   * Deserializes a Test1 from a ByteBuffer.
+   * @param b a byte buffer holding serialized data for an instance of this class
+   * @return a Test1 instance decoded from the given buffer
+   * @throws java.io.IOException if the given bytes could not be deserialized into an instance of this class
+   */
+  public static Test1 fromByteBuffer(
+      java.nio.ByteBuffer b) throws java.io.IOException {
+    return DECODER.decode(b);
+  }
+
+  private CharSequence name;
+  private int field1;
+
+  /**
+   * Default constructor. Note that this does not initialize fields
+   * to their default values from the schema. If that is desired then
+   * one should use <code>newBuilder()</code>.
+   */
+  public Test1() {}
+
+  /**
+   * All-args constructor.
+   * @param name The new value for name
+   * @param field1 The new value for field1
+   */
+  public Test1(CharSequence name, Integer field1) {
+    this.name = name;
+    this.field1 = field1;
+  }
+
+  public SpecificData getSpecificData() { return MODEL$; }
+  public org.apache.avro.Schema getSchema() { return SCHEMA$; }
+  // Used by DatumWriter. Applications should not call.
+  public Object get(int field$) {
+    switch (field$) {
+    case 0: return name;
+    case 1: return field1;
+    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
+    }
+  }
+
+  // Used by DatumReader. Applications should not call.
+  @SuppressWarnings(value="unchecked")
+  public void put(int field$, Object value$) {
+    switch (field$) {
+    case 0: name = (CharSequence)value$; break;
+    case 1: field1 = (Integer)value$; break;
+    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
+    }
+  }
+
+  /**
+   * Gets the value of the 'name' field.
+   * @return The value of the 'name' field.
+   */
+  public CharSequence getName() {
+    return name;
+  }
+
+
+  /**
+   * Sets the value of the 'name' field.
+   * @param value the value to set.
+   */
+  public void setName(CharSequence value) {
+    this.name = value;
+  }
+
+  /**
+   * Gets the value of the 'field1' field.
+   * @return The value of the 'field1' field.
+   */
+  public int getField1() {
+    return field1;
+  }
+
+
+  /**
+   * Sets the value of the 'field1' field.
+   * @param value the value to set.
+   */
+  public void setField1(int value) {
+    this.field1 = value;
+  }
+
+  /**
+   * Creates a new Test1 RecordBuilder.
+   * @return A new Test1 RecordBuilder
+   */
+  public static io.pravega.schemaregistry.testobjs.generated.Test1.Builder newBuilder() {
+    return new io.pravega.schemaregistry.testobjs.generated.Test1.Builder();
+  }
+
+  /**
+   * Creates a new Test1 RecordBuilder by copying an existing Builder.
+   * @param other The existing builder to copy.
+   * @return A new Test1 RecordBuilder
+   */
+  public static io.pravega.schemaregistry.testobjs.generated.Test1.Builder newBuilder(io.pravega.schemaregistry.testobjs.generated.Test1.Builder other) {
+    if (other == null) {
+      return new io.pravega.schemaregistry.testobjs.generated.Test1.Builder();
+    } else {
+      return new io.pravega.schemaregistry.testobjs.generated.Test1.Builder(other);
+    }
+  }
+
+  /**
+   * Creates a new Test1 RecordBuilder by copying an existing Test1 instance.
+   * @param other The existing instance to copy.
+   * @return A new Test1 RecordBuilder
+   */
+  public static io.pravega.schemaregistry.testobjs.generated.Test1.Builder newBuilder(io.pravega.schemaregistry.testobjs.generated.Test1 other) {
+    if (other == null) {
+      return new io.pravega.schemaregistry.testobjs.generated.Test1.Builder();
+    } else {
+      return new io.pravega.schemaregistry.testobjs.generated.Test1.Builder(other);
+    }
+  }
+
+  /**
+   * RecordBuilder for Test1 instances.
+   */
+  public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase<Test1>
+    implements org.apache.avro.data.RecordBuilder<Test1> {
+
+    private CharSequence name;
+    private int field1;
+
+    /** Creates a new Builder */
+    private Builder() {
+      super(SCHEMA$);
+    }
+
+    /**
+     * Creates a Builder by copying an existing Builder.
+     * @param other The existing Builder to copy.
+ */ + private Builder(io.pravega.schemaregistry.testobjs.generated.Test1.Builder other) { + super(other); + if (isValidValue(fields()[0], other.name)) { + this.name = data().deepCopy(fields()[0].schema(), other.name); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (isValidValue(fields()[1], other.field1)) { + this.field1 = data().deepCopy(fields()[1].schema(), other.field1); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } + } + + /** + * Creates a Builder by copying an existing Test1 instance + * @param other The existing instance to copy. + */ + private Builder(io.pravega.schemaregistry.testobjs.generated.Test1 other) { + super(SCHEMA$); + if (isValidValue(fields()[0], other.name)) { + this.name = data().deepCopy(fields()[0].schema(), other.name); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.field1)) { + this.field1 = data().deepCopy(fields()[1].schema(), other.field1); + fieldSetFlags()[1] = true; + } + } + + /** + * Gets the value of the 'name' field. + * @return The value. + */ + public CharSequence getName() { + return name; + } + + + /** + * Sets the value of the 'name' field. + * @param value The value of 'name'. + * @return This builder. + */ + public io.pravega.schemaregistry.testobjs.generated.Test1.Builder setName(CharSequence value) { + validate(fields()[0], value); + this.name = value; + fieldSetFlags()[0] = true; + return this; + } + + /** + * Checks whether the 'name' field has been set. + * @return True if the 'name' field has been set, false otherwise. + */ + public boolean hasName() { + return fieldSetFlags()[0]; + } + + + /** + * Clears the value of the 'name' field. + * @return This builder. + */ + public io.pravega.schemaregistry.testobjs.generated.Test1.Builder clearName() { + name = null; + fieldSetFlags()[0] = false; + return this; + } + + /** + * Gets the value of the 'field1' field. + * @return The value. + */ + public int getField1() { + return field1; + } + + + /** + * Sets the value of the 'field1' field. + * @param value The value of 'field1'. + * @return This builder. + */ + public io.pravega.schemaregistry.testobjs.generated.Test1.Builder setField1(int value) { + validate(fields()[1], value); + this.field1 = value; + fieldSetFlags()[1] = true; + return this; + } + + /** + * Checks whether the 'field1' field has been set. + * @return True if the 'field1' field has been set, false otherwise. + */ + public boolean hasField1() { + return fieldSetFlags()[1]; + } + + + /** + * Clears the value of the 'field1' field. + * @return This builder. + */ + public io.pravega.schemaregistry.testobjs.generated.Test1.Builder clearField1() { + fieldSetFlags()[1] = false; + return this; + } + + @Override + @SuppressWarnings("unchecked") + public Test1 build() { + try { + Test1 record = new Test1(); + record.name = fieldSetFlags()[0] ? this.name : (CharSequence) defaultValue(fields()[0]); + record.field1 = fieldSetFlags()[1] ? 
this.field1 : (Integer) defaultValue(fields()[1]);
+        return record;
+      } catch (org.apache.avro.AvroMissingFieldException e) {
+        throw e;
+      } catch (Exception e) {
+        throw new org.apache.avro.AvroRuntimeException(e);
+      }
+    }
+  }
+
+  @SuppressWarnings("unchecked")
+  private static final org.apache.avro.io.DatumWriter<Test1>
+    WRITER$ = (org.apache.avro.io.DatumWriter<Test1>)MODEL$.createDatumWriter(SCHEMA$);
+
+  @Override public void writeExternal(java.io.ObjectOutput out)
+    throws java.io.IOException {
+    WRITER$.write(this, SpecificData.getEncoder(out));
+  }
+
+  @SuppressWarnings("unchecked")
+  private static final org.apache.avro.io.DatumReader<Test1>
+    READER$ = (org.apache.avro.io.DatumReader<Test1>)MODEL$.createDatumReader(SCHEMA$);
+
+  @Override public void readExternal(java.io.ObjectInput in)
+    throws java.io.IOException {
+    READER$.read(this, SpecificData.getDecoder(in));
+  }
+
+  @Override protected boolean hasCustomCoders() { return true; }
+
+  @Override public void customEncode(org.apache.avro.io.Encoder out)
+    throws java.io.IOException
+  {
+    out.writeString(this.name);
+
+    out.writeInt(this.field1);
+
+  }
+
+  @Override public void customDecode(org.apache.avro.io.ResolvingDecoder in)
+    throws java.io.IOException
+  {
+    org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff();
+    if (fieldOrder == null) {
+      this.name = in.readString(this.name instanceof Utf8 ? (Utf8)this.name : null);
+
+      this.field1 = in.readInt();
+
+    } else {
+      for (int i = 0; i < 2; i++) {
+        switch (fieldOrder[i].pos()) {
+        case 0:
+          this.name = in.readString(this.name instanceof Utf8 ? (Utf8)this.name : null);
+          break;
+
+        case 1:
+          this.field1 = in.readInt();
+          break;
+
+        default:
+          throw new java.io.IOException("Corrupt ResolvingDecoder.");
+        }
+      }
+    }
+  }
+}
+
+
+
+
+
+
+
+
+
+
diff --git a/serializers/src/test/java/io/pravega/schemaregistry/testobjs/generated/Test2.java b/serializers/src/test/java/io/pravega/schemaregistry/testobjs/generated/Test2.java
new file mode 100644
index 000000000..3695a01a3
--- /dev/null
+++ b/serializers/src/test/java/io/pravega/schemaregistry/testobjs/generated/Test2.java
@@ -0,0 +1,469 @@
+/**
+ * Autogenerated by Avro
+ *
+ * DO NOT EDIT DIRECTLY
+ */
+package io.pravega.schemaregistry.testobjs.generated;
+
+import org.apache.avro.generic.GenericArray;
+import org.apache.avro.specific.SpecificData;
+import org.apache.avro.util.Utf8;
+import org.apache.avro.message.BinaryMessageEncoder;
+import org.apache.avro.message.BinaryMessageDecoder;
+import org.apache.avro.message.SchemaStore;
+
+@org.apache.avro.specific.AvroGenerated
+public class Test2 extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
+  private static final long serialVersionUID = -8157678982198772485L;
+  public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"Test2\",\"namespace\":\"io.pravega.schemaregistry.testobjs.generated\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"field1\",\"type\":\"int\"},{\"name\":\"field2\",\"type\":\"string\"}]}");
+  public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; }
+
+  private static SpecificData MODEL$ = new SpecificData();
+
+  private static final BinaryMessageEncoder<Test2> ENCODER =
+      new BinaryMessageEncoder<Test2>(MODEL$, SCHEMA$);
+
+  private static final BinaryMessageDecoder<Test2> DECODER =
+      new BinaryMessageDecoder<Test2>(MODEL$, SCHEMA$);
+
+  /**
+   * Return the BinaryMessageEncoder instance used by this class.
+ * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link SchemaStore}. + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { + return new BinaryMessageDecoder(MODEL$, SCHEMA$, resolver); + } + + /** + * Serializes this Test2 to a ByteBuffer. + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a Test2 from a ByteBuffer. + * @param b a byte buffer holding serialized data for an instance of this class + * @return a Test2 instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of this class + */ + public static Test2 fromByteBuffer( + java.nio.ByteBuffer b) throws java.io.IOException { + return DECODER.decode(b); + } + + private CharSequence name; + private int field1; + private CharSequence field2; + + /** + * Default constructor. Note that this does not initialize fields + * to their default values from the schema. If that is desired then + * one should use newBuilder(). + */ + public Test2() {} + + /** + * All-args constructor. + * @param name The new value for name + * @param field1 The new value for field1 + * @param field2 The new value for field2 + */ + public Test2(CharSequence name, Integer field1, CharSequence field2) { + this.name = name; + this.field1 = field1; + this.field2 = field2; + } + + public SpecificData getSpecificData() { return MODEL$; } + public org.apache.avro.Schema getSchema() { return SCHEMA$; } + // Used by DatumWriter. Applications should not call. + public Object get(int field$) { + switch (field$) { + case 0: return name; + case 1: return field1; + case 2: return field2; + default: throw new org.apache.avro.AvroRuntimeException("Bad index"); + } + } + + // Used by DatumReader. Applications should not call. + @SuppressWarnings(value="unchecked") + public void put(int field$, Object value$) { + switch (field$) { + case 0: name = (CharSequence)value$; break; + case 1: field1 = (Integer)value$; break; + case 2: field2 = (CharSequence)value$; break; + default: throw new org.apache.avro.AvroRuntimeException("Bad index"); + } + } + + /** + * Gets the value of the 'name' field. + * @return The value of the 'name' field. + */ + public CharSequence getName() { + return name; + } + + + /** + * Sets the value of the 'name' field. + * @param value the value to set. + */ + public void setName(CharSequence value) { + this.name = value; + } + + /** + * Gets the value of the 'field1' field. + * @return The value of the 'field1' field. + */ + public int getField1() { + return field1; + } + + + /** + * Sets the value of the 'field1' field. + * @param value the value to set. + */ + public void setField1(int value) { + this.field1 = value; + } + + /** + * Gets the value of the 'field2' field. 
+ * @return The value of the 'field2' field. + */ + public CharSequence getField2() { + return field2; + } + + + /** + * Sets the value of the 'field2' field. + * @param value the value to set. + */ + public void setField2(CharSequence value) { + this.field2 = value; + } + + /** + * Creates a new Test2 RecordBuilder. + * @return A new Test2 RecordBuilder + */ + public static io.pravega.schemaregistry.testobjs.generated.Test2.Builder newBuilder() { + return new io.pravega.schemaregistry.testobjs.generated.Test2.Builder(); + } + + /** + * Creates a new Test2 RecordBuilder by copying an existing Builder. + * @param other The existing builder to copy. + * @return A new Test2 RecordBuilder + */ + public static io.pravega.schemaregistry.testobjs.generated.Test2.Builder newBuilder(io.pravega.schemaregistry.testobjs.generated.Test2.Builder other) { + if (other == null) { + return new io.pravega.schemaregistry.testobjs.generated.Test2.Builder(); + } else { + return new io.pravega.schemaregistry.testobjs.generated.Test2.Builder(other); + } + } + + /** + * Creates a new Test2 RecordBuilder by copying an existing Test2 instance. + * @param other The existing instance to copy. + * @return A new Test2 RecordBuilder + */ + public static io.pravega.schemaregistry.testobjs.generated.Test2.Builder newBuilder(io.pravega.schemaregistry.testobjs.generated.Test2 other) { + if (other == null) { + return new io.pravega.schemaregistry.testobjs.generated.Test2.Builder(); + } else { + return new io.pravega.schemaregistry.testobjs.generated.Test2.Builder(other); + } + } + + /** + * RecordBuilder for Test2 instances. + */ + public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private CharSequence name; + private int field1; + private CharSequence field2; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$); + } + + /** + * Creates a Builder by copying an existing Builder. + * @param other The existing Builder to copy. + */ + private Builder(io.pravega.schemaregistry.testobjs.generated.Test2.Builder other) { + super(other); + if (isValidValue(fields()[0], other.name)) { + this.name = data().deepCopy(fields()[0].schema(), other.name); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (isValidValue(fields()[1], other.field1)) { + this.field1 = data().deepCopy(fields()[1].schema(), other.field1); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } + if (isValidValue(fields()[2], other.field2)) { + this.field2 = data().deepCopy(fields()[2].schema(), other.field2); + fieldSetFlags()[2] = other.fieldSetFlags()[2]; + } + } + + /** + * Creates a Builder by copying an existing Test2 instance + * @param other The existing instance to copy. + */ + private Builder(io.pravega.schemaregistry.testobjs.generated.Test2 other) { + super(SCHEMA$); + if (isValidValue(fields()[0], other.name)) { + this.name = data().deepCopy(fields()[0].schema(), other.name); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.field1)) { + this.field1 = data().deepCopy(fields()[1].schema(), other.field1); + fieldSetFlags()[1] = true; + } + if (isValidValue(fields()[2], other.field2)) { + this.field2 = data().deepCopy(fields()[2].schema(), other.field2); + fieldSetFlags()[2] = true; + } + } + + /** + * Gets the value of the 'name' field. + * @return The value. + */ + public CharSequence getName() { + return name; + } + + + /** + * Sets the value of the 'name' field. + * @param value The value of 'name'. 
+ * @return This builder. + */ + public io.pravega.schemaregistry.testobjs.generated.Test2.Builder setName(CharSequence value) { + validate(fields()[0], value); + this.name = value; + fieldSetFlags()[0] = true; + return this; + } + + /** + * Checks whether the 'name' field has been set. + * @return True if the 'name' field has been set, false otherwise. + */ + public boolean hasName() { + return fieldSetFlags()[0]; + } + + + /** + * Clears the value of the 'name' field. + * @return This builder. + */ + public io.pravega.schemaregistry.testobjs.generated.Test2.Builder clearName() { + name = null; + fieldSetFlags()[0] = false; + return this; + } + + /** + * Gets the value of the 'field1' field. + * @return The value. + */ + public int getField1() { + return field1; + } + + + /** + * Sets the value of the 'field1' field. + * @param value The value of 'field1'. + * @return This builder. + */ + public io.pravega.schemaregistry.testobjs.generated.Test2.Builder setField1(int value) { + validate(fields()[1], value); + this.field1 = value; + fieldSetFlags()[1] = true; + return this; + } + + /** + * Checks whether the 'field1' field has been set. + * @return True if the 'field1' field has been set, false otherwise. + */ + public boolean hasField1() { + return fieldSetFlags()[1]; + } + + + /** + * Clears the value of the 'field1' field. + * @return This builder. + */ + public io.pravega.schemaregistry.testobjs.generated.Test2.Builder clearField1() { + fieldSetFlags()[1] = false; + return this; + } + + /** + * Gets the value of the 'field2' field. + * @return The value. + */ + public CharSequence getField2() { + return field2; + } + + + /** + * Sets the value of the 'field2' field. + * @param value The value of 'field2'. + * @return This builder. + */ + public io.pravega.schemaregistry.testobjs.generated.Test2.Builder setField2(CharSequence value) { + validate(fields()[2], value); + this.field2 = value; + fieldSetFlags()[2] = true; + return this; + } + + /** + * Checks whether the 'field2' field has been set. + * @return True if the 'field2' field has been set, false otherwise. + */ + public boolean hasField2() { + return fieldSetFlags()[2]; + } + + + /** + * Clears the value of the 'field2' field. + * @return This builder. + */ + public io.pravega.schemaregistry.testobjs.generated.Test2.Builder clearField2() { + field2 = null; + fieldSetFlags()[2] = false; + return this; + } + + @Override + @SuppressWarnings("unchecked") + public Test2 build() { + try { + Test2 record = new Test2(); + record.name = fieldSetFlags()[0] ? this.name : (CharSequence) defaultValue(fields()[0]); + record.field1 = fieldSetFlags()[1] ? this.field1 : (Integer) defaultValue(fields()[1]); + record.field2 = fieldSetFlags()[2] ? 
this.field2 : (CharSequence) defaultValue(fields()[2]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter + WRITER$ = (org.apache.avro.io.DatumWriter)MODEL$.createDatumWriter(SCHEMA$); + + @Override public void writeExternal(java.io.ObjectOutput out) + throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader + READER$ = (org.apache.avro.io.DatumReader)MODEL$.createDatumReader(SCHEMA$); + + @Override public void readExternal(java.io.ObjectInput in) + throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } + + @Override protected boolean hasCustomCoders() { return true; } + + @Override public void customEncode(org.apache.avro.io.Encoder out) + throws java.io.IOException + { + out.writeString(this.name); + + out.writeInt(this.field1); + + out.writeString(this.field2); + + } + + @Override public void customDecode(org.apache.avro.io.ResolvingDecoder in) + throws java.io.IOException + { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + this.name = in.readString(this.name instanceof Utf8 ? (Utf8)this.name : null); + + this.field1 = in.readInt(); + + this.field2 = in.readString(this.field2 instanceof Utf8 ? (Utf8)this.field2 : null); + + } else { + for (int i = 0; i < 3; i++) { + switch (fieldOrder[i].pos()) { + case 0: + this.name = in.readString(this.name instanceof Utf8 ? (Utf8)this.name : null); + break; + + case 1: + this.field1 = in.readInt(); + break; + + case 2: + this.field2 = in.readString(this.field2 instanceof Utf8 ? 
(Utf8)this.field2 : null); + break; + + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); + } + } + } + } +} + + + + + + + + + + diff --git a/serializers/src/test/java/io/pravega/schemaregistry/testobjs/generated/Test3.java b/serializers/src/test/java/io/pravega/schemaregistry/testobjs/generated/Test3.java new file mode 100644 index 000000000..32a122681 --- /dev/null +++ b/serializers/src/test/java/io/pravega/schemaregistry/testobjs/generated/Test3.java @@ -0,0 +1,549 @@ +/** + * Autogenerated by Avro + * + * DO NOT EDIT DIRECTLY + */ +package io.pravega.schemaregistry.testobjs.generated; + +import org.apache.avro.generic.GenericArray; +import org.apache.avro.specific.SpecificData; +import org.apache.avro.util.Utf8; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.SchemaStore; + +@org.apache.avro.specific.AvroGenerated +public class Test3 extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = 910195546659301614L; + public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"Test3\",\"namespace\":\"io.pravega.schemaregistry.testobjs.generated\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"field1\",\"type\":\"int\"},{\"name\":\"field2\",\"type\":\"string\"},{\"name\":\"field3\",\"type\":\"string\"}]}"); + public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; } + + private static SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new BinaryMessageEncoder(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder DECODER = + new BinaryMessageDecoder(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link SchemaStore}. + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { + return new BinaryMessageDecoder(MODEL$, SCHEMA$, resolver); + } + + /** + * Serializes this Test3 to a ByteBuffer. + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a Test3 from a ByteBuffer. 
+ * @param b a byte buffer holding serialized data for an instance of this class + * @return a Test3 instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of this class + */ + public static Test3 fromByteBuffer( + java.nio.ByteBuffer b) throws java.io.IOException { + return DECODER.decode(b); + } + + private CharSequence name; + private int field1; + private CharSequence field2; + private CharSequence field3; + + /** + * Default constructor. Note that this does not initialize fields + * to their default values from the schema. If that is desired then + * one should use newBuilder(). + */ + public Test3() {} + + /** + * All-args constructor. + * @param name The new value for name + * @param field1 The new value for field1 + * @param field2 The new value for field2 + * @param field3 The new value for field3 + */ + public Test3(CharSequence name, Integer field1, CharSequence field2, CharSequence field3) { + this.name = name; + this.field1 = field1; + this.field2 = field2; + this.field3 = field3; + } + + public SpecificData getSpecificData() { return MODEL$; } + public org.apache.avro.Schema getSchema() { return SCHEMA$; } + // Used by DatumWriter. Applications should not call. + public Object get(int field$) { + switch (field$) { + case 0: return name; + case 1: return field1; + case 2: return field2; + case 3: return field3; + default: throw new org.apache.avro.AvroRuntimeException("Bad index"); + } + } + + // Used by DatumReader. Applications should not call. + @SuppressWarnings(value="unchecked") + public void put(int field$, Object value$) { + switch (field$) { + case 0: name = (CharSequence)value$; break; + case 1: field1 = (Integer)value$; break; + case 2: field2 = (CharSequence)value$; break; + case 3: field3 = (CharSequence)value$; break; + default: throw new org.apache.avro.AvroRuntimeException("Bad index"); + } + } + + /** + * Gets the value of the 'name' field. + * @return The value of the 'name' field. + */ + public CharSequence getName() { + return name; + } + + + /** + * Sets the value of the 'name' field. + * @param value the value to set. + */ + public void setName(CharSequence value) { + this.name = value; + } + + /** + * Gets the value of the 'field1' field. + * @return The value of the 'field1' field. + */ + public int getField1() { + return field1; + } + + + /** + * Sets the value of the 'field1' field. + * @param value the value to set. + */ + public void setField1(int value) { + this.field1 = value; + } + + /** + * Gets the value of the 'field2' field. + * @return The value of the 'field2' field. + */ + public CharSequence getField2() { + return field2; + } + + + /** + * Sets the value of the 'field2' field. + * @param value the value to set. + */ + public void setField2(CharSequence value) { + this.field2 = value; + } + + /** + * Gets the value of the 'field3' field. + * @return The value of the 'field3' field. + */ + public CharSequence getField3() { + return field3; + } + + + /** + * Sets the value of the 'field3' field. + * @param value the value to set. + */ + public void setField3(CharSequence value) { + this.field3 = value; + } + + /** + * Creates a new Test3 RecordBuilder. + * @return A new Test3 RecordBuilder + */ + public static io.pravega.schemaregistry.testobjs.generated.Test3.Builder newBuilder() { + return new io.pravega.schemaregistry.testobjs.generated.Test3.Builder(); + } + + /** + * Creates a new Test3 RecordBuilder by copying an existing Builder. 
+ * @param other The existing builder to copy. + * @return A new Test3 RecordBuilder + */ + public static io.pravega.schemaregistry.testobjs.generated.Test3.Builder newBuilder(io.pravega.schemaregistry.testobjs.generated.Test3.Builder other) { + if (other == null) { + return new io.pravega.schemaregistry.testobjs.generated.Test3.Builder(); + } else { + return new io.pravega.schemaregistry.testobjs.generated.Test3.Builder(other); + } + } + + /** + * Creates a new Test3 RecordBuilder by copying an existing Test3 instance. + * @param other The existing instance to copy. + * @return A new Test3 RecordBuilder + */ + public static io.pravega.schemaregistry.testobjs.generated.Test3.Builder newBuilder(io.pravega.schemaregistry.testobjs.generated.Test3 other) { + if (other == null) { + return new io.pravega.schemaregistry.testobjs.generated.Test3.Builder(); + } else { + return new io.pravega.schemaregistry.testobjs.generated.Test3.Builder(other); + } + } + + /** + * RecordBuilder for Test3 instances. + */ + public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private CharSequence name; + private int field1; + private CharSequence field2; + private CharSequence field3; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$); + } + + /** + * Creates a Builder by copying an existing Builder. + * @param other The existing Builder to copy. + */ + private Builder(io.pravega.schemaregistry.testobjs.generated.Test3.Builder other) { + super(other); + if (isValidValue(fields()[0], other.name)) { + this.name = data().deepCopy(fields()[0].schema(), other.name); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (isValidValue(fields()[1], other.field1)) { + this.field1 = data().deepCopy(fields()[1].schema(), other.field1); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } + if (isValidValue(fields()[2], other.field2)) { + this.field2 = data().deepCopy(fields()[2].schema(), other.field2); + fieldSetFlags()[2] = other.fieldSetFlags()[2]; + } + if (isValidValue(fields()[3], other.field3)) { + this.field3 = data().deepCopy(fields()[3].schema(), other.field3); + fieldSetFlags()[3] = other.fieldSetFlags()[3]; + } + } + + /** + * Creates a Builder by copying an existing Test3 instance + * @param other The existing instance to copy. + */ + private Builder(io.pravega.schemaregistry.testobjs.generated.Test3 other) { + super(SCHEMA$); + if (isValidValue(fields()[0], other.name)) { + this.name = data().deepCopy(fields()[0].schema(), other.name); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.field1)) { + this.field1 = data().deepCopy(fields()[1].schema(), other.field1); + fieldSetFlags()[1] = true; + } + if (isValidValue(fields()[2], other.field2)) { + this.field2 = data().deepCopy(fields()[2].schema(), other.field2); + fieldSetFlags()[2] = true; + } + if (isValidValue(fields()[3], other.field3)) { + this.field3 = data().deepCopy(fields()[3].schema(), other.field3); + fieldSetFlags()[3] = true; + } + } + + /** + * Gets the value of the 'name' field. + * @return The value. + */ + public CharSequence getName() { + return name; + } + + + /** + * Sets the value of the 'name' field. + * @param value The value of 'name'. + * @return This builder. 
+ */ + public io.pravega.schemaregistry.testobjs.generated.Test3.Builder setName(CharSequence value) { + validate(fields()[0], value); + this.name = value; + fieldSetFlags()[0] = true; + return this; + } + + /** + * Checks whether the 'name' field has been set. + * @return True if the 'name' field has been set, false otherwise. + */ + public boolean hasName() { + return fieldSetFlags()[0]; + } + + + /** + * Clears the value of the 'name' field. + * @return This builder. + */ + public io.pravega.schemaregistry.testobjs.generated.Test3.Builder clearName() { + name = null; + fieldSetFlags()[0] = false; + return this; + } + + /** + * Gets the value of the 'field1' field. + * @return The value. + */ + public int getField1() { + return field1; + } + + + /** + * Sets the value of the 'field1' field. + * @param value The value of 'field1'. + * @return This builder. + */ + public io.pravega.schemaregistry.testobjs.generated.Test3.Builder setField1(int value) { + validate(fields()[1], value); + this.field1 = value; + fieldSetFlags()[1] = true; + return this; + } + + /** + * Checks whether the 'field1' field has been set. + * @return True if the 'field1' field has been set, false otherwise. + */ + public boolean hasField1() { + return fieldSetFlags()[1]; + } + + + /** + * Clears the value of the 'field1' field. + * @return This builder. + */ + public io.pravega.schemaregistry.testobjs.generated.Test3.Builder clearField1() { + fieldSetFlags()[1] = false; + return this; + } + + /** + * Gets the value of the 'field2' field. + * @return The value. + */ + public CharSequence getField2() { + return field2; + } + + + /** + * Sets the value of the 'field2' field. + * @param value The value of 'field2'. + * @return This builder. + */ + public io.pravega.schemaregistry.testobjs.generated.Test3.Builder setField2(CharSequence value) { + validate(fields()[2], value); + this.field2 = value; + fieldSetFlags()[2] = true; + return this; + } + + /** + * Checks whether the 'field2' field has been set. + * @return True if the 'field2' field has been set, false otherwise. + */ + public boolean hasField2() { + return fieldSetFlags()[2]; + } + + + /** + * Clears the value of the 'field2' field. + * @return This builder. + */ + public io.pravega.schemaregistry.testobjs.generated.Test3.Builder clearField2() { + field2 = null; + fieldSetFlags()[2] = false; + return this; + } + + /** + * Gets the value of the 'field3' field. + * @return The value. + */ + public CharSequence getField3() { + return field3; + } + + + /** + * Sets the value of the 'field3' field. + * @param value The value of 'field3'. + * @return This builder. + */ + public io.pravega.schemaregistry.testobjs.generated.Test3.Builder setField3(CharSequence value) { + validate(fields()[3], value); + this.field3 = value; + fieldSetFlags()[3] = true; + return this; + } + + /** + * Checks whether the 'field3' field has been set. + * @return True if the 'field3' field has been set, false otherwise. + */ + public boolean hasField3() { + return fieldSetFlags()[3]; + } + + + /** + * Clears the value of the 'field3' field. + * @return This builder. + */ + public io.pravega.schemaregistry.testobjs.generated.Test3.Builder clearField3() { + field3 = null; + fieldSetFlags()[3] = false; + return this; + } + + @Override + @SuppressWarnings("unchecked") + public Test3 build() { + try { + Test3 record = new Test3(); + record.name = fieldSetFlags()[0] ? this.name : (CharSequence) defaultValue(fields()[0]); + record.field1 = fieldSetFlags()[1] ? 
this.field1 : (Integer) defaultValue(fields()[1]); + record.field2 = fieldSetFlags()[2] ? this.field2 : (CharSequence) defaultValue(fields()[2]); + record.field3 = fieldSetFlags()[3] ? this.field3 : (CharSequence) defaultValue(fields()[3]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter + WRITER$ = (org.apache.avro.io.DatumWriter)MODEL$.createDatumWriter(SCHEMA$); + + @Override public void writeExternal(java.io.ObjectOutput out) + throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader + READER$ = (org.apache.avro.io.DatumReader)MODEL$.createDatumReader(SCHEMA$); + + @Override public void readExternal(java.io.ObjectInput in) + throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } + + @Override protected boolean hasCustomCoders() { return true; } + + @Override public void customEncode(org.apache.avro.io.Encoder out) + throws java.io.IOException + { + out.writeString(this.name); + + out.writeInt(this.field1); + + out.writeString(this.field2); + + out.writeString(this.field3); + + } + + @Override public void customDecode(org.apache.avro.io.ResolvingDecoder in) + throws java.io.IOException + { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + this.name = in.readString(this.name instanceof Utf8 ? (Utf8)this.name : null); + + this.field1 = in.readInt(); + + this.field2 = in.readString(this.field2 instanceof Utf8 ? (Utf8)this.field2 : null); + + this.field3 = in.readString(this.field3 instanceof Utf8 ? (Utf8)this.field3 : null); + + } else { + for (int i = 0; i < 4; i++) { + switch (fieldOrder[i].pos()) { + case 0: + this.name = in.readString(this.name instanceof Utf8 ? (Utf8)this.name : null); + break; + + case 1: + this.field1 = in.readInt(); + break; + + case 2: + this.field2 = in.readString(this.field2 instanceof Utf8 ? (Utf8)this.field2 : null); + break; + + case 3: + this.field3 = in.readString(this.field3 instanceof Utf8 ? 
(Utf8)this.field3 : null); + break; + + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); + } + } + } + } +} + + + + + + + + + + diff --git a/serializers/src/test/resources/avro/avroTest1.avsc b/serializers/src/test/resources/avro/avroTest1.avsc new file mode 100644 index 000000000..a7d5e71ea --- /dev/null +++ b/serializers/src/test/resources/avro/avroTest1.avsc @@ -0,0 +1,9 @@ +{ + "namespace": "io.pravega.schemaregistry.testobjs.generated", + "type": "record", + "name": "Type1", + "fields": [ + {"name": "a", "type": "string"}, + {"name": "b", "type": "int"} + ] +} \ No newline at end of file diff --git a/serializers/src/test/resources/avro/avroTest2.avsc b/serializers/src/test/resources/avro/avroTest2.avsc new file mode 100644 index 000000000..f0ebc9c52 --- /dev/null +++ b/serializers/src/test/resources/avro/avroTest2.avsc @@ -0,0 +1,10 @@ +{ + "namespace": "io.pravega.schemaregistry.testobjs.generated", + "type": "record", + "name": "Type2", + "fields": [ + {"name": "c", "type": "string"}, + {"name": "d", "type": "int"}, + {"name": "e", "type": "string"} + ] +} \ No newline at end of file diff --git a/serializers/src/test/resources/avro/avroTest3.avsc b/serializers/src/test/resources/avro/avroTest3.avsc new file mode 100644 index 000000000..ed07a0543 --- /dev/null +++ b/serializers/src/test/resources/avro/avroTest3.avsc @@ -0,0 +1,11 @@ +{ + "namespace": "io.pravega.schemaregistry.testobjs.generated", + "type": "record", + "name": "Type3", + "fields": [ + {"name": "f", "type": "string"}, + {"name": "g", "type": "int"}, + {"name": "h", "type": "string"}, + {"name": "i", "type": "string"} + ] +} \ No newline at end of file diff --git a/serializers/src/test/resources/proto/protobufTest.pb b/serializers/src/test/resources/proto/protobufTest.pb new file mode 100644 index 0000000000000000000000000000000000000000..b0226bf035f02993af92492781aa97c165d07de3 GIT binary patch literal 498 zcmd;j&&(xMP?TSipH!L_l3HA%2j&RrWaa}|iDjwjiF(D!8L7F6MXBkT#U(|RdL=+r z`AJ#Ddg-ZosYQt;sVPeH8M*j9^GZ^S@)C1=Q;Ule(^G|_xLC^)b4pVs7!?>b_>5fi zNHI|l%{09*kWs}!5F?cgxY!^9LL6KyKwAt27@#a80Y)gxSbz!2G7(@_s^sE;I?PZg zl8Yk~>R7PjxQ%Qna$FGHI3Xb}mb}E=R0$@apE-lTY$Y?e)kbi6P?$jFg~Yhn(lS$X jQVhYmSc9NEB|o@6V^Z|N%`=i<2HFBM&nSrv9J9s%3E`D{ literal 0 HcmV?d00001 diff --git a/serializers/src/test/resources/proto/protobufTest.proto b/serializers/src/test/resources/proto/protobufTest.proto new file mode 100644 index 000000000..4edfcae2b --- /dev/null +++ b/serializers/src/test/resources/proto/protobufTest.proto @@ -0,0 +1,28 @@ +syntax = "proto3"; +package io.pravega.schemaregistry.testobjs.generated; + +message InternalMessage { + enum Values { + val1 = 0; + val2 = 1; + val3 = 2; + val4 = 3; + } + Values value = 1; +} + +message Message1 { + InternalMessage internal = 1; + string name = 2; +} + +message Message2 { + string name = 1; + int32 field1 = 2; +} + +message Message3 { + string name = 1; + int32 field1 = 2; + int32 field2 = 3; +} diff --git a/settings.gradle b/settings.gradle index 600f3080a..8e77f5cc1 100644 --- a/settings.gradle +++ b/settings.gradle @@ -10,3 +10,8 @@ */ rootProject.name = 'schema-registry' +include 'client', + 'common', + 'contract', + 'serializers' + \ No newline at end of file From 8ee1fef5fdc3320577de3ebb4a17db3e0312a03e Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Mon, 8 Jun 2020 07:50:57 -0700 Subject: [PATCH 04/70] contract Signed-off-by: Shivesh Ranjan --- .../common/ContinuationTokenIterator.java | 13 +------------ .../common/ContinuationTokenIteratorTest.java | 18 
+----------------- .../schemaregistry/contract/v1/ApiV1.java | 4 ++-- 3 files changed, 4 insertions(+), 31 deletions(-) diff --git a/common/src/main/java/io/pravega/schemaregistry/common/ContinuationTokenIterator.java b/common/src/main/java/io/pravega/schemaregistry/common/ContinuationTokenIterator.java index ff998e60d..ef797d419 100644 --- a/common/src/main/java/io/pravega/schemaregistry/common/ContinuationTokenIterator.java +++ b/common/src/main/java/io/pravega/schemaregistry/common/ContinuationTokenIterator.java @@ -13,12 +13,10 @@ import javax.annotation.concurrent.GuardedBy; import java.util.Collection; -import java.util.HashSet; import java.util.Iterator; import java.util.Map; import java.util.NoSuchElementException; import java.util.Queue; -import java.util.Set; import java.util.concurrent.LinkedBlockingQueue; import java.util.function.Function; @@ -39,8 +37,6 @@ public class ContinuationTokenIterator implements Iterator { private T next; @GuardedBy("$lock") private boolean canHaveNext; - @GuardedBy("$lock") - private final Set tokens; public ContinuationTokenIterator(Function>> loadingFunction, Token tokenIdentity) { this.loadingFunction = loadingFunction; @@ -48,7 +44,6 @@ public ContinuationTokenIterator(Function> this.token = tokenIdentity; this.canHaveNext = true; this.next = null; - this.tokens = new HashSet<>(); } @Synchronized @@ -56,17 +51,11 @@ private void load() { next = next == null ? queue.poll() : next; while (next == null && canHaveNext) { Map.Entry> result = loadingFunction.apply(token); - boolean tokenUpdated = result.getKey() != null && !tokens.contains(result.getKey()); - if (result.getKey() != null) { - tokens.add(result.getKey()); - } token = result.getKey(); queue.addAll(result.getValue()); next = queue.poll(); - if (next == null) { - canHaveNext = tokenUpdated; - } + canHaveNext = next != null; } } diff --git a/common/src/test/java/io/pravega/schemaregistry/common/ContinuationTokenIteratorTest.java b/common/src/test/java/io/pravega/schemaregistry/common/ContinuationTokenIteratorTest.java index 89989512a..0d7d50e4d 100644 --- a/common/src/test/java/io/pravega/schemaregistry/common/ContinuationTokenIteratorTest.java +++ b/common/src/test/java/io/pravega/schemaregistry/common/ContinuationTokenIteratorTest.java @@ -28,19 +28,9 @@ public class ContinuationTokenIteratorTest { @Test public void test() { - // 1. call method 1st call returns - list of 5 items + new token - // verify that call method is not called until all 10 are read. - // 2. call returns empty list + new token - // 3. call returns empty list + new token - // 4. call returns list of 10 items + new token - // verify that we consume 10 items without calling the callmethod - // 5. call returns empty list + same token. 
--> this should exit Queue responses = spy(new LinkedBlockingQueue<>()); responses.add(new ListWithToken(Lists.newArrayList(1, 2, 3, 4, 5), "1")); - responses.add(new ListWithToken(Collections.emptyList(), "2")); - responses.add(new ListWithToken(Collections.emptyList(), "3")); - responses.add(new ListWithToken(Lists.newArrayList(6, 7, 8, 9, 10), "4")); - responses.add(new ListWithToken(Collections.emptyList(), "4")); + responses.add(new ListWithToken(Collections.emptyList(), "")); Function>> func = token -> { ListWithToken result = responses.poll(); return new AbstractMap.SimpleEntry<>(result.token, result.list); @@ -51,13 +41,7 @@ public void test() { assertEquals(myIterator.next().intValue(), i + 1); } verify(responses, times(1)).poll(); - for (int i = 5; i < 10; i++) { - assertTrue(myIterator.hasNext()); - assertEquals(myIterator.next().intValue(), i + 1); - } - verify(responses, times(4)).poll(); assertFalse(myIterator.hasNext()); - verify(responses, times(5)).poll(); } @Data diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/v1/ApiV1.java b/contract/src/main/java/io/pravega/schemaregistry/contract/v1/ApiV1.java index 7ce526198..5c851f0f6 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/v1/ApiV1.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/v1/ApiV1.java @@ -1,10 +1,10 @@ /** * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. - *

+ * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - *

+ * * http://www.apache.org/licenses/LICENSE-2.0 */ package io.pravega.schemaregistry.contract.v1; From 600ff74d224db7afb2c5020e4de838e103523e76 Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Mon, 8 Jun 2020 07:53:20 -0700 Subject: [PATCH 05/70] remove

Signed-off-by: Shivesh Ranjan --- .../contract/generated/rest/server/api/StringUtil.java | 1 - 1 file changed, 1 deletion(-) diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/StringUtil.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/StringUtil.java index 5d19e5e5f..1051f6941 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/StringUtil.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/StringUtil.java @@ -22,7 +22,6 @@ public static boolean containsIgnoreCase(String[] array, String value) { * * Note: This might be replaced by utility method from commons-lang or guava someday * if one of those libraries is added as dependency. - *

* * @param array The array of strings * @param separator The separator From 88af2e76678412fb8e5fe797249961a1a0fac62a Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Wed, 10 Jun 2020 07:36:49 -0700 Subject: [PATCH 06/70] remove

Signed-off-by: Shivesh Ranjan --- .../schemaregistry/common/HashUtil.java | 21 ------------------- .../generated/rest/server/api/StringUtil.java | 1 - 2 files changed, 22 deletions(-) delete mode 100644 common/src/main/java/io/pravega/schemaregistry/common/HashUtil.java diff --git a/common/src/main/java/io/pravega/schemaregistry/common/HashUtil.java b/common/src/main/java/io/pravega/schemaregistry/common/HashUtil.java deleted file mode 100644 index 3875cbb44..000000000 --- a/common/src/main/java/io/pravega/schemaregistry/common/HashUtil.java +++ /dev/null @@ -1,21 +0,0 @@ -/** - * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - */ -package io.pravega.schemaregistry.common; - -import com.google.common.hash.HashFunction; -import com.google.common.hash.Hashing; - -public class HashUtil { - private static final HashFunction HASH = Hashing.murmur3_128(); - - public static long getFingerprint(byte[] bytes) { - return HASH.hashBytes(bytes).asLong(); - } -} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/StringUtil.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/StringUtil.java index 5d19e5e5f..1051f6941 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/StringUtil.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/StringUtil.java @@ -22,7 +22,6 @@ public static boolean containsIgnoreCase(String[] array, String value) { * * Note: This might be replaced by utility method from commons-lang or guava someday * if one of those libraries is added as dependency. - *

* * @param array The array of strings * @param separator The separator From 1961ba1db5b96af8e43791ab6cf8b1241b82b90a Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Wed, 10 Jun 2020 07:38:41 -0700 Subject: [PATCH 07/70] add name util and hash util Signed-off-by: Shivesh Ranjan --- .../schemaregistry/common/HashUtil.java | 27 +++++++++++++ .../schemaregistry/common/NameUtil.java | 39 +++++++++++++++++++ .../schemaregistry/schemas/AvroSchema.java | 23 ++++++----- .../schemaregistry/schemas/JSONSchema.java | 32 +++++++++------ .../schemas/ProtobufSchema.java | 25 +++++++----- .../serializers/MultiplexedDeserializer.java | 7 +--- .../ProtobufGenericDeserlizer.java | 7 ++-- 7 files changed, 121 insertions(+), 39 deletions(-) create mode 100644 common/src/main/java/io/pravega/schemaregistry/common/HashUtil.java create mode 100644 common/src/main/java/io/pravega/schemaregistry/common/NameUtil.java diff --git a/common/src/main/java/io/pravega/schemaregistry/common/HashUtil.java b/common/src/main/java/io/pravega/schemaregistry/common/HashUtil.java new file mode 100644 index 000000000..b3597bf1c --- /dev/null +++ b/common/src/main/java/io/pravega/schemaregistry/common/HashUtil.java @@ -0,0 +1,27 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.common; + +import com.google.common.hash.HashFunction; +import com.google.common.hash.Hashing; + +public class HashUtil { + private static final HashFunction HASH = Hashing.murmur3_128(); + + /** + * Computes a 64 bit hash of supplied bytes using 128 bit murmur3 hash function and taking its first 8 bytes. + * + * @param bytes bytes to compute hash of. + * @return a 64 bit hash of the given bytes. + */ + public static long getFingerprint(byte[] bytes) { + return HASH.hashBytes(bytes).asLong(); + } +} diff --git a/common/src/main/java/io/pravega/schemaregistry/common/NameUtil.java b/common/src/main/java/io/pravega/schemaregistry/common/NameUtil.java new file mode 100644 index 000000000..4bfe715f3 --- /dev/null +++ b/common/src/main/java/io/pravega/schemaregistry/common/NameUtil.java @@ -0,0 +1,39 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.common; + +public class NameUtil { + /** + * Extracts the name from the fully qualified type name. Name represents the last token after ".". + * If the qualified name does not contain "." then the name is same as qualified name. + * + * @param qualifiedName qualified name to extract name from. + * @return extracted name. + */ + public static String extractName(String qualifiedName) { + int nameStart = qualifiedName.lastIndexOf("."); + return qualifiedName.substring(nameStart + 1); + } + + /** + * Extracts name and the prefix qualifier before the name. Name represents the last token after ".". + * Qualifier is the prefix before the name. + * If the qualified name does not contain "." then the name is same as qualified name and qualifier is empty string. 
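+     * For example, "io.pravega.test.MyMessage" yields the name "MyMessage" and the qualifier "io.pravega.test", while "MyMessage" alone yields the name "MyMessage" and an empty qualifier (illustrative values; behaviour follows from the implementation below).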
+ * + * @param qualifiedName qualified name to extract tokens from. + * @return an array containing name at index 0 and qualifier at index 1. + */ + public static String[] extractNameAndQualifier(String qualifiedName) { + int nameStart = qualifiedName.lastIndexOf("."); + String name = qualifiedName.substring(nameStart + 1); + String pckg = nameStart < 0 ? "" : qualifiedName.substring(0, nameStart); + return new String[]{name, pckg}; + } +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/schemas/AvroSchema.java b/serializers/src/main/java/io/pravega/schemaregistry/schemas/AvroSchema.java index 5cb4e7ae1..a1c4b6c24 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/schemas/AvroSchema.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/schemas/AvroSchema.java @@ -25,25 +25,31 @@ /** * Container class for Avro Schema. - * + * * @param Type of element. */ public class AvroSchema implements SchemaContainer { @Getter private final Schema schema; private final SchemaInfo schemaInfo; - + private AvroSchema(Schema schema) { this.schema = schema; - this.schemaInfo = new SchemaInfo(schema.getFullName(), + this.schemaInfo = new SchemaInfo(schema.getName(), SerializationFormat.Avro, getSchemaBytes(), ImmutableMap.of()); } + private AvroSchema(SchemaInfo schemaInfo) { + String schemaString = new String(schemaInfo.getSchemaData().array(), Charsets.UTF_8); + this.schema = new Schema.Parser().parse(schemaString); + this.schemaInfo = schemaInfo; + } + /** * Method to create a typed AvroSchema for the given class. It extracts the avro schema from the class. * For Avro generated classes, the schema is retrieved from the class. * For POJOs the schema is extracted using avro's {@link ReflectData}. - * + * * @param tClass Class whose object's schema is used. * @param Type of the Java class. * @return {@link AvroSchema} with generic type T that extracts and captures the avro schema. @@ -86,14 +92,11 @@ public static AvroSchema ofBaseType(Class from(SchemaInfo schemainfo) { - String schemaString = new String(schemainfo.getSchemaData().array(), Charsets.UTF_8); - Schema schema = new Schema.Parser().parse(schemaString); - - return new AvroSchema<>(schema); + public static AvroSchema from(SchemaInfo schemaInfo) { + return new AvroSchema<>(schemaInfo); } private ByteBuffer getSchemaBytes() { diff --git a/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java b/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java index 958208e51..10d7f9fc5 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java @@ -25,7 +25,7 @@ /** * Container class for Json Schema. - * + * * @param Type of element. */ public class JSONSchema implements SchemaContainer { @@ -34,16 +34,16 @@ public class JSONSchema implements SchemaContainer { private final Class tClass; @Getter private final Class tDerivedClass; - + @Getter private final JsonSchema schema; private final SchemaInfo schemaInfo; - + private JSONSchema(JsonSchema schema, String name, String schemaString, Class tClass) { this(schema, name, schemaString, tClass, tClass); } - + private JSONSchema(JsonSchema schema, String name, String schemaString, Class tClass, Class tDerivedClass) { String type = name != null ? name : schema.getId(); // Add empty name if the name is not supplied and cannot be extracted from the json schema id. 
@@ -55,10 +55,18 @@ private JSONSchema(JsonSchema schema, String name, String schemaString, Class this.schema = schema; } + private JSONSchema(SchemaInfo schemaInfo, JsonSchema schema, String schemaString, Class tClass) { + this.schemaString = schemaString; + this.schemaInfo = schemaInfo; + this.tClass = tClass; + this.tDerivedClass = tClass; + this.schema = schema; + } + /** * Method to create a typed JSONSchema for the given class. It extracts the json schema from the class. * For POJOs the schema is extracted using jacksons {@link JsonSchemaGenerator}. - * + * * @param tClass Class whose object's schema is used. * @param Type of the Java class. * @return {@link JSONSchema} with generic type T that extracts and captures the json schema. @@ -69,10 +77,10 @@ public static JSONSchema of(Class tClass) { JsonSchemaGenerator schemaGen = new JsonSchemaGenerator(objectMapper); JsonSchema schema = schemaGen.generateSchema(tClass); String schemaString = objectMapper.writeValueAsString(schema); - - return new JSONSchema<>(schema, null, schemaString, tClass); + + return new JSONSchema<>(schema, null, schemaString, tClass); } - + /** * Method to create a typed JSONSchema of type {@link Object} from the given schema. * @@ -83,10 +91,10 @@ public static JSONSchema of(Class tClass) { @SneakyThrows({JsonMappingException.class, JsonProcessingException.class}) public static JSONSchema of(String type, String schemaString) { ObjectMapper objectMapper = new ObjectMapper(); - JsonSchema schema = objectMapper.readValue(schemaString, JsonSchema.class); + JsonSchema schema = objectMapper.readValue(schemaString, JsonSchema.class); return new JSONSchema<>(schema, type, schemaString, Object.class); } - + @SneakyThrows({JsonMappingException.class, JsonProcessingException.class}) public static JSONSchema ofBaseType(Class tDerivedClass, Class tClass) { ObjectMapper objectMapper = new ObjectMapper(); @@ -107,9 +115,9 @@ public static JSONSchema ofBaseType(Class tDerivedClass, Cla public static JSONSchema from(SchemaInfo schemaInfo) { ObjectMapper objectMapper = new ObjectMapper(); String schemaString = new String(schemaInfo.getSchemaData().array(), Charsets.UTF_8); - + JsonSchema schema = objectMapper.readValue(schemaString, JsonSchema.class); - return new JSONSchema<>(schema, schemaInfo.getType(), schemaString, Object.class); + return new JSONSchema<>(schemaInfo, schema, schemaString, Object.class); } private ByteBuffer getSchemaBytes() { diff --git a/serializers/src/main/java/io/pravega/schemaregistry/schemas/ProtobufSchema.java b/serializers/src/main/java/io/pravega/schemaregistry/schemas/ProtobufSchema.java index e78e190e6..8d327b59d 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/schemas/ProtobufSchema.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/schemas/ProtobufSchema.java @@ -26,7 +26,7 @@ /** * Container class for protobuf schema. * Protobuf schemas are represented using {@link com.google.protobuf.DescriptorProtos.FileDescriptorSet}. - * + * * @param Type of element. 
*/ @Data @@ -35,7 +35,7 @@ public class ProtobufSchema implements SchemaContainer { private final Parser parser; @Getter private final DescriptorProtos.FileDescriptorSet descriptorProto; - + private final SchemaInfo schemaInfo; private ProtobufSchema(String name, Parser parser, DescriptorProtos.FileDescriptorSet fileDescriptorSet) { @@ -43,7 +43,16 @@ private ProtobufSchema(String name, Parser parser, DescriptorProtos.FileDescr this.descriptorProto = fileDescriptorSet; this.schemaInfo = new SchemaInfo(name, SerializationFormat.Protobuf, getSchemaBytes(), ImmutableMap.of()); } - + + @SneakyThrows + private ProtobufSchema(SchemaInfo schemaInfo) { + DescriptorProtos.FileDescriptorSet fileDescriptorSet = DescriptorProtos.FileDescriptorSet.parseFrom(schemaInfo.getSchemaData()); + + this.parser = null; + this.descriptorProto = fileDescriptorSet; + this.schemaInfo = schemaInfo; + } + private ByteBuffer getSchemaBytes() { return ByteBuffer.wrap(descriptorProto.toByteArray()); } @@ -55,7 +64,7 @@ public SchemaInfo getSchemaInfo() { /** * Method to generate protobuf schema from the supplied protobuf generated class and {@link DescriptorProtos.FileDescriptorSet}. - * + * * @param tClass Class for code generated protobuf message. * @param fileDescriptorSet file descriptor set representing a protobuf schema. * @param Type of protobuf message @@ -66,7 +75,7 @@ public SchemaInfo getSchemaInfo() { public static ProtobufSchema of(Class tClass, DescriptorProtos.FileDescriptorSet fileDescriptorSet) { T defaultInstance = (T) tClass.getMethod("getDefaultInstance").invoke(null); Parser tParser = (Parser) defaultInstance.getParserForType(); - return new ProtobufSchema<>(defaultInstance.getDescriptorForType().getFullName(), tParser, fileDescriptorSet); + return new ProtobufSchema<>(defaultInstance.getDescriptorForType().getName(), tParser, fileDescriptorSet); } /** @@ -102,7 +111,7 @@ public static ProtobufSchema ofBaseType(Class< T defaultInstance = (T) tDerivedClass.getMethod("getDefaultInstance").invoke(null); Parser tParser = (Parser) defaultInstance.getParserForType(); - return new ProtobufSchema<>(defaultInstance.getDescriptorForType().getFullName(), tParser, fileDescriptorSet); + return new ProtobufSchema<>(defaultInstance.getDescriptorForType().getName(), tParser, fileDescriptorSet); } /** @@ -114,9 +123,7 @@ public static ProtobufSchema ofBaseType(Class< @SneakyThrows @SuppressWarnings("unchecked") public static ProtobufSchema from(SchemaInfo schemaInfo) { - DescriptorProtos.FileDescriptorSet fileDescriptorSet = DescriptorProtos.FileDescriptorSet.parseFrom(schemaInfo.getSchemaData()); - - return new ProtobufSchema<>(schemaInfo.getType(), null, fileDescriptorSet); + return new ProtobufSchema<>(schemaInfo); } } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedDeserializer.java index 165a7fe64..6a1ddb599 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedDeserializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedDeserializer.java @@ -18,6 +18,8 @@ import java.io.InputStream; import java.util.Map; +import static io.pravega.schemaregistry.common.NameUtil.extractName; + class MultiplexedDeserializer extends AbstractPravegaDeserializer { private final Map> deserializers; @@ -40,9 +42,4 @@ protected T deserialize(InputStream inputStream, SchemaInfo writerSchema, Schema .getValue(); return 
deserializer.deserialize(inputStream, writerSchema, readerSchema); } - - private String extractName(String qualifiedName) { - int nameStart = qualifiedName.lastIndexOf("."); - return qualifiedName.substring(nameStart + 1); - } } \ No newline at end of file diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufGenericDeserlizer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufGenericDeserlizer.java index d01b7a470..8740dafb7 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufGenericDeserlizer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufGenericDeserlizer.java @@ -18,6 +18,7 @@ import com.google.protobuf.DynamicMessage; import io.pravega.schemaregistry.cache.EncodingCache; import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.common.NameUtil; import io.pravega.schemaregistry.contract.data.SchemaInfo; import io.pravega.schemaregistry.schemas.ProtobufSchema; import lombok.SneakyThrows; @@ -40,9 +41,9 @@ public Descriptors.Descriptor load(SchemaInfo schemaToUse) throws Exception { DescriptorProtos.FileDescriptorSet descriptorSet = ProtobufSchema.from(schemaToUse).getDescriptorProto(); int count = descriptorSet.getFileCount(); - int nameStart = schemaToUse.getType().lastIndexOf("."); - String name = schemaToUse.getType().substring(nameStart + 1); - String pckg = nameStart < 0 ? "" : schemaToUse.getType().substring(0, nameStart); + String[] tokens = NameUtil.extractNameAndQualifier(schemaToUse.getType()); + String name = tokens[0]; + String pckg = tokens[1]; DescriptorProtos.FileDescriptorProto mainDescriptor = descriptorSet.getFileList().stream() .filter(x -> x.getPackage().startsWith(pckg) && x.getMessageTypeList().stream().anyMatch(y -> y.getName().equals(name))) From 58e7f6558bf1a7d6e16973f120b800c7ef1fa7ae Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Thu, 11 Jun 2020 04:43:10 -0700 Subject: [PATCH 08/70] removing all unwanted auto generated swagger files Signed-off-by: Shivesh Ranjan --- .../client/SchemaRegistryClient.java | 38 +- .../contract/data/GroupHistoryRecord.java | 2 +- .../contract/data/SchemaValidationRules.java | 1 + .../rest/server/api/ApiException.java | 10 - .../rest/server/api/ApiOriginFilter.java | 22 - .../rest/server/api/ApiResponseMessage.java | 69 --- .../generated/rest/server/api/Bootstrap.java | 31 -- .../generated/rest/server/api/GroupsApi.java | 412 ------------------ .../rest/server/api/GroupsApiService.java | 54 --- .../rest/server/api/JacksonJsonProvider.java | 18 - .../rest/server/api/NotFoundException.java | 10 - .../generated/rest/server/api/SchemasApi.java | 74 ---- .../rest/server/api/SchemasApiService.java | 22 - .../generated/rest/server/api/StringUtil.java | 41 -- .../factories/GroupsApiServiceFactory.java | 13 - .../factories/SchemasApiServiceFactory.java | 13 - .../server/api/impl/GroupsApiServiceImpl.java | 134 ------ .../api/impl/SchemasApiServiceImpl.java | 26 -- .../schemaregistry/contract/v1/ApiV1.java | 37 +- 19 files changed, 41 insertions(+), 986 deletions(-) delete mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/ApiException.java delete mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/ApiOriginFilter.java delete mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/ApiResponseMessage.java delete mode 100644 
contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/Bootstrap.java delete mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/GroupsApi.java delete mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/GroupsApiService.java delete mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/JacksonJsonProvider.java delete mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/NotFoundException.java delete mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/SchemasApi.java delete mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/SchemasApiService.java delete mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/StringUtil.java delete mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/factories/GroupsApiServiceFactory.java delete mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/factories/SchemasApiServiceFactory.java delete mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/impl/GroupsApiServiceImpl.java delete mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/impl/SchemasApiServiceImpl.java diff --git a/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java index 5ed87a3aa..d0864b385 100644 --- a/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java +++ b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java @@ -118,7 +118,8 @@ boolean updateSchemaValidationRules(String groupId, SchemaValidationRules valida * If group is configured with {@link GroupProperties#allowMultipleTypes} then multiple schemas with distinct * type {@link SchemaInfo#type} could be registered. * All schemas with same type are assigned monotonically increasing version numbers. - * Add schema api is idempotent. If a schema is already registered, its version info is returned by the service. + * Implementation of this method is expected to be idempotent. The behaviour of Add Schema API on the schema registry + * service is idempotent. If a schema is already registered, its version info is returned by the service. * * @param groupId Id for the group. * @param schemaInfo Schema to add. @@ -136,12 +137,13 @@ VersionInfo addSchema(String groupId, SchemaInfo schemaInfo) throws SchemaValida MalformedSchemaException, ResourceNotFoundException, UnauthorizedException; /** - * Api to delete schema corresponding to the version. Users should be very careful while using this API in production, + * Deletes the schema associated to the given version. Users should be very careful while using this API in production, * esp if the schema has already been used to write the data. - * Delete schema api is idempotent. - * This does a soft delete of the schema. So getSchemaVersion with the version info will still return the schema. + * An implementation of the delete call is expected to be idempotent. The behaviour of delete schema API invocation + * with the schema registry service is idempotent. + * The service performs a soft delete of the schema. 
So getSchemaVersion with the version info will still return the schema. * However, the schema will not participate in any compatibility checks once deleted. - * It will not be included in listing schema versions for the group using apis like {@link SchemaRegistryClient#getSchemaVersions} + * It will not be included in listing schema versions for the group using APIs like {@link SchemaRegistryClient#getSchemaVersions} * or {@link SchemaRegistryClient#getGroupHistory} or {@link SchemaRegistryClient#getSchemas} or * {@link SchemaRegistryClient#getLatestSchemaVersion} * If add schema is called again using this deleted schema will result in a new version being assigned to it upon registration. @@ -154,15 +156,16 @@ VersionInfo addSchema(String groupId, SchemaInfo schemaInfo) throws SchemaValida void deleteSchemaVersion(String groupId, VersionInfo versionInfo) throws ResourceNotFoundException, UnauthorizedException; /** - * Api to delete schema corresponding to the schemaType and version. - * Users should be very careful while using this API in production, esp if the schema has already been used to write the data. - * Delete schema api is idempotent. - * This does a soft delete of the schema. So getSchemaVersion with the version info will still return the schema. + * Deletes the schema associated to the given version. Users should be very careful while using this API in production, + * esp if the schema has already been used to write the data. + * An implementation of the delete call is expected to be idempotent. The behaviour of delete schema API invocation + * with the schema registry service is idempotent. + * The service performs a soft delete of the schema. So getSchemaVersion with the version info will still return the schema. * However, the schema will not participate in any compatibility checks once deleted. - * It will not be included in listing schema versions for the group using apis like {@link SchemaRegistryClient#getSchemaVersions} + * It will not be included in listing schema versions for the group using APIs like {@link SchemaRegistryClient#getSchemaVersions} * or {@link SchemaRegistryClient#getGroupHistory} or {@link SchemaRegistryClient#getSchemas} or * {@link SchemaRegistryClient#getLatestSchemaVersion} - * If add schema is called again using this deleted schema will result in a new version being assigned to upon registration. + * If add schema is called again using this deleted schema will result in a new version being assigned to it upon registration. * * @param groupId Id for the group. * @param schemaType schemaType that identifies the type of object the schema represents. This should be same as the @@ -219,8 +222,9 @@ VersionInfo addSchema(String groupId, SchemaInfo schemaInfo) throws SchemaValida * Gets an encoding id that uniquely identifies a combination of Schema version and codec type. * This encoding id is a 4 byte integer and it can be used to tag the data which is serialized and encoded using the * schema version and codecType identified by this encoding id. - * This api is idempotent. And if an encoding id is generated for a version and codec pair, subsequent requests to this - * api will return the generated encoding id. + * The implementation of this method is expected to be idempotent. The corresponding GetEncodingId API on schema registry + * service is idempotent and will generate a new encoding id for each unique version and codecType pair only once. 
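+     * For example, a pair (version V, codecType "gzip") and a pair (version V, codecType "snappy") would be assigned two distinct encoding ids (the codecType strings here are only illustrative).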
+ * Subsequent requests to get the encoding id for the codecType and version will return the previously generated id. * If the schema identified by the version is deleted using {@link SchemaRegistryClient#deleteSchemaVersion} api, * then if the encoding id was already generated for the pair of schema version and codec, then it will be returned. * However, if no encoding id for the versioninfo and codec pair was generated and the schema version was deleted, @@ -283,14 +287,14 @@ SchemaWithVersion getLatestSchemaVersion(String groupId, @Nullable String schema /** * Checks whether given schema is valid by applying validation rules against previous schemas in the group * subject to current {@link GroupProperties#schemaValidationRules} policy. - * This api performs exactly the same validations as {@link SchemaRegistryClient#addSchema(String, SchemaInfo)} + * The invocation of this method will perform exactly the same validations as {@link SchemaRegistryClient#addSchema(String, SchemaInfo)} * but without registering the schema. This is primarily intended to be used during schema development phase to validate that * the changes to schema are in compliance with validation rules for the group. * * @param groupId Id for the group. * @param schemaInfo Schema to check for validity. * @return A schema is valid if it passes all the {@link GroupProperties#schemaValidationRules}. The rule supported - * presently, is Compatibility. If desired compatibility is satisfied by the schema then this api returns true, false otherwise. + * presently, is Compatibility. If desired compatibility is satisfied by the schema then this method returns true, false otherwise. * @throws ResourceNotFoundException if group is not found. * @throws UnauthorizedException if the user is unauthorized. */ @@ -345,8 +349,8 @@ SchemaWithVersion getLatestSchemaVersion(String groupId, @Nullable String schema /** * Finds all groups and corresponding version info for the groups where the supplied schema has been registered. * It is important to note that the same schema type could be part of multiple group, however in each group it - * may have gone through a separate evolution. This api simply identifies all groups where the specific schema - * (type, format and binary) is used. + * may have gone through a separate evolution. Invocation of this method lists all groups where the specific schema + * (type, format and binary) is used along with versions that identifies this schema in those groups. * The user defined {@link SchemaInfo#properties} is not used for comparison. * * @param schemaInfo Schema info to find references for. diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/data/GroupHistoryRecord.java b/contract/src/main/java/io/pravega/schemaregistry/contract/data/GroupHistoryRecord.java index 4b9a3d257..9ae08dacc 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/data/GroupHistoryRecord.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/data/GroupHistoryRecord.java @@ -30,7 +30,7 @@ public class GroupHistoryRecord { */ private final VersionInfo version; /** - * Validation rules that were applied at the time when the schema was registered. + * Validation rules applied at the time when the schema was registered. 
*/ private final SchemaValidationRules rules; /** diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaValidationRules.java b/contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaValidationRules.java index 06f77fb09..805171710 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaValidationRules.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaValidationRules.java @@ -51,6 +51,7 @@ public static SchemaValidationRules of(Compatibility compatibility) { * Method to create SchemaValidationRules from the list of supplied rules. If multiple same rule are present * in the list then only the latest rule of each type is added to the Rules map. * Currently the only rule supported is {@link Compatibility}. + * * @param rules List of rules. * @return SchemaValidationRules object. */ diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/ApiException.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/ApiException.java deleted file mode 100644 index 096b7c1d1..000000000 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/ApiException.java +++ /dev/null @@ -1,10 +0,0 @@ -package io.pravega.schemaregistry.contract.generated.rest.server.api; - - -public class ApiException extends Exception{ - private int code; - public ApiException (int code, String msg) { - super(msg); - this.code = code; - } -} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/ApiOriginFilter.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/ApiOriginFilter.java deleted file mode 100644 index 1ad2cce34..000000000 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/ApiOriginFilter.java +++ /dev/null @@ -1,22 +0,0 @@ -package io.pravega.schemaregistry.contract.generated.rest.server.api; - -import java.io.IOException; - -import javax.servlet.*; -import javax.servlet.http.HttpServletResponse; - - -public class ApiOriginFilter implements javax.servlet.Filter { - public void doFilter(ServletRequest request, ServletResponse response, - FilterChain chain) throws IOException, ServletException { - HttpServletResponse res = (HttpServletResponse) response; - res.addHeader("Access-Control-Allow-Origin", "*"); - res.addHeader("Access-Control-Allow-Methods", "GET, POST, DELETE, PUT"); - res.addHeader("Access-Control-Allow-Headers", "Content-Type"); - chain.doFilter(request, response); - } - - public void destroy() {} - - public void init(FilterConfig filterConfig) throws ServletException {} -} \ No newline at end of file diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/ApiResponseMessage.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/ApiResponseMessage.java deleted file mode 100644 index 47e3f5d76..000000000 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/ApiResponseMessage.java +++ /dev/null @@ -1,69 +0,0 @@ -package io.pravega.schemaregistry.contract.generated.rest.server.api; - -import javax.xml.bind.annotation.XmlTransient; - -@javax.xml.bind.annotation.XmlRootElement - -public class ApiResponseMessage { - public static final int ERROR = 1; - public static final int WARNING = 2; - public static final int INFO = 3; - public static final int OK = 4; - public static final int 
TOO_BUSY = 5; - - int code; - String type; - String message; - - public ApiResponseMessage(){} - - public ApiResponseMessage(int code, String message){ - this.code = code; - switch(code){ - case ERROR: - setType("error"); - break; - case WARNING: - setType("warning"); - break; - case INFO: - setType("info"); - break; - case OK: - setType("ok"); - break; - case TOO_BUSY: - setType("too busy"); - break; - default: - setType("unknown"); - break; - } - this.message = message; - } - - @XmlTransient - public int getCode() { - return code; - } - - public void setCode(int code) { - this.code = code; - } - - public String getType() { - return type; - } - - public void setType(String type) { - this.type = type; - } - - public String getMessage() { - return message; - } - - public void setMessage(String message) { - this.message = message; - } -} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/Bootstrap.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/Bootstrap.java deleted file mode 100644 index deb52b674..000000000 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/Bootstrap.java +++ /dev/null @@ -1,31 +0,0 @@ -package io.pravega.schemaregistry.contract.generated.rest.server.api; - -import io.swagger.jaxrs.config.SwaggerContextService; -import io.swagger.models.*; - -import io.swagger.models.auth.*; - -import javax.servlet.http.HttpServlet; -import javax.servlet.ServletContext; -import javax.servlet.ServletConfig; -import javax.servlet.ServletException; - -public class Bootstrap extends HttpServlet { - @Override - public void init(ServletConfig config) throws ServletException { - Info info = new Info() - .title("Swagger Server") - .description("REST APIs for Pravega Schema Registry.") - .termsOfService("") - .contact(new Contact() - .email("")) - .license(new License() - .name("Apache 2.0") - .url("http://www.apache.org/licenses/LICENSE-2.0")); - - ServletContext context = config.getServletContext(); - Swagger swagger = new Swagger().info(info); - - new SwaggerContextService().withServletConfig(config).updateSwagger(swagger); - } -} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/GroupsApi.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/GroupsApi.java deleted file mode 100644 index 16db9f378..000000000 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/GroupsApi.java +++ /dev/null @@ -1,412 +0,0 @@ -package io.pravega.schemaregistry.contract.generated.rest.server.api; - -import io.pravega.schemaregistry.contract.generated.rest.model.*; -import io.pravega.schemaregistry.contract.generated.rest.server.api.GroupsApiService; -import io.pravega.schemaregistry.contract.generated.rest.server.api.factories.GroupsApiServiceFactory; - -import io.swagger.annotations.ApiParam; -import io.swagger.jaxrs.*; - -import io.pravega.schemaregistry.contract.generated.rest.model.CanRead; -import io.pravega.schemaregistry.contract.generated.rest.model.CodecTypesList; -import io.pravega.schemaregistry.contract.generated.rest.model.CreateGroupRequest; -import io.pravega.schemaregistry.contract.generated.rest.model.EncodingId; -import io.pravega.schemaregistry.contract.generated.rest.model.EncodingInfo; -import io.pravega.schemaregistry.contract.generated.rest.model.GetEncodingIdRequest; -import io.pravega.schemaregistry.contract.generated.rest.model.GroupHistory; 
-import io.pravega.schemaregistry.contract.generated.rest.model.GroupProperties; -import io.pravega.schemaregistry.contract.generated.rest.model.ListGroupsResponse; -import io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo; -import io.pravega.schemaregistry.contract.generated.rest.model.SchemaVersionsList; -import io.pravega.schemaregistry.contract.generated.rest.model.UpdateValidationRulesRequest; -import io.pravega.schemaregistry.contract.generated.rest.model.Valid; -import io.pravega.schemaregistry.contract.generated.rest.model.ValidateRequest; -import io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo; - -import java.util.Map; -import java.util.List; -import io.pravega.schemaregistry.contract.generated.rest.server.api.NotFoundException; - -import java.io.InputStream; - -import org.glassfish.jersey.media.multipart.FormDataContentDisposition; -import org.glassfish.jersey.media.multipart.FormDataParam; - -import javax.servlet.ServletConfig; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.Response; -import javax.ws.rs.core.SecurityContext; -import javax.ws.rs.*; -import javax.validation.constraints.*; - -@Path("/groups") - - -@io.swagger.annotations.Api(description = "the groups API") - -public class GroupsApi { - private final GroupsApiService delegate; - - public GroupsApi(@Context ServletConfig servletContext) { - GroupsApiService delegate = null; - - if (servletContext != null) { - String implClass = servletContext.getInitParameter("GroupsApi.implementation"); - if (implClass != null && !"".equals(implClass.trim())) { - try { - delegate = (GroupsApiService) Class.forName(implClass).newInstance(); - } catch (Exception e) { - throw new RuntimeException(e); - } - } - } - - if (delegate == null) { - delegate = GroupsApiServiceFactory.getGroupsApi(); - } - - this.delegate = delegate; - } - - @POST - @Path("/{groupName}/codecTypes") - @Consumes({ "application/json" }) - - @io.swagger.annotations.ApiOperation(value = "", notes = "Adds a new codecType to the group.", response = Void.class, tags={ "Group", }) - @io.swagger.annotations.ApiResponses(value = { - @io.swagger.annotations.ApiResponse(code = 201, message = "Successfully added codecType to group", response = Void.class), - - @io.swagger.annotations.ApiResponse(code = 404, message = "Group not found", response = Void.class), - - @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while registering codectype to a Group", response = Void.class) }) - public Response addCodecType(@ApiParam(value = "Group name",required=true) @PathParam("groupName") String groupName -,@ApiParam(value = "The codecType" ,required=true) String codecType -,@Context SecurityContext securityContext) - throws NotFoundException { - return delegate.addCodecType(groupName,codecType,securityContext); - } - @POST - @Path("/{groupName}/schemas/versions") - @Consumes({ "application/json" }) - @Produces({ "application/json" }) - @io.swagger.annotations.ApiOperation(value = "", notes = "Adds a new schema to the group", response = VersionInfo.class, tags={ "Group", }) - @io.swagger.annotations.ApiResponses(value = { - @io.swagger.annotations.ApiResponse(code = 201, message = "Successfully added schema to the group", response = VersionInfo.class), - - @io.swagger.annotations.ApiResponse(code = 404, message = "Group not found", response = Void.class), - - @io.swagger.annotations.ApiResponse(code = 409, message = "Incompatible schema", response = Void.class), - - @io.swagger.annotations.ApiResponse(code = 
417, message = "Invalid serialization format", response = Void.class), - - @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while adding schema to group", response = Void.class) }) - public Response addSchema(@ApiParam(value = "Group name",required=true) @PathParam("groupName") String groupName -,@ApiParam(value = "Add new schema to group" ,required=true) SchemaInfo schemaInfo -,@Context SecurityContext securityContext) - throws NotFoundException { - return delegate.addSchema(groupName,schemaInfo,securityContext); - } - @POST - @Path("/{groupName}/schemas/versions/canRead") - @Consumes({ "application/json" }) - @Produces({ "application/json" }) - @io.swagger.annotations.ApiOperation(value = "", notes = "Checks if given schema can be used for reads subject to compatibility policy in the schema validation rules.", response = CanRead.class, tags={ "Group", }) - @io.swagger.annotations.ApiResponses(value = { - @io.swagger.annotations.ApiResponse(code = 200, message = "Response to tell whether schema can be used to read existing schemas", response = CanRead.class), - - @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), - - @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while checking schema for readability", response = Void.class) }) - public Response canRead(@ApiParam(value = "Group name",required=true) @PathParam("groupName") String groupName -,@ApiParam(value = "Checks if schema can be used to read the data in the stream based on compatibility rules." ,required=true) SchemaInfo schemaInfo -,@Context SecurityContext securityContext) - throws NotFoundException { - return delegate.canRead(groupName,schemaInfo,securityContext); - } - @POST - - @Consumes({ "application/json" }) - - @io.swagger.annotations.ApiOperation(value = "", notes = "Create a new Group", response = Void.class, tags={ "Group", }) - @io.swagger.annotations.ApiResponses(value = { - @io.swagger.annotations.ApiResponse(code = 201, message = "Successfully added group", response = Void.class), - - @io.swagger.annotations.ApiResponse(code = 409, message = "Group with given name already exists", response = Void.class), - - @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while creating a Group", response = Void.class) }) - public Response createGroup(@ApiParam(value = "The Group configuration" ,required=true) CreateGroupRequest createGroupRequest -,@Context SecurityContext securityContext) - throws NotFoundException { - return delegate.createGroup(createGroupRequest,securityContext); - } - @DELETE - @Path("/{groupName}") - - - @io.swagger.annotations.ApiOperation(value = "", notes = "Delete a Group", response = Void.class, tags={ "Group", }) - @io.swagger.annotations.ApiResponses(value = { - @io.swagger.annotations.ApiResponse(code = 204, message = "Successfully deleted the Group", response = Void.class), - - @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while deleting the Group", response = Void.class) }) - public Response deleteGroup(@ApiParam(value = "Group name",required=true) @PathParam("groupName") String groupName -,@Context SecurityContext securityContext) - throws NotFoundException { - return delegate.deleteGroup(groupName,securityContext); - } - @DELETE - @Path("/{groupName}/schemas/{type}/versions/{version}") - - @Produces({ "application/json" }) - @io.swagger.annotations.ApiOperation(value = "", notes = "Delete schema version 
from the group.", response = Void.class, tags={ "Group", }) - @io.swagger.annotations.ApiResponses(value = { - @io.swagger.annotations.ApiResponse(code = 204, message = "Schema corresponding to the version", response = Void.class), - - @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), - - @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while deleting schema from group", response = Void.class) }) - public Response deleteSchemaVersion(@ApiParam(value = "Group name",required=true) @PathParam("groupName") String groupName -,@ApiParam(value = "Schema type from SchemaInfo#type or VersionInfo#type",required=true) @PathParam("type") String type -,@ApiParam(value = "Version number",required=true) @PathParam("version") Integer version -,@Context SecurityContext securityContext) - throws NotFoundException { - return delegate.deleteSchemaVersion(groupName,type,version,securityContext); - } - @DELETE - @Path("/{groupName}/schemas/versions/{versionOrdinal}") - - @Produces({ "application/json" }) - @io.swagger.annotations.ApiOperation(value = "", notes = "Delete schema identified by version from the group.", response = Void.class, tags={ "Group", }) - @io.swagger.annotations.ApiResponses(value = { - @io.swagger.annotations.ApiResponse(code = 204, message = "Schema corresponding to the version", response = Void.class), - - @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), - - @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while deleting schema from group", response = Void.class) }) - public Response deleteSchemaVersionOrinal(@ApiParam(value = "Group name",required=true) @PathParam("groupName") String groupName -,@ApiParam(value = "Version ordinal",required=true) @PathParam("versionOrdinal") Integer versionOrdinal -,@Context SecurityContext securityContext) - throws NotFoundException { - return delegate.deleteSchemaVersionOrinal(groupName,versionOrdinal,securityContext); - } - @GET - @Path("/{groupName}/codecTypes") - - @Produces({ "application/json" }) - @io.swagger.annotations.ApiOperation(value = "", notes = "Get codecTypes for the group.", response = CodecTypesList.class, tags={ "Group", }) - @io.swagger.annotations.ApiResponses(value = { - @io.swagger.annotations.ApiResponse(code = 200, message = "Found CodecTypes", response = CodecTypesList.class), - - @io.swagger.annotations.ApiResponse(code = 404, message = "Group or encoding id with given name not found", response = Void.class), - - @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching codecTypes registered", response = Void.class) }) - public Response getCodecTypesList(@ApiParam(value = "Group name",required=true) @PathParam("groupName") String groupName -,@Context SecurityContext securityContext) - throws NotFoundException { - return delegate.getCodecTypesList(groupName,securityContext); - } - @PUT - @Path("/{groupName}/encodings") - @Consumes({ "application/json" }) - @Produces({ "application/json" }) - @io.swagger.annotations.ApiOperation(value = "", notes = "Get an encoding id that uniquely identifies a schema version and codec type pair.", response = EncodingId.class, tags={ "Group", }) - @io.swagger.annotations.ApiResponses(value = { - @io.swagger.annotations.ApiResponse(code = 200, message = "Found Encoding", response = EncodingId.class), - - @io.swagger.annotations.ApiResponse(code = 404, message = 
"Group with given name or version not found", response = Void.class), - - @io.swagger.annotations.ApiResponse(code = 412, message = "Codec type not registered", response = Void.class), - - @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while getting encoding id", response = Void.class) }) - public Response getEncodingId(@ApiParam(value = "Group name",required=true) @PathParam("groupName") String groupName -,@ApiParam(value = "Get schema corresponding to the version" ,required=true) GetEncodingIdRequest getEncodingIdRequest -,@Context SecurityContext securityContext) - throws NotFoundException { - return delegate.getEncodingId(groupName,getEncodingIdRequest,securityContext); - } - @GET - @Path("/{groupName}/encodings/{encodingId}") - - @Produces({ "application/json" }) - @io.swagger.annotations.ApiOperation(value = "", notes = "Get the encoding information corresponding to the encoding id.", response = EncodingInfo.class, tags={ "Group", }) - @io.swagger.annotations.ApiResponses(value = { - @io.swagger.annotations.ApiResponse(code = 200, message = "Found Encoding", response = EncodingInfo.class), - - @io.swagger.annotations.ApiResponse(code = 404, message = "Group or encoding id with given name not found", response = Void.class), - - @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while getting encoding info corresponding to encoding id", response = Void.class) }) - public Response getEncodingInfo(@ApiParam(value = "Group name",required=true) @PathParam("groupName") String groupName -,@ApiParam(value = "Encoding id that identifies a unique combination of schema and codec type",required=true) @PathParam("encodingId") Integer encodingId -,@Context SecurityContext securityContext) - throws NotFoundException { - return delegate.getEncodingInfo(groupName,encodingId,securityContext); - } - @GET - @Path("/{groupName}/history") - - @Produces({ "application/json" }) - @io.swagger.annotations.ApiOperation(value = "", notes = "Fetch the history of schema evolution of a Group", response = GroupHistory.class, tags={ "Group", }) - @io.swagger.annotations.ApiResponses(value = { - @io.swagger.annotations.ApiResponse(code = 200, message = "Found Group history", response = GroupHistory.class), - - @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), - - @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching Group history", response = Void.class) }) - public Response getGroupHistory(@ApiParam(value = "Group name",required=true) @PathParam("groupName") String groupName -,@Context SecurityContext securityContext) - throws NotFoundException { - return delegate.getGroupHistory(groupName,securityContext); - } - @GET - @Path("/{groupName}") - - @Produces({ "application/json" }) - @io.swagger.annotations.ApiOperation(value = "", notes = "Fetch the properties of an existing Group", response = GroupProperties.class, tags={ "Group", }) - @io.swagger.annotations.ApiResponses(value = { - @io.swagger.annotations.ApiResponse(code = 200, message = "Found Group properties", response = GroupProperties.class), - - @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), - - @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching Group details", response = Void.class) }) - public Response getGroupProperties(@ApiParam(value = "Group name",required=true) 
@PathParam("groupName") String groupName -,@Context SecurityContext securityContext) - throws NotFoundException { - return delegate.getGroupProperties(groupName,securityContext); - } - @GET - @Path("/{groupName}/schemas/{type}/versions/{version}") - - @Produces({ "application/json" }) - @io.swagger.annotations.ApiOperation(value = "", notes = "Get schema from the version ordinal that uniquely identifies the schema in the group.", response = SchemaInfo.class, tags={ "Group", }) - @io.swagger.annotations.ApiResponses(value = { - @io.swagger.annotations.ApiResponse(code = 200, message = "Schema corresponding to the version", response = SchemaInfo.class), - - @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), - - @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching schema from version", response = Void.class) }) - public Response getSchemaFromVersion(@ApiParam(value = "Group name",required=true) @PathParam("groupName") String groupName -,@ApiParam(value = "Schema type from SchemaInfo#type or VersionInfo#type",required=true) @PathParam("type") String type -,@ApiParam(value = "Version number",required=true) @PathParam("version") Integer version -,@Context SecurityContext securityContext) - throws NotFoundException { - return delegate.getSchemaFromVersion(groupName,type,version,securityContext); - } - @GET - @Path("/{groupName}/schemas/versions/{versionOrdinal}") - - @Produces({ "application/json" }) - @io.swagger.annotations.ApiOperation(value = "", notes = "Get schema from the version ordinal that uniquely identifies the schema in the group.", response = SchemaInfo.class, tags={ "Group", }) - @io.swagger.annotations.ApiResponses(value = { - @io.swagger.annotations.ApiResponse(code = 200, message = "Schema corresponding to the version", response = SchemaInfo.class), - - @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), - - @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching schema from version", response = Void.class) }) - public Response getSchemaFromVersionOrdinal(@ApiParam(value = "Group name",required=true) @PathParam("groupName") String groupName -,@ApiParam(value = "Version ordinal",required=true) @PathParam("versionOrdinal") Integer versionOrdinal -,@Context SecurityContext securityContext) - throws NotFoundException { - return delegate.getSchemaFromVersionOrdinal(groupName,versionOrdinal,securityContext); - } - @POST - @Path("/{groupName}/schemas/versions/find") - @Consumes({ "application/json" }) - @Produces({ "application/json" }) - @io.swagger.annotations.ApiOperation(value = "", notes = "Get the version for the schema if it is registered. It does not automatically register the schema. 
To add new schema use addSchema", response = VersionInfo.class, tags={ "Group", }) - @io.swagger.annotations.ApiResponses(value = { - @io.swagger.annotations.ApiResponse(code = 200, message = "Schema version", response = VersionInfo.class), - - @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), - - @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error fetching version for schema", response = Void.class) }) - public Response getSchemaVersion(@ApiParam(value = "Group name",required=true) @PathParam("groupName") String groupName -,@ApiParam(value = "Get schema corresponding to the version" ,required=true) SchemaInfo schemaInfo -,@Context SecurityContext securityContext) - throws NotFoundException { - return delegate.getSchemaVersion(groupName,schemaInfo,securityContext); - } - @GET - @Path("/{groupName}/schemas/versions") - - @Produces({ "application/json" }) - @io.swagger.annotations.ApiOperation(value = "", notes = "Get all schema versions for the group", response = SchemaVersionsList.class, tags={ "Group", }) - @io.swagger.annotations.ApiResponses(value = { - @io.swagger.annotations.ApiResponse(code = 200, message = "Versioned history of schemas registered under the group", response = SchemaVersionsList.class), - - @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), - - @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching Group schema versions", response = Void.class) }) - public Response getSchemaVersions(@ApiParam(value = "Group name",required=true) @PathParam("groupName") String groupName -,@ApiParam(value = "Type of object the schema describes.") @QueryParam("type") String type -,@Context SecurityContext securityContext) - throws NotFoundException { - return delegate.getSchemaVersions(groupName,type,securityContext); - } - @GET - @Path("/{groupName}/schemas") - - @Produces({ "application/json" }) - @io.swagger.annotations.ApiOperation(value = "", notes = "Fetch latest schema versions for all objects identified by SchemaInfo#type under a Group. 
If query param type is specified then latest schema for the type is returned.", response = SchemaVersionsList.class, tags={ "Group", }) - @io.swagger.annotations.ApiResponses(value = { - @io.swagger.annotations.ApiResponse(code = 200, message = "Latest schemas for all objects identified by SchemaInfo#type under the group", response = SchemaVersionsList.class), - - @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), - - @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching Group's latest schemas", response = Void.class) }) - public Response getSchemas(@ApiParam(value = "Group name",required=true) @PathParam("groupName") String groupName -,@ApiParam(value = "Type of object") @QueryParam("type") String type -,@Context SecurityContext securityContext) - throws NotFoundException { - return delegate.getSchemas(groupName,type,securityContext); - } - @GET - - - @Produces({ "application/json" }) - @io.swagger.annotations.ApiOperation(value = "", notes = "List all groups", response = ListGroupsResponse.class, tags={ "Group", }) - @io.swagger.annotations.ApiResponses(value = { - @io.swagger.annotations.ApiResponse(code = 200, message = "List of all groups", response = ListGroupsResponse.class), - - @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching the list of Groups", response = Void.class) }) - public Response listGroups(@ApiParam(value = "Continuation token") @QueryParam("continuationToken") String continuationToken -,@ApiParam(value = "The numbers of items to return") @QueryParam("limit") Integer limit -,@Context SecurityContext securityContext) - throws NotFoundException { - return delegate.listGroups(continuationToken,limit,securityContext); - } - @PUT - @Path("/{groupName}/rules") - @Consumes({ "application/json" }) - - @io.swagger.annotations.ApiOperation(value = "", notes = "update schema validation rules of an existing Group", response = Void.class, tags={ "Group", }) - @io.swagger.annotations.ApiResponses(value = { - @io.swagger.annotations.ApiResponse(code = 200, message = "Updated schema validation policy", response = Void.class), - - @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), - - @io.swagger.annotations.ApiResponse(code = 409, message = "Write conflict", response = Void.class), - - @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while updating Group's schema validation rules", response = Void.class) }) - public Response updateSchemaValidationRules(@ApiParam(value = "Group name",required=true) @PathParam("groupName") String groupName -,@ApiParam(value = "update group policy" ,required=true) UpdateValidationRulesRequest updateValidationRulesRequest -,@Context SecurityContext securityContext) - throws NotFoundException { - return delegate.updateSchemaValidationRules(groupName,updateValidationRulesRequest,securityContext); - } - @POST - @Path("/{groupName}/schemas/versions/validate") - @Consumes({ "application/json" }) - @Produces({ "application/json" }) - @io.swagger.annotations.ApiOperation(value = "", notes = "Checks if given schema is compatible with schemas in the registry for current policy setting.", response = Valid.class, tags={ "Group", }) - @io.swagger.annotations.ApiResponses(value = { - @io.swagger.annotations.ApiResponse(code = 200, message = "Schema validation response", response = Valid.class), - - 
@io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), - - @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while trying to validate schema", response = Void.class) }) - public Response validate(@ApiParam(value = "Group name",required=true) @PathParam("groupName") String groupName -,@ApiParam(value = "Checks if schema is valid with respect to supplied validation rules" ,required=true) ValidateRequest validateRequest -,@Context SecurityContext securityContext) - throws NotFoundException { - return delegate.validate(groupName,validateRequest,securityContext); - } -} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/GroupsApiService.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/GroupsApiService.java deleted file mode 100644 index dd8d9ef40..000000000 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/GroupsApiService.java +++ /dev/null @@ -1,54 +0,0 @@ -package io.pravega.schemaregistry.contract.generated.rest.server.api; - -import io.pravega.schemaregistry.contract.generated.rest.server.api.*; -import io.pravega.schemaregistry.contract.generated.rest.model.*; - -import org.glassfish.jersey.media.multipart.FormDataContentDisposition; - -import io.pravega.schemaregistry.contract.generated.rest.model.CanRead; -import io.pravega.schemaregistry.contract.generated.rest.model.CodecTypesList; -import io.pravega.schemaregistry.contract.generated.rest.model.CreateGroupRequest; -import io.pravega.schemaregistry.contract.generated.rest.model.EncodingId; -import io.pravega.schemaregistry.contract.generated.rest.model.EncodingInfo; -import io.pravega.schemaregistry.contract.generated.rest.model.GetEncodingIdRequest; -import io.pravega.schemaregistry.contract.generated.rest.model.GroupHistory; -import io.pravega.schemaregistry.contract.generated.rest.model.GroupProperties; -import io.pravega.schemaregistry.contract.generated.rest.model.ListGroupsResponse; -import io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo; -import io.pravega.schemaregistry.contract.generated.rest.model.SchemaVersionsList; -import io.pravega.schemaregistry.contract.generated.rest.model.UpdateValidationRulesRequest; -import io.pravega.schemaregistry.contract.generated.rest.model.Valid; -import io.pravega.schemaregistry.contract.generated.rest.model.ValidateRequest; -import io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo; - -import java.util.List; -import io.pravega.schemaregistry.contract.generated.rest.server.api.NotFoundException; - -import java.io.InputStream; - -import javax.ws.rs.core.Response; -import javax.ws.rs.core.SecurityContext; -import javax.validation.constraints.*; - -public abstract class GroupsApiService { - public abstract Response addCodecType(String groupName,String codecType,SecurityContext securityContext) throws NotFoundException; - public abstract Response addSchema(String groupName,SchemaInfo schemaInfo,SecurityContext securityContext) throws NotFoundException; - public abstract Response canRead(String groupName,SchemaInfo schemaInfo,SecurityContext securityContext) throws NotFoundException; - public abstract Response createGroup(CreateGroupRequest createGroupRequest,SecurityContext securityContext) throws NotFoundException; - public abstract Response deleteGroup(String groupName,SecurityContext securityContext) throws NotFoundException; - public 
abstract Response deleteSchemaVersion(String groupName,String type,Integer version,SecurityContext securityContext) throws NotFoundException; - public abstract Response deleteSchemaVersionOrinal(String groupName,Integer versionOrdinal,SecurityContext securityContext) throws NotFoundException; - public abstract Response getCodecTypesList(String groupName,SecurityContext securityContext) throws NotFoundException; - public abstract Response getEncodingId(String groupName,GetEncodingIdRequest getEncodingIdRequest,SecurityContext securityContext) throws NotFoundException; - public abstract Response getEncodingInfo(String groupName,Integer encodingId,SecurityContext securityContext) throws NotFoundException; - public abstract Response getGroupHistory(String groupName,SecurityContext securityContext) throws NotFoundException; - public abstract Response getGroupProperties(String groupName,SecurityContext securityContext) throws NotFoundException; - public abstract Response getSchemaFromVersion(String groupName,String type,Integer version,SecurityContext securityContext) throws NotFoundException; - public abstract Response getSchemaFromVersionOrdinal(String groupName,Integer versionOrdinal,SecurityContext securityContext) throws NotFoundException; - public abstract Response getSchemaVersion(String groupName,SchemaInfo schemaInfo,SecurityContext securityContext) throws NotFoundException; - public abstract Response getSchemaVersions(String groupName, String type,SecurityContext securityContext) throws NotFoundException; - public abstract Response getSchemas(String groupName, String type,SecurityContext securityContext) throws NotFoundException; - public abstract Response listGroups( String continuationToken, Integer limit,SecurityContext securityContext) throws NotFoundException; - public abstract Response updateSchemaValidationRules(String groupName,UpdateValidationRulesRequest updateValidationRulesRequest,SecurityContext securityContext) throws NotFoundException; - public abstract Response validate(String groupName,ValidateRequest validateRequest,SecurityContext securityContext) throws NotFoundException; -} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/JacksonJsonProvider.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/JacksonJsonProvider.java deleted file mode 100644 index e6179d25f..000000000 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/JacksonJsonProvider.java +++ /dev/null @@ -1,18 +0,0 @@ -package io.pravega.schemaregistry.contract.generated.rest.server.api; - -import com.fasterxml.jackson.databind.ObjectMapper; -import io.swagger.util.Json; -import javax.ws.rs.Produces; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.ext.Provider; -import org.glassfish.jersey.jackson.internal.jackson.jaxrs.json.JacksonJaxbJsonProvider; - -@Provider -@Produces({MediaType.APPLICATION_JSON}) -public class JacksonJsonProvider extends JacksonJaxbJsonProvider { - private static ObjectMapper commonMapper = Json.mapper(); - - public JacksonJsonProvider() { - super.setMapper(commonMapper); - } -} \ No newline at end of file diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/NotFoundException.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/NotFoundException.java deleted file mode 100644 index e9d99721b..000000000 --- 
a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/NotFoundException.java +++ /dev/null @@ -1,10 +0,0 @@ -package io.pravega.schemaregistry.contract.generated.rest.server.api; - - -public class NotFoundException extends ApiException { - private int code; - public NotFoundException (int code, String msg) { - super(code, msg); - this.code = code; - } -} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/SchemasApi.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/SchemasApi.java deleted file mode 100644 index 295bd1d86..000000000 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/SchemasApi.java +++ /dev/null @@ -1,74 +0,0 @@ -package io.pravega.schemaregistry.contract.generated.rest.server.api; - -import io.pravega.schemaregistry.contract.generated.rest.model.*; -import io.pravega.schemaregistry.contract.generated.rest.server.api.SchemasApiService; -import io.pravega.schemaregistry.contract.generated.rest.server.api.factories.SchemasApiServiceFactory; - -import io.swagger.annotations.ApiParam; -import io.swagger.jaxrs.*; - -import io.pravega.schemaregistry.contract.generated.rest.model.AddedTo; -import io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo; - -import java.util.Map; -import java.util.List; -import io.pravega.schemaregistry.contract.generated.rest.server.api.NotFoundException; - -import java.io.InputStream; - -import org.glassfish.jersey.media.multipart.FormDataContentDisposition; -import org.glassfish.jersey.media.multipart.FormDataParam; - -import javax.servlet.ServletConfig; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.Response; -import javax.ws.rs.core.SecurityContext; -import javax.ws.rs.*; -import javax.validation.constraints.*; - -@Path("/schemas") - - -@io.swagger.annotations.Api(description = "the schemas API") - -public class SchemasApi { - private final SchemasApiService delegate; - - public SchemasApi(@Context ServletConfig servletContext) { - SchemasApiService delegate = null; - - if (servletContext != null) { - String implClass = servletContext.getInitParameter("SchemasApi.implementation"); - if (implClass != null && !"".equals(implClass.trim())) { - try { - delegate = (SchemasApiService) Class.forName(implClass).newInstance(); - } catch (Exception e) { - throw new RuntimeException(e); - } - } - } - - if (delegate == null) { - delegate = SchemasApiServiceFactory.getSchemasApi(); - } - - this.delegate = delegate; - } - - @POST - @Path("/addedTo") - @Consumes({ "application/json" }) - @Produces({ "application/json" }) - @io.swagger.annotations.ApiOperation(value = "", notes = "Gets a map of groups to version info where the schema if it is registered. 
SchemaInfo#properties is ignored while comparing the schema.", response = AddedTo.class, tags={ "Schema", }) - @io.swagger.annotations.ApiResponses(value = { - @io.swagger.annotations.ApiResponse(code = 200, message = "Schema version", response = AddedTo.class), - - @io.swagger.annotations.ApiResponse(code = 404, message = "Schema not found", response = Void.class), - - @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching Schema references", response = Void.class) }) - public Response getSchemaReferences(@ApiParam(value = "Get schema references for the supplied schema" ,required=true) SchemaInfo schemaInfo -,@Context SecurityContext securityContext) - throws NotFoundException { - return delegate.getSchemaReferences(schemaInfo,securityContext); - } -} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/SchemasApiService.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/SchemasApiService.java deleted file mode 100644 index bcc19dd03..000000000 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/SchemasApiService.java +++ /dev/null @@ -1,22 +0,0 @@ -package io.pravega.schemaregistry.contract.generated.rest.server.api; - -import io.pravega.schemaregistry.contract.generated.rest.server.api.*; -import io.pravega.schemaregistry.contract.generated.rest.model.*; - -import org.glassfish.jersey.media.multipart.FormDataContentDisposition; - -import io.pravega.schemaregistry.contract.generated.rest.model.AddedTo; -import io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo; - -import java.util.List; -import io.pravega.schemaregistry.contract.generated.rest.server.api.NotFoundException; - -import java.io.InputStream; - -import javax.ws.rs.core.Response; -import javax.ws.rs.core.SecurityContext; -import javax.validation.constraints.*; - -public abstract class SchemasApiService { - public abstract Response getSchemaReferences(SchemaInfo schemaInfo,SecurityContext securityContext) throws NotFoundException; -} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/StringUtil.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/StringUtil.java deleted file mode 100644 index 1051f6941..000000000 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/StringUtil.java +++ /dev/null @@ -1,41 +0,0 @@ -package io.pravega.schemaregistry.contract.generated.rest.server.api; - - -public class StringUtil { - /** - * Check if the given array contains the given value (with case-insensitive comparison). - * - * @param array The array - * @param value The value to search - * @return true if the array contains the value - */ - public static boolean containsIgnoreCase(String[] array, String value) { - for (String str : array) { - if (value == null && str == null) return true; - if (value != null && value.equalsIgnoreCase(str)) return true; - } - return false; - } - - /** - * Join an array of strings with the given separator. - * - * Note: This might be replaced by utility method from commons-lang or guava someday - * if one of those libraries is added as dependency. 
- * - * @param array The array of strings - * @param separator The separator - * @return the resulting string - */ - public static String join(String[] array, String separator) { - int len = array.length; - if (len == 0) return ""; - - StringBuilder out = new StringBuilder(); - out.append(array[0]); - for (int i = 1; i < len; i++) { - out.append(separator).append(array[i]); - } - return out.toString(); - } -} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/factories/GroupsApiServiceFactory.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/factories/GroupsApiServiceFactory.java deleted file mode 100644 index 3145181ad..000000000 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/factories/GroupsApiServiceFactory.java +++ /dev/null @@ -1,13 +0,0 @@ -package io.pravega.schemaregistry.contract.generated.rest.server.api.factories; - -import io.pravega.schemaregistry.contract.generated.rest.server.api.GroupsApiService; -import io.pravega.schemaregistry.contract.generated.rest.server.api.impl.GroupsApiServiceImpl; - - -public class GroupsApiServiceFactory { - private final static GroupsApiService service = new GroupsApiServiceImpl(); - - public static GroupsApiService getGroupsApi() { - return service; - } -} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/factories/SchemasApiServiceFactory.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/factories/SchemasApiServiceFactory.java deleted file mode 100644 index 8587b6fef..000000000 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/factories/SchemasApiServiceFactory.java +++ /dev/null @@ -1,13 +0,0 @@ -package io.pravega.schemaregistry.contract.generated.rest.server.api.factories; - -import io.pravega.schemaregistry.contract.generated.rest.server.api.SchemasApiService; -import io.pravega.schemaregistry.contract.generated.rest.server.api.impl.SchemasApiServiceImpl; - - -public class SchemasApiServiceFactory { - private final static SchemasApiService service = new SchemasApiServiceImpl(); - - public static SchemasApiService getSchemasApi() { - return service; - } -} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/impl/GroupsApiServiceImpl.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/impl/GroupsApiServiceImpl.java deleted file mode 100644 index 97aea99a9..000000000 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/impl/GroupsApiServiceImpl.java +++ /dev/null @@ -1,134 +0,0 @@ -package io.pravega.schemaregistry.contract.generated.rest.server.api.impl; - -import io.pravega.schemaregistry.contract.generated.rest.server.api.*; -import io.pravega.schemaregistry.contract.generated.rest.model.*; - -import io.pravega.schemaregistry.contract.generated.rest.model.CanRead; -import io.pravega.schemaregistry.contract.generated.rest.model.CodecTypesList; -import io.pravega.schemaregistry.contract.generated.rest.model.CreateGroupRequest; -import io.pravega.schemaregistry.contract.generated.rest.model.EncodingId; -import io.pravega.schemaregistry.contract.generated.rest.model.EncodingInfo; -import io.pravega.schemaregistry.contract.generated.rest.model.GetEncodingIdRequest; -import io.pravega.schemaregistry.contract.generated.rest.model.GroupHistory; -import 
io.pravega.schemaregistry.contract.generated.rest.model.GroupProperties; -import io.pravega.schemaregistry.contract.generated.rest.model.ListGroupsResponse; -import io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo; -import io.pravega.schemaregistry.contract.generated.rest.model.SchemaVersionsList; -import io.pravega.schemaregistry.contract.generated.rest.model.UpdateValidationRulesRequest; -import io.pravega.schemaregistry.contract.generated.rest.model.Valid; -import io.pravega.schemaregistry.contract.generated.rest.model.ValidateRequest; -import io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo; - -import java.util.List; -import io.pravega.schemaregistry.contract.generated.rest.server.api.NotFoundException; - -import java.io.InputStream; - -import org.glassfish.jersey.media.multipart.FormDataContentDisposition; - -import javax.ws.rs.core.Response; -import javax.ws.rs.core.SecurityContext; -import javax.validation.constraints.*; - -public class GroupsApiServiceImpl extends GroupsApiService { - @Override - public Response addCodecType(String groupName, String codecType, SecurityContext securityContext) throws NotFoundException { - // do some magic! - return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); - } - @Override - public Response addSchema(String groupName, SchemaInfo schemaInfo, SecurityContext securityContext) throws NotFoundException { - // do some magic! - return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); - } - @Override - public Response canRead(String groupName, SchemaInfo schemaInfo, SecurityContext securityContext) throws NotFoundException { - // do some magic! - return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); - } - @Override - public Response createGroup(CreateGroupRequest createGroupRequest, SecurityContext securityContext) throws NotFoundException { - // do some magic! - return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); - } - @Override - public Response deleteGroup(String groupName, SecurityContext securityContext) throws NotFoundException { - // do some magic! - return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); - } - @Override - public Response deleteSchemaVersion(String groupName, String type, Integer version, SecurityContext securityContext) throws NotFoundException { - // do some magic! - return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); - } - @Override - public Response deleteSchemaVersionOrinal(String groupName, Integer versionOrdinal, SecurityContext securityContext) throws NotFoundException { - // do some magic! - return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); - } - @Override - public Response getCodecTypesList(String groupName, SecurityContext securityContext) throws NotFoundException { - // do some magic! - return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); - } - @Override - public Response getEncodingId(String groupName, GetEncodingIdRequest getEncodingIdRequest, SecurityContext securityContext) throws NotFoundException { - // do some magic! 
- return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); - } - @Override - public Response getEncodingInfo(String groupName, Integer encodingId, SecurityContext securityContext) throws NotFoundException { - // do some magic! - return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); - } - @Override - public Response getGroupHistory(String groupName, SecurityContext securityContext) throws NotFoundException { - // do some magic! - return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); - } - @Override - public Response getGroupProperties(String groupName, SecurityContext securityContext) throws NotFoundException { - // do some magic! - return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); - } - @Override - public Response getSchemaFromVersion(String groupName, String type, Integer version, SecurityContext securityContext) throws NotFoundException { - // do some magic! - return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); - } - @Override - public Response getSchemaFromVersionOrdinal(String groupName, Integer versionOrdinal, SecurityContext securityContext) throws NotFoundException { - // do some magic! - return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); - } - @Override - public Response getSchemaVersion(String groupName, SchemaInfo schemaInfo, SecurityContext securityContext) throws NotFoundException { - // do some magic! - return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); - } - @Override - public Response getSchemaVersions(String groupName, String type, SecurityContext securityContext) throws NotFoundException { - // do some magic! - return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); - } - @Override - public Response getSchemas(String groupName, String type, SecurityContext securityContext) throws NotFoundException { - // do some magic! - return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); - } - @Override - public Response listGroups( String continuationToken, Integer limit, SecurityContext securityContext) throws NotFoundException { - // do some magic! - return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); - } - @Override - public Response updateSchemaValidationRules(String groupName, UpdateValidationRulesRequest updateValidationRulesRequest, SecurityContext securityContext) throws NotFoundException { - // do some magic! - return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); - } - @Override - public Response validate(String groupName, ValidateRequest validateRequest, SecurityContext securityContext) throws NotFoundException { - // do some magic! 
- return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); - } -} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/impl/SchemasApiServiceImpl.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/impl/SchemasApiServiceImpl.java deleted file mode 100644 index 565f0f2fb..000000000 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/server/api/impl/SchemasApiServiceImpl.java +++ /dev/null @@ -1,26 +0,0 @@ -package io.pravega.schemaregistry.contract.generated.rest.server.api.impl; - -import io.pravega.schemaregistry.contract.generated.rest.server.api.*; -import io.pravega.schemaregistry.contract.generated.rest.model.*; - -import io.pravega.schemaregistry.contract.generated.rest.model.AddedTo; -import io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo; - -import java.util.List; -import io.pravega.schemaregistry.contract.generated.rest.server.api.NotFoundException; - -import java.io.InputStream; - -import org.glassfish.jersey.media.multipart.FormDataContentDisposition; - -import javax.ws.rs.core.Response; -import javax.ws.rs.core.SecurityContext; -import javax.validation.constraints.*; - -public class SchemasApiServiceImpl extends SchemasApiService { - @Override - public Response getSchemaReferences(SchemaInfo schemaInfo, SecurityContext securityContext) throws NotFoundException { - // do some magic! - return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build(); - } -} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/v1/ApiV1.java b/contract/src/main/java/io/pravega/schemaregistry/contract/v1/ApiV1.java index 5c851f0f6..ec1ab7a39 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/v1/ApiV1.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/v1/ApiV1.java @@ -25,7 +25,6 @@ import io.pravega.schemaregistry.contract.generated.rest.model.Valid; import io.pravega.schemaregistry.contract.generated.rest.model.ValidateRequest; import io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo; -import io.pravega.schemaregistry.contract.generated.rest.server.api.NotFoundException; import io.swagger.annotations.ApiParam; import javax.ws.rs.Consumes; @@ -293,7 +292,7 @@ public interface GroupsApiAsync { @io.swagger.annotations.ApiResponse(code = 404, message = "Group not found", response = Void.class), @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while registering codectype to a Group", response = Void.class)}) void addCodecType(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, - @ApiParam(value = "Add codec type", required = true) String codecType, @Suspended AsyncResponse asyncResponse) throws NotFoundException; + @ApiParam(value = "Add codec type", required = true) String codecType, @Suspended AsyncResponse asyncResponse); @POST @Path("/{groupName}/schemas/versions") @@ -307,7 +306,7 @@ void addCodecType(@ApiParam(value = "Group name", required = true) @PathParam("g @io.swagger.annotations.ApiResponse(code = 417, message = "Invalid serialization format", response = Void.class), @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while adding a schema", response = Void.class)}) void addSchema(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, - @ApiParam(value = "Add new schema to group", required = 
true) SchemaInfo schemaInfo, @Suspended AsyncResponse asyncResponse) throws NotFoundException; + @ApiParam(value = "Add new schema to group", required = true) SchemaInfo schemaInfo, @Suspended AsyncResponse asyncResponse); @POST @Path("/{groupName}/schemas/versions/canRead") @@ -319,7 +318,7 @@ void addSchema(@ApiParam(value = "Group name", required = true) @PathParam("grou @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while checking schema for readability", response = Void.class)}) void canRead(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, - @ApiParam(value = "Checks if schema can be used to read the data in the stream based on compatibility rules.", required = true) SchemaInfo schemaInfo, @Suspended AsyncResponse asyncResponse) throws NotFoundException; + @ApiParam(value = "Checks if schema can be used to read the data in the stream based on compatibility rules.", required = true) SchemaInfo schemaInfo, @Suspended AsyncResponse asyncResponse); @POST @Consumes({"application/json"}) @@ -328,7 +327,7 @@ void canRead(@ApiParam(value = "Group name", required = true) @PathParam("groupN @io.swagger.annotations.ApiResponse(code = 201, message = "Successfully added group", response = Void.class), @io.swagger.annotations.ApiResponse(code = 409, message = "Group with given name already exists", response = Void.class), @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while creating a Group", response = Void.class)}) - void createGroup(@ApiParam(value = "The Group configuration", required = true) CreateGroupRequest createGroupRequest, @Suspended AsyncResponse asyncResponse) throws NotFoundException; + void createGroup(@ApiParam(value = "The Group configuration", required = true) CreateGroupRequest createGroupRequest, @Suspended AsyncResponse asyncResponse); @DELETE @Path("/{groupName}") @@ -336,7 +335,7 @@ void canRead(@ApiParam(value = "Group name", required = true) @PathParam("groupN @io.swagger.annotations.ApiResponses(value = { @io.swagger.annotations.ApiResponse(code = 204, message = "Successfully deleted the Group", response = Void.class), @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while deleting the Group", response = Void.class)}) - void deleteGroup(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, @Suspended AsyncResponse asyncResponse) throws NotFoundException; + void deleteGroup(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, @Suspended AsyncResponse asyncResponse); @GET @Path("/{groupName}/codecTypes") @@ -346,7 +345,7 @@ void canRead(@ApiParam(value = "Group name", required = true) @PathParam("groupN @io.swagger.annotations.ApiResponse(code = 200, message = "Found CodecTypes", response = CodecTypesList.class), @io.swagger.annotations.ApiResponse(code = 404, message = "Group or encoding id with given name not found", response = Void.class), @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching codecTypes registered", response = Void.class)}) - void getCodecTypesList(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, @Suspended AsyncResponse asyncResponse) throws NotFoundException; + void getCodecTypesList(@ApiParam(value = "Group name", required = true) 
@PathParam("groupName") String groupName, @Suspended AsyncResponse asyncResponse); @GET @Path("/{groupName}/encodings/{encodingId}") @@ -357,7 +356,7 @@ void canRead(@ApiParam(value = "Group name", required = true) @PathParam("groupN @io.swagger.annotations.ApiResponse(code = 404, message = "Group or encoding id with given name not found", response = Void.class), @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while getting encoding info corresponding to encoding id", response = Void.class)}) void getEncodingInfo(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, - @ApiParam(value = "Encoding id that identifies a unique combination of schema and codecType", required = true) @PathParam("encodingId") Integer encodingId, @Suspended AsyncResponse asyncResponse) throws NotFoundException; + @ApiParam(value = "Encoding id that identifies a unique combination of schema and codecType", required = true) @PathParam("encodingId") Integer encodingId, @Suspended AsyncResponse asyncResponse); @GET @Path("/{groupName}") @@ -367,7 +366,7 @@ void getEncodingInfo(@ApiParam(value = "Group name", required = true) @PathParam @io.swagger.annotations.ApiResponse(code = 200, message = "Found Group properties", response = GroupProperties.class), @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching Group details", response = Void.class)}) - void getGroupProperties(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, @Suspended AsyncResponse asyncResponse) throws NotFoundException; + void getGroupProperties(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, @Suspended AsyncResponse asyncResponse); @GET @Path("/{groupName}/history") @@ -377,7 +376,7 @@ void getEncodingInfo(@ApiParam(value = "Group name", required = true) @PathParam @io.swagger.annotations.ApiResponse(code = 200, message = "Found Group history", response = GroupHistory.class), @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching Group history", response = Void.class)}) - void getGroupHistory(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, @Suspended AsyncResponse asyncResponse) throws NotFoundException; + void getGroupHistory(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, @Suspended AsyncResponse asyncResponse); @GET @Path("/{groupName}/schemas/versions") @@ -389,7 +388,7 @@ void getEncodingInfo(@ApiParam(value = "Group name", required = true) @PathParam @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching Group details", response = Void.class)}) void getSchemaVersions(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, @ApiParam(value = "Type") @QueryParam("type") String type, - @Suspended AsyncResponse asyncResponse) throws NotFoundException; + @Suspended AsyncResponse asyncResponse); @GET @Path("/{groupName}/schemas") @@ -401,7 +400,7 @@ void getSchemaVersions(@ApiParam(value = "Group name", required = true) @PathPar @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching Group's 
latest schemas", response = Void.class)}) void getSchemas(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, @ApiParam(value = "Type of object") @QueryParam("type") String type, - @Suspended AsyncResponse asyncResponse) throws NotFoundException; + @Suspended AsyncResponse asyncResponse); @PUT @Path("/{groupName}/encodings") @@ -414,7 +413,7 @@ void getSchemas(@ApiParam(value = "Group name", required = true) @PathParam("gro @io.swagger.annotations.ApiResponse(code = 412, message = "Codec type not registered", response = Void.class), @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while getting encoding id", response = Void.class)}) void getEncodingId(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, - @ApiParam(value = "Get schema corresponding to the version", required = true) GetEncodingIdRequest getEncodingIdRequest, @Suspended AsyncResponse asyncResponse) throws NotFoundException; + @ApiParam(value = "Get schema corresponding to the version", required = true) GetEncodingIdRequest getEncodingIdRequest, @Suspended AsyncResponse asyncResponse); @GET @Path("/{groupName}/schemas/versions/{versionOrdinal}") @@ -425,7 +424,7 @@ void getEncodingId(@ApiParam(value = "Group name", required = true) @PathParam(" @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching schema from version", response = Void.class)}) void getSchemaFromVersionOrdinal(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, - @ApiParam(value = "version ordinal", required = true) @PathParam("versionOrdinal") Integer version, @Suspended AsyncResponse asyncResponse) throws NotFoundException; + @ApiParam(value = "version ordinal", required = true) @PathParam("versionOrdinal") Integer version, @Suspended AsyncResponse asyncResponse); @DELETE @Path("/{groupName}/schemas/versions/{versionOrdinal}") @@ -436,7 +435,7 @@ void getSchemaFromVersionOrdinal(@ApiParam(value = "Group name", required = true @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while deleting schema from group", response = Void.class)}) void deleteSchemaFromVersionOrdinal(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, - @ApiParam(value = "version ordinal", required = true) @PathParam("versionOrdinal") Integer version, @Suspended AsyncResponse asyncResponse) throws NotFoundException; + @ApiParam(value = "version ordinal", required = true) @PathParam("versionOrdinal") Integer version, @Suspended AsyncResponse asyncResponse); @GET @Path("/{groupName}/schemas/{type}/versions/{version}") @@ -475,7 +474,7 @@ void deleteSchemaVersion(@ApiParam(value = "Group name", required = true) @PathP @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching Group details", response = Void.class)}) void getSchemaVersion(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, @ApiParam(value = "Get schema corresponding to the version", required = true) SchemaInfo schemaInfo, - @Suspended AsyncResponse asyncResponse) throws NotFoundException; + @Suspended AsyncResponse asyncResponse); @GET @Produces({"application/json"}) 
@@ -484,7 +483,7 @@ void getSchemaVersion(@ApiParam(value = "Group name", required = true) @PathPara @io.swagger.annotations.ApiResponse(code = 200, message = "List of all groups", response = ListGroupsResponse.class), @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching the list of Groups", response = Void.class)}) void listGroups(@ApiParam(value = "Continuation token") @QueryParam("continuationToken") String continuationToken, - @ApiParam(value = "The numbers of items to return") @QueryParam("limit") Integer limit, @Suspended AsyncResponse asyncResponse) throws NotFoundException; + @ApiParam(value = "The numbers of items to return") @QueryParam("limit") Integer limit, @Suspended AsyncResponse asyncResponse); @PUT @Path("/{groupName}/rules") @@ -496,7 +495,7 @@ void listGroups(@ApiParam(value = "Continuation token") @QueryParam("continuatio @io.swagger.annotations.ApiResponse(code = 409, message = "Write conflict", response = Void.class), @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while updating Group's schema validation rules", response = Void.class)}) void updateSchemaValidationRules(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, - @ApiParam(value = "update group policy", required = true) UpdateValidationRulesRequest updateValidationRulesRequest, @Suspended AsyncResponse asyncResponse) throws NotFoundException; + @ApiParam(value = "update group policy", required = true) UpdateValidationRulesRequest updateValidationRulesRequest, @Suspended AsyncResponse asyncResponse); @POST @Path("/{groupName}/schemas/versions/validate") @@ -508,7 +507,7 @@ void updateSchemaValidationRules(@ApiParam(value = "Group name", required = true @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while trying to validate schema", response = Void.class)}) void validate(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, - @ApiParam(value = "Checks if schema is valid with respect to supplied validation rules", required = true) ValidateRequest validateRequest, @Suspended AsyncResponse asyncResponse) throws NotFoundException; + @ApiParam(value = "Checks if schema is valid with respect to supplied validation rules", required = true) ValidateRequest validateRequest, @Suspended AsyncResponse asyncResponse); } From 7027d88fd3f24a71ce6ab169caded2f965c1203a Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Thu, 11 Jun 2020 05:03:41 -0700 Subject: [PATCH 09/70] rename Signed-off-by: Shivesh Ranjan --- build.gradle | 2 -- .../AbstractPravegaDeserializer.java | 6 ++-- .../AbstractPravegaSerializer.java | 6 ++-- .../serializers/SerializerConfig.java | 29 ++++++++++--------- .../serializers/SerializerFactory.java | 18 ++++++------ 5 files changed, 30 insertions(+), 31 deletions(-) diff --git a/build.gradle b/build.gradle index e11dbf5d9..92a2beb48 100644 --- a/build.gradle +++ b/build.gradle @@ -199,9 +199,7 @@ project('serializers') { compile project(':common') compile project(':client') compile group: 'org.apache.avro', name: 'avro', version: avroVersion - compile group: 'org.apache.avro', name: 'avro-protobuf', version: avroProtobufVersion compile group: 'com.google.protobuf', name: 'protobuf-java', version: protobufProtocVersion - compile group: 'com.google.protobuf', name:'protobuf-gradle-plugin', version: 
protobufGradlePlugin compile group: 'com.google.protobuf', name: 'protobuf-java-util', version: protobufUtilVersion compile group: 'io.pravega', name: 'pravega-client', version: pravegaVersion compile group: 'org.xerial.snappy', name: 'snappy-java', version: snappyVersion diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractPravegaDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractPravegaDeserializer.java index be227486e..d25b59ac7 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractPravegaDeserializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractPravegaDeserializer.java @@ -103,9 +103,9 @@ public T deserialize(ByteBuffer data) { writerSchema = encodingInfo.getSchemaInfo(); } - ByteBuffer uncompressed = decoder.decode(codecType, data); - byte[] array = new byte[uncompressed.remaining()]; - uncompressed.get(array); + ByteBuffer decoded = decoder.decode(codecType, data); + byte[] array = new byte[decoded.remaining()]; + decoded.get(array); InputStream inputStream = new ByteArrayInputStream(array); if (schemaInfo == null) { // deserialize into writer schema diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractPravegaSerializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractPravegaSerializer.java index 6043a805f..3ed2664ca 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractPravegaSerializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractPravegaSerializer.java @@ -106,9 +106,9 @@ public ByteBuffer serialize(T obj) { byte[] array = dataStream.toByteArray(); - ByteBuffer compressed = codec.encode(ByteBuffer.wrap(array)); - array = new byte[compressed.remaining()]; - compressed.get(array); + ByteBuffer encoded = codec.encode(ByteBuffer.wrap(array)); + array = new byte[encoded.remaining()]; + encoded.get(array); outputStream.write(array); return ByteBuffer.wrap(outputStream.toByteArray()); diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java index 5d22e5f24..276d71943 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java @@ -57,22 +57,22 @@ public class SerializerConfig { * It is recommended to register keep this flag as false in production systems and manage schema evolution explicitly and * in lockstep with upgrade of existing pravega client applications. */ - private final boolean autoRegisterSchema; + private final boolean registerSchema; /** * Flag to tell the serializer if the codec should be automatically registered before using the serializer in * {@link io.pravega.client.stream.EventStreamWriter}. * It is recommended to register keep this flag as false in production systems and manage codecTypes used by writers explicitly * so that readers are aware of encodings used. */ - private final boolean autoRegisterCodec; + private final boolean registerCodec; /** - * Codec to use for compressing events after serializing them. + * Codec to use for encoding events after serializing them. */ private final Codec codec; /** * Function that should be applied on serialized data read from stream. 
This is invoked after reading the codecType * from {@link EncodingInfo} and using the codec type read from it. - * It should return the uncompressed data back to the deserializer. + * It should return the decoded data back to the deserializer. */ private final Decoder decoder; /** @@ -84,9 +84,9 @@ public class SerializerConfig { * Flag to tell the serializer if the group should be created automatically. * It is recommended to register keep this flag as false in production systems and create groups and add schemas */ - private final boolean autoCreateGroup; + private final boolean createGroup; /** - * Group properties to use for creating the group if autoCreateGroup is set to true. + * Group properties to use for creating the group if createGroup is set to true. */ private final GroupProperties groupProperties; @@ -95,8 +95,9 @@ public static final class SerializerConfigBuilder { private Decoder decoder = new Decoder(); - private boolean autoRegisterSchema = false; - private boolean autoRegisterCodec = false; + private boolean registerSchema = false; + private boolean registerCodec = false; + private boolean createGroup = false; private boolean failOnCodecMismatch = true; private Either registryConfigOrClient = null; @@ -107,16 +108,16 @@ public SerializerConfigBuilder decoder(String codecType, Function Serializer avroSerializer(SerializerConfig config, AvroSche autoCreateGroup(schemaRegistryClient, config); registerCodec(schemaRegistryClient, config); String groupId = config.getGroupId(); - return new AvroSerializer<>(groupId, schemaRegistryClient, schemaData, config.getCodec(), config.isAutoRegisterSchema()); + return new AvroSerializer<>(groupId, schemaRegistryClient, schemaData, config.getCodec(), config.isRegisterSchema()); } /** @@ -146,7 +146,7 @@ public static Serializer avroMultiTypeSerializer(Se Map, AbstractPravegaSerializer> serializerMap = schemas .entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, x -> new AvroSerializer<>(groupId, schemaRegistryClient, x.getValue(), config.getCodec(), - config.isAutoRegisterSchema()))); + config.isRegisterSchema()))); return new MultiplexedSerializer<>(serializerMap); } @@ -238,7 +238,7 @@ public static Serializer protobufSerializer(SerializerCon autoCreateGroup(schemaRegistryClient, config); registerCodec(schemaRegistryClient, config); return new ProtobufSerializer<>(groupId, schemaRegistryClient, schemaData, config.getCodec(), - config.isAutoRegisterSchema()); + config.isRegisterSchema()); } /** @@ -312,7 +312,7 @@ public static Serializer protobufMultiTypeSeri Map, AbstractPravegaSerializer> serializerMap = schemas .entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, x -> new ProtobufSerializer<>(groupId, schemaRegistryClient, x.getValue(), config.getCodec(), - config.isAutoRegisterSchema()))); + config.isRegisterSchema()))); return new MultiplexedSerializer<>(serializerMap); } @@ -395,7 +395,7 @@ public static Serializer jsonSerializer(SerializerConfig config, JSONSche autoCreateGroup(schemaRegistryClient, config); registerCodec(schemaRegistryClient, config); return new JsonSerializer<>(groupId, schemaRegistryClient, schemaData, config.getCodec(), - config.isAutoRegisterSchema()); + config.isRegisterSchema()); } /** @@ -465,7 +465,7 @@ public static Serializer jsonMultiTypeSerializer( Map, AbstractPravegaSerializer> serializerMap = schemas .entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, x -> new JsonSerializer<>(groupId, schemaRegistryClient, x.getValue(), config.getCodec(), - 
config.isAutoRegisterSchema()))); + config.isRegisterSchema()))); return new MultiplexedSerializer<>(serializerMap); } @@ -547,7 +547,7 @@ public static Serializer customSerializer(SerializerConfig config, Schema autoCreateGroup(schemaRegistryClient, config); registerCodec(schemaRegistryClient, config); return new AbstractPravegaSerializer(groupId, schemaRegistryClient, - schema, config.getCodec(), config.isAutoRegisterSchema()) { + schema, config.getCodec(), config.isRegisterSchema()) { @Override protected void serialize(T var, SchemaInfo schema, OutputStream outputStream) { serializer.serialize(var, schema, outputStream); @@ -652,13 +652,13 @@ public static Serializer deserializerAsJsonString(SerializerConfig confi // endregion private static void autoCreateGroup(SchemaRegistryClient client, SerializerConfig config) { - if (config.isAutoCreateGroup()) { + if (config.isCreateGroup()) { client.addGroup(config.getGroupId(), config.getGroupProperties()); } } private static void registerCodec(SchemaRegistryClient client, SerializerConfig config) { - if (config.isAutoRegisterCodec()) { + if (config.isRegisterCodec()) { client.addCodecType(config.getGroupId(), config.getCodec().getCodecType()); } } From 4be3f403fc409b7db20c7cca5a56a060db45c56f Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Fri, 12 Jun 2020 00:37:44 -0700 Subject: [PATCH 10/70] PR comment Signed-off-by: Shivesh Ranjan --- build.gradle | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index 8747198ee..ae4ca69ff 100644 --- a/build.gradle +++ b/build.gradle @@ -123,7 +123,8 @@ project('common') { compile group: 'commons-io', name: 'commons-io', version: commonsioVersion compile group: 'com.google.guava', name: 'guava', version: guavaVersion compile group: 'io.pravega', name: 'pravega-common', version: pravegaVersion - //Do NOT add any additional dependencies here. + //Do NOT add any additional dependencies to common. All other sub projects depend on common and any project specific + //dependency should be added to the specific project. } javadoc { From a0a2db70e280dc693574b62fa1c35b52c909b15e Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Sun, 14 Jun 2020 18:33:26 -0700 Subject: [PATCH 11/70] marking interfaces as beta Signed-off-by: Shivesh Ranjan --- .../io/pravega/schemaregistry/client/SchemaRegistryClient.java | 2 ++ .../main/java/io/pravega/schemaregistry/contract/v1/ApiV1.java | 2 ++ 2 files changed, 4 insertions(+) diff --git a/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java index d0864b385..3853a7d47 100644 --- a/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java +++ b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java @@ -9,6 +9,7 @@ */ package io.pravega.schemaregistry.client; +import com.google.common.annotations.Beta; import io.pravega.schemaregistry.contract.data.EncodingId; import io.pravega.schemaregistry.contract.data.EncodingInfo; import io.pravega.schemaregistry.contract.data.GroupHistoryRecord; @@ -29,6 +30,7 @@ * Defines a registry client for interacting with schema registry service. * The implementation of this interface should provide atomicity and read-after-write-consistency guarantees for all the methods. */ +@Beta public interface SchemaRegistryClient { /** * Adds a new group. A group refers to the name under which the schemas are registered. 
A group is identified by a diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/v1/ApiV1.java b/contract/src/main/java/io/pravega/schemaregistry/contract/v1/ApiV1.java index ec1ab7a39..c89016bd7 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/v1/ApiV1.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/v1/ApiV1.java @@ -9,6 +9,7 @@ */ package io.pravega.schemaregistry.contract.v1; +import com.google.common.annotations.Beta; import io.pravega.schemaregistry.contract.generated.rest.model.AddedTo; import io.pravega.schemaregistry.contract.generated.rest.model.CanRead; import io.pravega.schemaregistry.contract.generated.rest.model.CodecTypesList; @@ -40,6 +41,7 @@ import javax.ws.rs.container.Suspended; import javax.ws.rs.core.Response; +@Beta public class ApiV1 { @Path("/ping") public interface Ping { From 3f64c11bcd7792d03d72d3fcae396af42ac618ad Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Tue, 16 Jun 2020 00:27:19 -0700 Subject: [PATCH 12/70] validation rules of list Signed-off-by: Shivesh Ranjan --- .../contract/data/SchemaValidationRules.java | 19 +------------------ .../contract/transform/ModelHelper.java | 7 ++++--- .../schemaregistry/contract/v1/ApiV1.java | 7 ++++--- 3 files changed, 9 insertions(+), 24 deletions(-) diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaValidationRules.java b/contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaValidationRules.java index 805171710..3725e0d0f 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaValidationRules.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaValidationRules.java @@ -9,15 +9,12 @@ */ package io.pravega.schemaregistry.contract.data; -import com.google.common.base.Preconditions; import io.pravega.common.ObjectBuilder; import lombok.Builder; import lombok.Data; import java.util.Collections; -import java.util.List; import java.util.Map; -import java.util.stream.Collectors; /** * Schema validation rules that are applied for checking if a schema is valid. @@ -46,21 +43,7 @@ private SchemaValidationRules(Map rules) { public static SchemaValidationRules of(Compatibility compatibility) { return new SchemaValidationRules(Collections.singletonMap(compatibility.getName(), compatibility)); } - - /** - * Method to create SchemaValidationRules from the list of supplied rules. If multiple same rule are present - * in the list then only the latest rule of each type is added to the Rules map. - * Currently the only rule supported is {@link Compatibility}. - * - * @param rules List of rules. - * @return SchemaValidationRules object. 
- */ - public static SchemaValidationRules of(List rules) { - Preconditions.checkNotNull(rules); - Preconditions.checkArgument(rules.stream().allMatch(x -> x instanceof Compatibility), "Only compatibility rule is supported."); - return new SchemaValidationRules(rules.stream().collect(Collectors.toMap(SchemaValidationRule::getName, x -> x))); - } - + public static class SchemaValidationRulesBuilder implements ObjectBuilder { } diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/transform/ModelHelper.java b/contract/src/main/java/io/pravega/schemaregistry/contract/transform/ModelHelper.java index fa261ab42..50ae9969e 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/transform/ModelHelper.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/transform/ModelHelper.java @@ -60,8 +60,7 @@ public static io.pravega.schemaregistry.contract.data.SerializationFormat decode } public static io.pravega.schemaregistry.contract.data.SchemaValidationRules decode(SchemaValidationRules rules) { - Preconditions.checkArgument(rules != null); - Preconditions.checkArgument(rules.getRules() != null); + Preconditions.checkArgument(rules != null && rules.getRules() != null && rules.getRules().size() == 1); List list = rules.getRules().entrySet().stream().map(rule -> { if (rule.getValue().getRule() instanceof Map) { String name = (String) ((Map) rule.getValue().getRule()).get("name"); @@ -74,7 +73,9 @@ public static io.pravega.schemaregistry.contract.data.SchemaValidationRules deco throw new IllegalArgumentException("Rule not supported"); } }).collect(Collectors.toList()); - return io.pravega.schemaregistry.contract.data.SchemaValidationRules.of(list); + + return io.pravega.schemaregistry.contract.data.SchemaValidationRules.of( + (io.pravega.schemaregistry.contract.data.Compatibility) list.get(0)); } public static io.pravega.schemaregistry.contract.data.Compatibility decode(Compatibility compatibility) { diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/v1/ApiV1.java b/contract/src/main/java/io/pravega/schemaregistry/contract/v1/ApiV1.java index c89016bd7..57026dc2e 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/v1/ApiV1.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/v1/ApiV1.java @@ -41,7 +41,6 @@ import javax.ws.rs.container.Suspended; import javax.ws.rs.core.Response; -@Beta public class ApiV1 { @Path("/ping") public interface Ping { @@ -55,6 +54,7 @@ public interface Ping { */ @Path("/v1/groups") @io.swagger.annotations.Api(description = "the groups API") + @Beta public interface GroupsApi { @POST @Path("/{groupName}/codecTypes") @@ -284,6 +284,7 @@ Response validate(@ApiParam(value = "Group name", required = true) @PathParam("g */ @Path("/v1/groups") @io.swagger.annotations.Api(description = "the groups API") + @Beta public interface GroupsApiAsync { @POST @Path("/{groupName}/codecTypes") @@ -512,13 +513,13 @@ void validate(@ApiParam(value = "Group name", required = true) @PathParam("group @ApiParam(value = "Checks if schema is valid with respect to supplied validation rules", required = true) ValidateRequest validateRequest, @Suspended AsyncResponse asyncResponse); } - /** * Sync Schemas apis. Identical to {@link SchemasApiAsync}. All methods in this interface are synchronous and return {@link Response} object. * The purposes of this interface is to be used by proxy-client. 
*/ @Path("/v1/schemas") @io.swagger.annotations.Api(description = "the schemas API") + @Beta public interface SchemasApi { @POST @Path("/addedTo") @@ -538,6 +539,7 @@ public interface SchemasApi { */ @Path("/v1/schemas") @io.swagger.annotations.Api(description = "the schemas API") + @Beta public interface SchemasApiAsync { @POST @Path("/addedTo") @@ -551,5 +553,4 @@ public interface SchemasApiAsync { void getSchemaReferences(@ApiParam(value = "Get schema references for the supplied schema", required = true) SchemaInfo schemaInfo, @Suspended AsyncResponse asyncResponse); } - } From a449b4bf0dcaed67d316751a758ec3814c271df4 Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Tue, 16 Jun 2020 04:15:21 -0700 Subject: [PATCH 13/70] remove Validation rules of list method Signed-off-by: Shivesh Ranjan --- .../contract/data/Compatibility.java | 2 +- .../contract/data/SchemaValidationRules.java | 19 +------------------ .../contract/transform/ModelHelper.java | 13 +++++++------ .../schemaregistry/contract/v1/ApiV1.java | 7 ++++--- .../contract/transform/ModelHelperTest.java | 8 ++++++-- 5 files changed, 19 insertions(+), 30 deletions(-) diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/data/Compatibility.java b/contract/src/main/java/io/pravega/schemaregistry/contract/data/Compatibility.java index f8717337c..278583539 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/data/Compatibility.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/data/Compatibility.java @@ -56,7 +56,7 @@ private Compatibility(Type compatibility) { this(compatibility, null, null); } - public Compatibility(Type compatibility, VersionInfo backwardTill, VersionInfo forwardTill) { + private Compatibility(Type compatibility, VersionInfo backwardTill, VersionInfo forwardTill) { this.compatibility = compatibility; this.backwardTill = backwardTill; this.forwardTill = forwardTill; diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaValidationRules.java b/contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaValidationRules.java index 805171710..3725e0d0f 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaValidationRules.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaValidationRules.java @@ -9,15 +9,12 @@ */ package io.pravega.schemaregistry.contract.data; -import com.google.common.base.Preconditions; import io.pravega.common.ObjectBuilder; import lombok.Builder; import lombok.Data; import java.util.Collections; -import java.util.List; import java.util.Map; -import java.util.stream.Collectors; /** * Schema validation rules that are applied for checking if a schema is valid. @@ -46,21 +43,7 @@ private SchemaValidationRules(Map rules) { public static SchemaValidationRules of(Compatibility compatibility) { return new SchemaValidationRules(Collections.singletonMap(compatibility.getName(), compatibility)); } - - /** - * Method to create SchemaValidationRules from the list of supplied rules. If multiple same rule are present - * in the list then only the latest rule of each type is added to the Rules map. - * Currently the only rule supported is {@link Compatibility}. - * - * @param rules List of rules. - * @return SchemaValidationRules object. 
- */ - public static SchemaValidationRules of(List rules) { - Preconditions.checkNotNull(rules); - Preconditions.checkArgument(rules.stream().allMatch(x -> x instanceof Compatibility), "Only compatibility rule is supported."); - return new SchemaValidationRules(rules.stream().collect(Collectors.toMap(SchemaValidationRule::getName, x -> x))); - } - + public static class SchemaValidationRulesBuilder implements ObjectBuilder { } diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/transform/ModelHelper.java b/contract/src/main/java/io/pravega/schemaregistry/contract/transform/ModelHelper.java index fa261ab42..47beb9d28 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/transform/ModelHelper.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/transform/ModelHelper.java @@ -60,8 +60,7 @@ public static io.pravega.schemaregistry.contract.data.SerializationFormat decode } public static io.pravega.schemaregistry.contract.data.SchemaValidationRules decode(SchemaValidationRules rules) { - Preconditions.checkArgument(rules != null); - Preconditions.checkArgument(rules.getRules() != null); + Preconditions.checkArgument(rules != null && rules.getRules() != null && rules.getRules().size() == 1); List list = rules.getRules().entrySet().stream().map(rule -> { if (rule.getValue().getRule() instanceof Map) { String name = (String) ((Map) rule.getValue().getRule()).get("name"); @@ -74,7 +73,9 @@ public static io.pravega.schemaregistry.contract.data.SchemaValidationRules deco throw new IllegalArgumentException("Rule not supported"); } }).collect(Collectors.toList()); - return io.pravega.schemaregistry.contract.data.SchemaValidationRules.of(list); + + return io.pravega.schemaregistry.contract.data.SchemaValidationRules.of( + (io.pravega.schemaregistry.contract.data.Compatibility) list.get(0)); } public static io.pravega.schemaregistry.contract.data.Compatibility decode(Compatibility compatibility) { @@ -94,9 +95,9 @@ public static io.pravega.schemaregistry.contract.data.Compatibility decode(Compa io.pravega.schemaregistry.contract.data.VersionInfo backwardTill = compatibility.getBackwardTill() == null ? null : decode(compatibility.getBackwardTill()); io.pravega.schemaregistry.contract.data.VersionInfo forwardTill = compatibility.getForwardTill() == null ? 
null : decode(compatibility.getForwardTill()); - return new io.pravega.schemaregistry.contract.data.Compatibility( - searchEnum(io.pravega.schemaregistry.contract.data.Compatibility.Type.class, compatibility.getPolicy().name()), - backwardTill, forwardTill); + return io.pravega.schemaregistry.contract.data.Compatibility.builder().compatibility( + searchEnum(io.pravega.schemaregistry.contract.data.Compatibility.Type.class, compatibility.getPolicy().name())) + .backwardTill(backwardTill).forwardTill(forwardTill).build(); } public static io.pravega.schemaregistry.contract.data.VersionInfo decode(VersionInfo versionInfo) { diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/v1/ApiV1.java b/contract/src/main/java/io/pravega/schemaregistry/contract/v1/ApiV1.java index c89016bd7..57026dc2e 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/v1/ApiV1.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/v1/ApiV1.java @@ -41,7 +41,6 @@ import javax.ws.rs.container.Suspended; import javax.ws.rs.core.Response; -@Beta public class ApiV1 { @Path("/ping") public interface Ping { @@ -55,6 +54,7 @@ public interface Ping { */ @Path("/v1/groups") @io.swagger.annotations.Api(description = "the groups API") + @Beta public interface GroupsApi { @POST @Path("/{groupName}/codecTypes") @@ -284,6 +284,7 @@ Response validate(@ApiParam(value = "Group name", required = true) @PathParam("g */ @Path("/v1/groups") @io.swagger.annotations.Api(description = "the groups API") + @Beta public interface GroupsApiAsync { @POST @Path("/{groupName}/codecTypes") @@ -512,13 +513,13 @@ void validate(@ApiParam(value = "Group name", required = true) @PathParam("group @ApiParam(value = "Checks if schema is valid with respect to supplied validation rules", required = true) ValidateRequest validateRequest, @Suspended AsyncResponse asyncResponse); } - /** * Sync Schemas apis. Identical to {@link SchemasApiAsync}. All methods in this interface are synchronous and return {@link Response} object. * The purposes of this interface is to be used by proxy-client. 
*/ @Path("/v1/schemas") @io.swagger.annotations.Api(description = "the schemas API") + @Beta public interface SchemasApi { @POST @Path("/addedTo") @@ -538,6 +539,7 @@ public interface SchemasApi { */ @Path("/v1/schemas") @io.swagger.annotations.Api(description = "the schemas API") + @Beta public interface SchemasApiAsync { @POST @Path("/addedTo") @@ -551,5 +553,4 @@ public interface SchemasApiAsync { void getSchemaReferences(@ApiParam(value = "Get schema references for the supplied schema", required = true) SchemaInfo schemaInfo, @Suspended AsyncResponse asyncResponse); } - } diff --git a/contract/src/test/java/io/pravega/schemaregistry/contract/transform/ModelHelperTest.java b/contract/src/test/java/io/pravega/schemaregistry/contract/transform/ModelHelperTest.java index c724715db..c9be6894f 100644 --- a/contract/src/test/java/io/pravega/schemaregistry/contract/transform/ModelHelperTest.java +++ b/contract/src/test/java/io/pravega/schemaregistry/contract/transform/ModelHelperTest.java @@ -16,6 +16,7 @@ import io.pravega.schemaregistry.contract.generated.rest.model.EncodingInfo; import io.pravega.schemaregistry.contract.generated.rest.model.GroupProperties; import io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo; +import io.pravega.schemaregistry.contract.generated.rest.model.SchemaValidationRule; import io.pravega.schemaregistry.contract.generated.rest.model.SchemaValidationRules; import io.pravega.schemaregistry.contract.generated.rest.model.SchemaWithVersion; import io.pravega.schemaregistry.contract.generated.rest.model.SerializationFormat; @@ -32,7 +33,9 @@ public class ModelHelperTest { @Test public void testDecode() { SerializationFormat type = new SerializationFormat().serializationFormat(SerializationFormat.SerializationFormatEnum.CUSTOM).customTypeName("a"); - SchemaValidationRules rules = new SchemaValidationRules().rules(Collections.emptyMap()); + SchemaValidationRules rules = new SchemaValidationRules().rules(Collections.singletonMap(Compatibility.class.getSimpleName(), + new SchemaValidationRule().rule(new Compatibility().name(Compatibility.class.getSimpleName()) + .policy(Compatibility.PolicyEnum.BACKWARD)))); SchemaInfo schema = new SchemaInfo() .type("a").serializationFormat(type).schemaData(new byte[0]).properties(Collections.emptyMap()); VersionInfo version = new VersionInfo().type("a").version(1).ordinal(1); @@ -55,7 +58,8 @@ public void testDecode() { assertEquals(compatibilityDecoded.getCompatibility(), io.pravega.schemaregistry.contract.data.Compatibility.Type.BackwardAndForwardTill); io.pravega.schemaregistry.contract.data.SchemaValidationRules rulesDecoded = ModelHelper.decode(rules); - assertEquals(rulesDecoded.getRules().size(), 0); + assertEquals(rulesDecoded.getRules().size(), 1); + assertEquals(rulesDecoded.getRules().values().iterator().next().getName(), Compatibility.class.getSimpleName()); io.pravega.schemaregistry.contract.data.VersionInfo versionInfo = ModelHelper.decode(version); assertEquals(versionInfo.getType(), version.getType()); From 9a504649ddde6f2656bcd2ba36348d7fb79b905e Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Tue, 16 Jun 2020 06:27:24 -0700 Subject: [PATCH 14/70] PR comment Signed-off-by: Shivesh Ranjan --- .../pravega/schemaregistry/client/SchemaRegistryClient.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java index 
3853a7d47..95f457fcf 100644 --- a/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java +++ b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java @@ -34,9 +34,9 @@ public interface SchemaRegistryClient { /** * Adds a new group. A group refers to the name under which the schemas are registered. A group is identified by a - * unique name and has an associated set of group metadata {@link GroupProperties} and a list of codec types and a + * unique id and has an associated set of group metadata {@link GroupProperties} and a list of codec types and a * versioned history of schemas that were registered under the group. - * Add group is idempotent. If the group by the same name already exists the api will return false. + * Add group is idempotent. If the group by the same id already exists the api will return false. * * @param groupId Id for the group that uniquely identifies the group. * @param groupProperties groupProperties Group properties for the group. These include serialization format, validation rules, @@ -108,7 +108,7 @@ boolean updateSchemaValidationRules(String groupId, SchemaValidationRules valida * Schemas are retrieved atomically. So all schemas added before this call will be returned by this call. * * @param groupId Id for the group. - * @return List of different objects within the group. + * @return Unordered list of different objects within the group. * @throws ResourceNotFoundException if group is not found. * @throws UnauthorizedException if the user is unauthorized. */ From 8cc3091a40bc8316f6b7cf87ae74df0c0a79979e Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Wed, 17 Jun 2020 02:47:51 -0700 Subject: [PATCH 15/70] PR comment Signed-off-by: Shivesh Ranjan --- .../schemaregistry/client/SchemaRegistryClient.java | 3 ++- .../pravega/schemaregistry/contract/data/VersionInfo.java | 7 +++++-- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java index 95f457fcf..f5e4d9a22 100644 --- a/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java +++ b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java @@ -121,7 +121,8 @@ boolean updateSchemaValidationRules(String groupId, SchemaValidationRules valida * type {@link SchemaInfo#type} could be registered. * All schemas with same type are assigned monotonically increasing version numbers. * Implementation of this method is expected to be idempotent. The behaviour of Add Schema API on the schema registry - * service is idempotent. If a schema is already registered, its version info is returned by the service. + * service is idempotent. The service assigns and returns a new version info object to identify the given schema. + * If a schema was already registered, the existing version info is returned by the service. * * @param groupId Id for the group. * @param schemaInfo Schema to add. 
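The idempotent addSchema contract clarified above can be exercised directly against the client interface: registering the same schema twice should yield the same version information. A minimal sketch, assuming a SchemaRegistryClient instance and the Avro schema bytes are supplied by the caller; the group name "mygroup" and type name "User" are placeholders (illustrative only, not part of this patch):

import com.google.common.collect.ImmutableMap;
import io.pravega.schemaregistry.client.SchemaRegistryClient;
import io.pravega.schemaregistry.contract.data.SchemaInfo;
import io.pravega.schemaregistry.contract.data.SerializationFormat;
import io.pravega.schemaregistry.contract.data.VersionInfo;

import java.nio.ByteBuffer;

// Sketch only: client construction and schema bytes are assumed to come from the caller.
public class AddSchemaSketch {
    public static void registerTwice(SchemaRegistryClient client, ByteBuffer avroSchemaBytes) {
        SchemaInfo schema = new SchemaInfo("User", SerializationFormat.Avro, avroSchemaBytes, ImmutableMap.of());
        // First call registers the schema; the service assigns a version for this type.
        VersionInfo first = client.addSchema("mygroup", schema);
        // Second call with the same schema is expected to return the previously assigned version.
        VersionInfo second = client.addSchema("mygroup", schema);
        assert first.getVersion() == second.getVersion() && first.getType().equals(second.getType());
    }
}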
diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/data/VersionInfo.java b/contract/src/main/java/io/pravega/schemaregistry/contract/data/VersionInfo.java index c281e75e3..3edde4c11 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/data/VersionInfo.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/data/VersionInfo.java @@ -16,12 +16,15 @@ /** * Version information object that encapsulates properties that uniquely identify a specific version of a schema within a group. + * This is generated by the service when a new schema is added to the group. It identifies the schema that is added and includes + * the version and ordinal assigned by the service to the schema. * * {@link VersionInfo#type} is same as {@link SchemaInfo#type} which represents the object type for which the version is computed. * {@link VersionInfo#version} the registry assigned monotonically increasing version number for the schema for specific object type. * Since the version number is per object type, so type and version number forms a unique pair. - * {@link VersionInfo#ordinal} Absolute ordinal of the schema for all schemas in the group. This uniquely identifies the - * version within a group. + * {@link VersionInfo#ordinal} This is an Id that is assigned by the service and uniquely identifies the schema version within + * the group. The ordinals are assigned as monotonically increasing numbers for all schemas added to the group. + * So the ordinal also represents the absolute order of the schema for all schemas in the group. */ @Data @Builder From 5a7d6396343d2e54cec9b2ca77616e8ff385c2d9 Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Thu, 18 Jun 2020 05:03:34 -0700 Subject: [PATCH 16/70] Removing schema validation rules Signed-off-by: Shivesh Ranjan --- .../client/SchemaRegistryClient.java | 22 +- .../client/SchemaRegistryClientImpl.java | 28 +-- .../client/TestSchemaRegistryClient.java | 43 ++-- .../contract/data/BackwardAndForward.java | 109 +++++++++ .../contract/data/Compatibility.java | 199 ++++++++-------- .../contract/data/GroupHistoryRecord.java | 6 +- .../contract/data/GroupProperties.java | 20 +- .../contract/data/SchemaValidationRule.java | 23 -- .../contract/data/SchemaValidationRules.java | 50 ---- .../contract/data/SerializationFormat.java | 2 +- .../contract/data/VersionInfo.java | 10 +- .../generated/rest/model/AddedTo.java | 4 +- .../generated/rest/model/Backward.java | 92 ++++++++ .../rest/model/BackwardAndForward.java | 117 ++++++++++ .../generated/rest/model/BackwardPolicy.java | 92 ++++++++ .../generated/rest/model/BackwardTill.java | 118 ++++++++++ .../rest/model/BackwardTransitive.java | 92 ++++++++ .../generated/rest/model/CanRead.java | 4 +- .../generated/rest/model/Compatibility.java | 105 ++------- .../generated/rest/model/Forward.java | 92 ++++++++ .../generated/rest/model/ForwardPolicy.java | 92 ++++++++ .../generated/rest/model/ForwardTill.java | 118 ++++++++++ ...dationRule.java => ForwardTransitive.java} | 40 ++-- .../rest/model/GroupHistoryRecord.java | 36 +-- .../generated/rest/model/GroupProperties.java | 32 +-- .../generated/rest/model/SchemaInfo.java | 2 +- .../rest/model/SchemaValidationRules.java | 103 --------- .../model/UpdateCompatibilityRequest.java | 116 ++++++++++ .../model/UpdateValidationRulesRequest.java | 116 ---------- .../contract/generated/rest/model/Valid.java | 4 +- .../generated/rest/model/ValidateRequest.java | 30 +-- .../generated/rest/model/VersionInfo.java | 30 +-- 
.../contract/transform/ModelHelper.java | 218 ++++++++++++------ .../schemaregistry/contract/v1/ApiV1.java | 96 ++++---- contract/src/main/swagger/README.md | 2 +- contract/src/main/swagger/SchemaRegistry.yaml | 212 +++++++++++------ .../contract/transform/ModelHelperTest.java | 152 +++++++++--- 37 files changed, 1769 insertions(+), 858 deletions(-) create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/data/BackwardAndForward.java delete mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaValidationRule.java delete mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaValidationRules.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/Backward.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/BackwardAndForward.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/BackwardPolicy.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/BackwardTill.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/BackwardTransitive.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/Forward.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/ForwardPolicy.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/ForwardTill.java rename contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/{SchemaValidationRule.java => ForwardTransitive.java} (54%) delete mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaValidationRules.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/UpdateCompatibilityRequest.java delete mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/UpdateValidationRulesRequest.java diff --git a/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java index f5e4d9a22..238b4b80f 100644 --- a/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java +++ b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java @@ -10,12 +10,12 @@ package io.pravega.schemaregistry.client; import com.google.common.annotations.Beta; +import io.pravega.schemaregistry.contract.data.Compatibility; import io.pravega.schemaregistry.contract.data.EncodingId; import io.pravega.schemaregistry.contract.data.EncodingInfo; import io.pravega.schemaregistry.contract.data.GroupHistoryRecord; import io.pravega.schemaregistry.contract.data.GroupProperties; import io.pravega.schemaregistry.contract.data.SchemaInfo; -import io.pravega.schemaregistry.contract.data.SchemaValidationRules; import io.pravega.schemaregistry.contract.data.SchemaWithVersion; import io.pravega.schemaregistry.contract.data.VersionInfo; @@ -75,7 +75,7 @@ public interface SchemaRegistryClient { * Get group properties for the group identified by the group id. * * {@link GroupProperties#serializationFormat} which identifies the serialization format is used to describe the schema. 
- * {@link GroupProperties#schemaValidationRules} sets the schema validation policy that needs to be enforced for evolving schemas. + * {@link GroupProperties#compatibility} sets the schema validation policy that needs to be enforced for evolving schemas. * {@link GroupProperties#allowMultipleTypes} that specifies if multiple schemas are allowed to be registered in the group. * Schemas are validated against existing schema versions that have the same {@link SchemaInfo#type}. * {@link GroupProperties#properties} describes generic properties for a group. @@ -90,17 +90,17 @@ public interface SchemaRegistryClient { /** * Update group's schema validation policy. If previous rules are not supplied, then the update to the rules will be * performed unconditionally. However, if previous rules are supplied, then the update will be performed if and only if - * existing {@link GroupProperties#schemaValidationRules} match previous rules. + * existing {@link GroupProperties#compatibility} match previous rules. * * @param groupId Id for the group. - * @param validationRules New Schema validation rules for the group. - * @param previousRules Previous schema validation rules. + * @param validationRules New Compatibility for the group. + * @param previousRules Previous compatibility. * @return true if the update was accepted by the service, false if it was rejected because of precondition failure. * Precondition failure can occur if previous rules were specified and they do not match the rules set on the group. * @throws ResourceNotFoundException if group is not found. * @throws UnauthorizedException if the user is unauthorized. */ - boolean updateSchemaValidationRules(String groupId, SchemaValidationRules validationRules, @Nullable SchemaValidationRules previousRules) + boolean updateCompatibility(String groupId, Compatibility validationRules, @Nullable Compatibility previousRules) throws ResourceNotFoundException, UnauthorizedException; /** @@ -128,7 +128,7 @@ boolean updateSchemaValidationRules(String groupId, SchemaValidationRules valida * @param schemaInfo Schema to add. * @return versionInfo which uniquely identifies where the schema is added in the group. If schema is already registered, * then the existing version info is returned. - * @throws SchemaValidationFailedException if the schema is deemed invalid by applying schema validation rules which may + * @throws SchemaValidationFailedException if the schema is deemed invalid by applying compatibility which may * include comparing schema with existing schemas for compatibility in the desired direction. * @throws SerializationMismatchException if serialization format does not match the group's configured serialization format. * @throws MalformedSchemaException for known serialization formats, if the service is unable to parse the schema binary or @@ -289,15 +289,15 @@ SchemaWithVersion getLatestSchemaVersion(String groupId, @Nullable String schema /** * Checks whether given schema is valid by applying validation rules against previous schemas in the group - * subject to current {@link GroupProperties#schemaValidationRules} policy. + * subject to current {@link GroupProperties#compatibility} policy. * The invocation of this method will perform exactly the same validations as {@link SchemaRegistryClient#addSchema(String, SchemaInfo)} * but without registering the schema. This is primarily intended to be used during schema development phase to validate that * the changes to schema are in compliance with validation rules for the group. 
* * @param groupId Id for the group. * @param schemaInfo Schema to check for validity. - * @return A schema is valid if it passes all the {@link GroupProperties#schemaValidationRules}. The rule supported - * presently, is Compatibility. If desired compatibility is satisfied by the schema then this method returns true, false otherwise. + * @return A schema is valid if it passes all the {@link GroupProperties#compatibility}. The rule supported + * are allow any, deny all or a combination of BackwardAndForward. If desired compatibility is satisfied by the schema then this method returns true, false otherwise. * @throws ResourceNotFoundException if group is not found. * @throws UnauthorizedException if the user is unauthorized. */ @@ -305,7 +305,7 @@ SchemaWithVersion getLatestSchemaVersion(String groupId, @Nullable String schema /** * Checks whether given schema can be used to read by validating it for reads against one or more existing schemas in the group - * subject to current {@link GroupProperties#schemaValidationRules} policy. + * subject to current {@link GroupProperties#compatibility} policy. * * @param groupId Id for the group. * @param schemaInfo Schema to check to be used for reads. diff --git a/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientImpl.java b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientImpl.java index 5e46b69c1..5314e89db 100644 --- a/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientImpl.java +++ b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientImpl.java @@ -1,10 +1,10 @@ /** * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. - * + * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 */ package io.pravega.schemaregistry.client; @@ -13,12 +13,12 @@ import io.pravega.common.Exceptions; import io.pravega.common.util.Retry; import io.pravega.schemaregistry.common.ContinuationTokenIterator; +import io.pravega.schemaregistry.contract.data.Compatibility; import io.pravega.schemaregistry.contract.data.EncodingId; import io.pravega.schemaregistry.contract.data.EncodingInfo; import io.pravega.schemaregistry.contract.data.GroupHistoryRecord; import io.pravega.schemaregistry.contract.data.GroupProperties; import io.pravega.schemaregistry.contract.data.SchemaInfo; -import io.pravega.schemaregistry.contract.data.SchemaValidationRules; import io.pravega.schemaregistry.contract.data.SchemaWithVersion; import io.pravega.schemaregistry.contract.data.VersionInfo; import io.pravega.schemaregistry.contract.generated.rest.model.CanRead; @@ -27,7 +27,7 @@ import io.pravega.schemaregistry.contract.generated.rest.model.GetEncodingIdRequest; import io.pravega.schemaregistry.contract.generated.rest.model.ListGroupsResponse; import io.pravega.schemaregistry.contract.generated.rest.model.SchemaVersionsList; -import io.pravega.schemaregistry.contract.generated.rest.model.UpdateValidationRulesRequest; +import io.pravega.schemaregistry.contract.generated.rest.model.UpdateCompatibilityRequest; import io.pravega.schemaregistry.contract.generated.rest.model.Valid; import io.pravega.schemaregistry.contract.generated.rest.model.ValidateRequest; import io.pravega.schemaregistry.contract.transform.ModelHelper; @@ -91,7 +91,7 @@ public boolean addGroup(String groupId, GroupProperties groupProperties) { case CONFLICT: return false; case BAD_REQUEST: - throw new BadArgumentException("Group properties invalid. Verify that schema validation rules include compatibility."); + throw new BadArgumentException("Group properties invalid."); default: throw new InternalServerError("Internal Service error. Failed to add the group."); } @@ -155,15 +155,15 @@ public GroupProperties getGroupProperties(String groupId) { } @Override - public boolean updateSchemaValidationRules(String groupId, SchemaValidationRules validationRules, @Nullable SchemaValidationRules previousRules) { + public boolean updateCompatibility(String groupId, Compatibility compatibility, @Nullable Compatibility previousRules) { return withRetry(() -> { - UpdateValidationRulesRequest request = new UpdateValidationRulesRequest() - .validationRules(ModelHelper.encode(validationRules)); + UpdateCompatibilityRequest request = new UpdateCompatibilityRequest() + .compatibility(ModelHelper.encode(compatibility)); if (previousRules != null) { - request.setPreviousRules(ModelHelper.encode(previousRules)); + request.setPreviousCompatibility(ModelHelper.encode(previousRules)); } - Response response = groupProxy.updateSchemaValidationRules(groupId, request); + Response response = groupProxy.updateCompatibility(groupId, request); switch (Response.Status.fromStatusCode(response.getStatus())) { case CONFLICT: return false; @@ -172,7 +172,7 @@ public boolean updateSchemaValidationRules(String groupId, SchemaValidationRules case OK: return true; default: - throw new InternalServerError("Internal Service error. Failed to update schema validation rules."); + throw new InternalServerError("Internal Service error. 
Failed to update compatibility."); } }); } @@ -221,7 +221,7 @@ public VersionInfo addSchema(String groupId, SchemaInfo schemaInfo) { @Override public void deleteSchemaVersion(String groupId, VersionInfo versionInfo) { withRetry(() -> { - Response response = groupProxy.deleteSchemaFromVersionOrdinal(groupId, versionInfo.getOrdinal()); + Response response = groupProxy.deleteSchemaForId(groupId, versionInfo.getId()); if (response.getStatus() == Response.Status.NOT_FOUND.getStatusCode()) { throw new ResourceNotFoundException("Group not found."); } else if (response.getStatus() != Response.Status.NO_CONTENT.getStatusCode()) { @@ -245,7 +245,7 @@ public void deleteSchemaVersion(String groupId, String schemaType, int version) @Override public SchemaInfo getSchemaForVersion(String groupId, VersionInfo versionInfo) { return withRetry(() -> { - Response response = groupProxy.getSchemaFromVersionOrdinal(groupId, versionInfo.getOrdinal()); + Response response = groupProxy.getSchemaForId(groupId, versionInfo.getId()); switch (Response.Status.fromStatusCode(response.getStatus())) { case OK: return ModelHelper.decode(response.readEntity(io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo.class)); @@ -311,7 +311,7 @@ public EncodingId getEncodingId(String groupId, VersionInfo versionInfo, String public SchemaWithVersion getLatestSchemaVersion(String groupId, @Nullable String schemaType) { List list = latestSchemas(groupId, schemaType); if (schemaType == null) { - return list.stream().max(Comparator.comparingInt(x -> x.getVersionInfo().getOrdinal())).orElse(null); + return list.stream().max(Comparator.comparingInt(x -> x.getVersionInfo().getId())).orElse(null); } else { return list.get(0); } diff --git a/client/src/test/java/io/pravega/schemaregistry/client/TestSchemaRegistryClient.java b/client/src/test/java/io/pravega/schemaregistry/client/TestSchemaRegistryClient.java index f4427b183..d80b4ce84 100644 --- a/client/src/test/java/io/pravega/schemaregistry/client/TestSchemaRegistryClient.java +++ b/client/src/test/java/io/pravega/schemaregistry/client/TestSchemaRegistryClient.java @@ -11,11 +11,11 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; +import io.pravega.schemaregistry.contract.data.BackwardAndForward; import io.pravega.schemaregistry.contract.data.Compatibility; import io.pravega.schemaregistry.contract.data.EncodingId; import io.pravega.schemaregistry.contract.data.EncodingInfo; import io.pravega.schemaregistry.contract.data.SchemaInfo; -import io.pravega.schemaregistry.contract.data.SchemaValidationRules; import io.pravega.schemaregistry.contract.data.SchemaWithVersion; import io.pravega.schemaregistry.contract.data.SerializationFormat; import io.pravega.schemaregistry.contract.data.VersionInfo; @@ -54,7 +54,7 @@ public void testGroup() { // add group // 1. 
success response code io.pravega.schemaregistry.contract.data.GroupProperties groupProperties = new io.pravega.schemaregistry.contract.data.GroupProperties( - SerializationFormat.Avro, SchemaValidationRules.of(Compatibility.backward()), true); + SerializationFormat.Avro, Compatibility.backward(), true); doReturn(response).when(proxy).createGroup(any()); doReturn(Response.Status.CREATED.getStatusCode()).when(response).getStatus(); boolean addGroup = client.addGroup("grp1", groupProperties); @@ -79,7 +79,7 @@ public void testGroup() { GroupProperties mygroup = new GroupProperties().properties(Collections.emptyMap()) .serializationFormat(new io.pravega.schemaregistry.contract.generated.rest.model.SerializationFormat() .serializationFormat(io.pravega.schemaregistry.contract.generated.rest.model.SerializationFormat.SerializationFormatEnum.ANY)) - .schemaValidationRules(ModelHelper.encode(SchemaValidationRules.of(Compatibility.backward()))) + .compatibility(ModelHelper.encode(Compatibility.backward())) .allowMultipleTypes(false); String groupName = "mygroup"; ListGroupsResponse groupList = new ListGroupsResponse().groups(Collections.singletonMap(groupName, mygroup)).continuationToken("token"); @@ -92,7 +92,7 @@ public void testGroup() { Map.Entry group = groups.stream().filter(x -> x.getKey().equals(groupName)).findAny().orElseThrow(RuntimeException::new); assertEquals(group.getValue().getSerializationFormat(), SerializationFormat.Any); - assertEquals(group.getValue().getSchemaValidationRules().getRules().get(Compatibility.class.getSimpleName()), Compatibility.backward()); + assertTrue(group.getValue().getCompatibility().getBackwardAndForward().getBackwardPolicy() instanceof BackwardAndForward.Backward); reset(response); } @@ -107,7 +107,7 @@ public void testListGroup() { GroupProperties mygroup = new GroupProperties().properties(Collections.emptyMap()) .serializationFormat(new io.pravega.schemaregistry.contract.generated.rest.model.SerializationFormat() .serializationFormat(io.pravega.schemaregistry.contract.generated.rest.model.SerializationFormat.SerializationFormatEnum.ANY)) - .schemaValidationRules(ModelHelper.encode(SchemaValidationRules.of(Compatibility.backward()))) + .compatibility(ModelHelper.encode(Compatibility.backward())) .allowMultipleTypes(false); String groupId = "mygroup"; ListGroupsResponse groupList = new ListGroupsResponse().groups(Collections.singletonMap(groupId, mygroup)).continuationToken("token"); @@ -125,7 +125,7 @@ public void testListGroup() { Map.Entry group = groups.stream().filter(x -> x.getKey().equals(groupId)).findAny().orElseThrow(RuntimeException::new); assertEquals(group.getValue().getSerializationFormat(), SerializationFormat.Any); - assertEquals(group.getValue().getSchemaValidationRules().getRules().get(Compatibility.class.getSimpleName()), Compatibility.backward()); + assertTrue(group.getValue().getCompatibility().getBackwardAndForward().getBackwardPolicy() instanceof BackwardAndForward.Backward); // Runtime Exception doReturn(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode()).when(response).getStatus(); @@ -161,13 +161,12 @@ public void testGetGroupProperties() { .serializationFormat(new io.pravega.schemaregistry.contract.generated.rest.model.SerializationFormat() .serializationFormat( io.pravega.schemaregistry.contract.generated.rest.model.SerializationFormat.SerializationFormatEnum.ANY)) - .schemaValidationRules(ModelHelper.encode(SchemaValidationRules.of(Compatibility.backward()))) + .compatibility(ModelHelper.encode(Compatibility.backward())) 
.allowMultipleTypes(false); doReturn(mygroup).when(response).readEntity(eq(GroupProperties.class)); io.pravega.schemaregistry.contract.data.GroupProperties groupProperties = client.getGroupProperties("mygroup"); assertEquals(groupProperties.getSerializationFormat(), SerializationFormat.Any); - assertEquals(groupProperties.getSchemaValidationRules().getRules().get(Compatibility.class.getSimpleName()), - Compatibility.backward()); + assertTrue(groupProperties.getCompatibility().getBackwardAndForward().getBackwardPolicy() instanceof BackwardAndForward.Backward); // ResourceNotFoundException doReturn(Response.Status.NOT_FOUND.getStatusCode()).when(response).getStatus(); AssertExtensions.assertThrows("An exception should have been thrown", () -> client.getGroupProperties( @@ -179,28 +178,28 @@ public void testGetGroupProperties() { } @Test - public void testUpdateSchemaValidationRules() { + public void testUpdateCompatibility() { ApiV1.GroupsApi proxy = mock(ApiV1.GroupsApi.class); SchemaRegistryClientImpl client = new SchemaRegistryClientImpl(proxy); Response response = mock(Response.class); - doReturn(response).when(proxy).updateSchemaValidationRules(anyString(), any()); + doReturn(response).when(proxy).updateCompatibility(anyString(), any()); doReturn(Response.Status.OK.getStatusCode()).when(response).getStatus(); - SchemaValidationRules schemaValidationRules = SchemaValidationRules.of(Compatibility.backward()); - client.updateSchemaValidationRules("mygroup", schemaValidationRules, null); + Compatibility compatibility = Compatibility.backward(); + client.updateCompatibility("mygroup", compatibility, null); assertEquals(response.getStatus(), Response.Status.OK.getStatusCode()); // Precondition Failed doReturn(Response.Status.CONFLICT.getStatusCode()).when(response).getStatus(); - assertFalse(client.updateSchemaValidationRules("mygroup", schemaValidationRules, null)); + assertFalse(client.updateCompatibility("mygroup", compatibility, null)); // NotFound exception doReturn(Response.Status.NOT_FOUND.getStatusCode()).when(response).getStatus(); AssertExtensions.assertThrows("An exception should have been thrown", - () -> client.updateSchemaValidationRules("mygroup", schemaValidationRules, null), + () -> client.updateCompatibility("mygroup", compatibility, null), e -> e instanceof ResourceNotFoundException); // Runtime Exception doReturn(Response.Status.EXPECTATION_FAILED.getStatusCode()).when(response).getStatus(); AssertExtensions.assertThrows("An exception should have been thrown", - () -> client.updateSchemaValidationRules("mygroup", schemaValidationRules, null), + () -> client.updateCompatibility("mygroup", compatibility, null), e -> e instanceof InternalServerError); } @@ -247,13 +246,13 @@ public void testAddSchema() { SchemaInfo schemaInfo = new SchemaInfo("schema1", serializationFormat, schemaData, ImmutableMap.of()); io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo versionInfo = new io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo().version( - 5).type("schema2").ordinal(5); + 5).type("schema2").id(5); doReturn(versionInfo).when(response).readEntity( io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo.class); VersionInfo versionInfo1 = client.addSchema("mygroup", schemaInfo); assertEquals(5, versionInfo1.getVersion()); assertEquals("schema2", versionInfo1.getType()); - assertEquals(5, versionInfo1.getOrdinal()); + assertEquals(5, versionInfo1.getId()); // NotFound Exception 
doReturn(Response.Status.NOT_FOUND.getStatusCode()).when(response).getStatus(); AssertExtensions.assertThrows("An exception should have been thrown", @@ -277,7 +276,7 @@ public void testGetSchema() { ApiV1.GroupsApi proxy = mock(ApiV1.GroupsApi.class); SchemaRegistryClientImpl client = new SchemaRegistryClientImpl(proxy); Response response = mock(Response.class); - doReturn(response).when(proxy).getSchemaFromVersionOrdinal(anyString(), anyInt()); + doReturn(response).when(proxy).getSchemaForId(anyString(), anyInt()); doReturn(Response.Status.OK.getStatusCode()).when(response).getStatus(); io.pravega.schemaregistry.contract.generated.rest.model.SerializationFormat serializationFormat = ModelHelper.encode(SerializationFormat.custom("custom")); @@ -421,16 +420,16 @@ public void testGroupEvolutionHistory() { SerializationFormat serializationFormat = SerializationFormat.custom("custom"); ByteBuffer schemaData = ByteBuffer.wrap(new byte[0]); SchemaInfo schemaInfo = new SchemaInfo("schema1", serializationFormat, schemaData, ImmutableMap.of()); - SchemaValidationRules schemaValidationRules = SchemaValidationRules.of(Compatibility.backward()); + Compatibility compatibility = Compatibility.backward(); GroupHistoryRecord groupHistoryRecord = new io.pravega.schemaregistry.contract.generated.rest.model.GroupHistoryRecord() .schemaInfo(ModelHelper.encode(schemaInfo)).version(ModelHelper.encode(versionInfo)) - .validationRules(ModelHelper.encode(schemaValidationRules)).timestamp(100L).schemaString(""); + .compatibility(ModelHelper.encode(compatibility)).timestamp(100L).schemaString(""); GroupHistory history = new GroupHistory(); history.addHistoryItem(groupHistoryRecord); doReturn(history).when(response).readEntity(GroupHistory.class); List groupHistoryList = client.getGroupHistory("mygroup"); assertEquals(1, groupHistoryList.size()); - assertEquals(schemaValidationRules, groupHistoryList.get(0).getRules()); + assertEquals(compatibility, groupHistoryList.get(0).getCompatibility()); assertEquals(schemaInfo, groupHistoryList.get(0).getSchema()); assertEquals(versionInfo, groupHistoryList.get(0).getVersion()); assertEquals(100L, groupHistoryList.get(0).getTimestamp()); diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/data/BackwardAndForward.java b/contract/src/main/java/io/pravega/schemaregistry/contract/data/BackwardAndForward.java new file mode 100644 index 000000000..9b3c2fe4c --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/data/BackwardAndForward.java @@ -0,0 +1,109 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + *

+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.contract.data; + +import com.google.common.base.Preconditions; +import io.pravega.common.ObjectBuilder; +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.EqualsAndHashCode; + +/** + * Defines different BackwardAndForward policy options for schema evolution for schemas within a group. + * The choice of compatibility policy tells the Schema Registry service whether a schema should be accepted to evolve + * into new schema by comparing it with one or more existing versions of the schema. + * + * {@link Backward}: a new schema can be used to read data written by previous schema. + * {@link BackwardTransitive}: a new schema can be used read data written by any of previous schemas. + * {@link BackwardTill}: a new schema can be used to read data written by any of previous schemas till specified schema. + * {@link Forward}: previous schema can be used to read data written by new schema. + * {@link ForwardTransitive}: all previous schemas can read data written by new schema. + * {@link ForwardTill}: All previous schema versions till specified schema version can read data written by new schema. + */ +@Data +@Builder +public class BackwardAndForward { + + private final BackwardPolicy backwardPolicy; + private final ForwardPolicy forwardPolicy; + + BackwardAndForward(BackwardPolicy backwardPolicy, ForwardPolicy forwardPolicy) { + Preconditions.checkArgument(backwardPolicy != null || forwardPolicy != null); + Preconditions.checkArgument(backwardPolicy == null || backwardPolicy instanceof Backward + || backwardPolicy instanceof BackwardTill || backwardPolicy instanceof BackwardTransitive); + Preconditions.checkArgument(forwardPolicy == null || forwardPolicy instanceof Forward + || forwardPolicy instanceof ForwardTill || forwardPolicy instanceof ForwardTransitive); + this.backwardPolicy = backwardPolicy; + this.forwardPolicy = forwardPolicy; + } + + public interface BackwardPolicy { + } + + public interface ForwardPolicy { + } + + @Builder + @AllArgsConstructor + @EqualsAndHashCode + public static class Backward implements BackwardPolicy { + public static class BackwardBuilder implements ObjectBuilder { + } + } + + @Data + @Builder + @AllArgsConstructor + @EqualsAndHashCode + public static class BackwardTill implements BackwardPolicy { + private final VersionInfo versionInfo; + + public static class BackwardTillBuilder implements ObjectBuilder { + } + } + + @Builder + @EqualsAndHashCode + @AllArgsConstructor + public static class BackwardTransitive implements BackwardPolicy { + public static class BackwardTransitiveBuilder implements ObjectBuilder { + } + } + + @Builder + @AllArgsConstructor + @EqualsAndHashCode + public static class Forward implements ForwardPolicy { + public static class ForwardBuilder implements ObjectBuilder { + } + } + + @Data + @Builder + @EqualsAndHashCode + @AllArgsConstructor + public static class ForwardTill implements ForwardPolicy { + private final VersionInfo versionInfo; + public static class ForwardTillBuilder implements ObjectBuilder { + } + } + + @AllArgsConstructor + @EqualsAndHashCode + @Builder + public static class ForwardTransitive implements ForwardPolicy { + public static class ForwardTransitiveBuilder implements ObjectBuilder { + } + } + + public static class BackwardAndForwardBuilder implements ObjectBuilder { + } +} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/data/Compatibility.java 
b/contract/src/main/java/io/pravega/schemaregistry/contract/data/Compatibility.java index 278583539..1f6b262a4 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/data/Compatibility.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/data/Compatibility.java @@ -1,14 +1,15 @@ /** * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. - * + *

* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * http://www.apache.org/licenses/LICENSE-2.0 */ package io.pravega.schemaregistry.contract.data; +import com.google.common.base.Preconditions; import io.pravega.common.ObjectBuilder; import lombok.Builder; import lombok.Data; @@ -17,185 +18,183 @@ * Defines different Compatibility policy options for schema evolution for schemas within a group. * The choice of compatibility policy tells the Schema Registry service whether a schema should be accepted to evolve * into new schema by comparing it with one or more existing versions of the schema. - * - * {@link Type#AllowAny}: allow any changes to schema without any checks performed by the registry. - * {@link Type#DenyAll}: disables any changes to the schema for the group. - * {@link Type#Backward}: a new schema can be used to read data written by previous schema. - * {@link Type#BackwardTransitive}: a new schema can be used read data written by any of previous schemas. - * {@link Type#BackwardTill}: a new schema can be used to read data written by any of previous schemas till schema - * identified by version {@link Compatibility#backwardTill}. - * {@link Type#Forward}: previous schema can be used to read data written by new schema. - * {@link Type#ForwardTransitive}: all previous schemas can read data written by new schema. - * {@link Type#ForwardTill}: All previous schemas till schema identified by version {@link Compatibility#forwardTill} - * can read data written by new schema. - * {@link Type#Full}: both backward and forward compatibility. - * {@link Type#FullTransitive}: both backward and forward compatibility with all previous schemas. - * {@link Type#BackwardAndForwardTill}: All previous schemas till schema identified by version {@link Compatibility#forwardTill} - * can read data written by new schema. New schema can be used to read data written by any of previous schemas till schema - * identified by version {@link Compatibility#backwardTill}. + * */ @Data @Builder -public class Compatibility implements SchemaValidationRule { +public class Compatibility { /** * Enum that defines the Type of compatibility policy. */ - private final Type compatibility; - /** - * Version info to be specified if the compatibility policy choic.e is either {@link Type#backwardTill} or - * {@link Type#backwardTillAndForwardTill}. - */ - private final VersionInfo backwardTill; - /** - * Version info to be specified if the compatibility policy choice is either {@link Type#forwardTill} or - * {@link Type#backwardTillAndForwardTill}. - */ - private final VersionInfo forwardTill; - - private Compatibility(Type compatibility) { - this(compatibility, null, null); - } + private final Type type; + private final BackwardAndForward backwardAndForward; - private Compatibility(Type compatibility, VersionInfo backwardTill, VersionInfo forwardTill) { - this.compatibility = compatibility; - this.backwardTill = backwardTill; - this.forwardTill = forwardTill; + private Compatibility(Type type) { + this(type, null); } - @Override - public String getName() { - return Compatibility.class.getSimpleName(); + private Compatibility(Type type, BackwardAndForward backwardAndForward) { + Preconditions.checkArgument(!type.equals(Type.BackwardAndForward) || backwardAndForward != null); + this.type = type; + this.backwardAndForward = backwardAndForward; } + /** + * {@link Type#AllowAny}: allow any changes to schema without any checks performed by the registry. + * {@link Type#DenyAll}: disables any changes to the schema for the group. 
+ * {@link Type#BackwardAndForward}: apply the backward and/or forward compatibility checks described by {@link BackwardAndForward}. + */ public enum Type { AllowAny, DenyAll, - Backward, - BackwardTill, - BackwardTransitive, - Forward, - ForwardTill, - ForwardTransitive, - BackwardAndForwardTill, - Full, - FullTransitive; + BackwardAndForward, + } + + /** + * Disable compatibility checks and allow any schema to be registered. Effectively declares all schemas as compatible. + * + * @return Compatibility policy that allows any change. + */ + public static Compatibility allowAny() { + return new Compatibility(Type.AllowAny); + } + + /** + * Compatibility policy that disallows any new schema changes. Effectively rejects all schemas and declares them incompatible. + * + * @return Compatibility policy that denies all changes. + */ + public static Compatibility denyAll() { + return new Compatibility(Type.DenyAll); } + private static Compatibility backwardAndForward(BackwardAndForward backwardAndForward) { + return new Compatibility(Type.BackwardAndForward, backwardAndForward); + } + /** - * Method to create a compatibility policy of type backward. Backward policy implies new schema will be validated + * Method to create a compatibility policy of type backward. Backward policy implies that the new schema will be validated * to be capable of reading data written using the previous schema. - * - * @return Compatibility with Type.Backward. + * + * @return Compatibility policy with Backward check. */ public static Compatibility backward() { - return new Compatibility(Type.Backward); + return backwardAndForward(new BackwardAndForward(new BackwardAndForward.Backward(), null)); } /** - * Method to create a compatibility policy of type backward till. BackwardTill policy implies new schema will be validated + * Method to create a compatibility policy of type backward till. BackwardTill policy implies that the new schema will be validated * to be capable of reading data written using the all previous schemas till version supplied as input. - * + * * @param backwardTill version till which schemas should be checked for compatibility. - * @return Compatibility with Type.BackwardTill version. + * @return Compatibility policy with BackwardTill version check. */ public static Compatibility backwardTill(VersionInfo backwardTill) { - return new Compatibility(Type.BackwardTill, backwardTill, null); + return Compatibility.backwardAndForward(new BackwardAndForward(new BackwardAndForward.BackwardTill(backwardTill), null)); } /** - * Method to create a compatibility policy of type backward transitive. Backward transitive policy implies + * Method to create a compatibility policy of type backward transitive. Backward transitive policy implies that the * new schema will be validated to be capable of reading data written using the all previous schemas versions. - * - * @return Compatibility with Type.BackwardTransitive. + * + * @return Compatibility policy with BackwardTransitive check. */ public static Compatibility backwardTransitive() { - return new Compatibility(Type.BackwardTransitive); + return Compatibility.backwardAndForward(new BackwardAndForward(new BackwardAndForward.BackwardTransitive(), null)); } /** - * Method to create a compatibility policy of type forward. Forward policy implies new schema will be validated + * Method to create a compatibility policy of type forward. Forward policy implies that the new schema will be validated * such that data written using new schema can be read using the previous schema.
- * - * @return Compatibility with Type.Forward + * + * @return Compatibility policy with Forward compatibility check. */ public static Compatibility forward() { - return new Compatibility(Type.Forward); + return Compatibility.backwardAndForward(new BackwardAndForward(null, new BackwardAndForward.Forward())); } /** - * Method to create a compatibility policy of type forward till. Forward policy implies new schema will be validated + * Method to create a compatibility policy of type forward till. ForwardTill policy implies that the new schema will be validated * such that data written using new schema can be read using the all previous schemas till supplied version. * * @param forwardTill version till which schemas should be checked for compatibility. - * @return Compatibility with Type.ForwardTill version. + * @return Compatibility policy with ForwardTill check. */ public static Compatibility forwardTill(VersionInfo forwardTill) { - return new Compatibility(Type.ForwardTill, null, forwardTill); + return Compatibility.backwardAndForward(new BackwardAndForward(null, new BackwardAndForward.ForwardTill(forwardTill))); } /** * Method to create a compatibility policy of type forward transitive. - * Forward transitive policy implies new schema will be validated such that data written using new schema + * Forward transitive policy implies that the new schema will be validated such that data written using new schema * can be read using all previous schemas. - * - * @return Compatibility with Type.ForwardTransitive. + * + * @return Compatibility policy with ForwardTransitive check. */ public static Compatibility forwardTransitive() { - return new Compatibility(Type.ForwardTransitive); + return Compatibility.backwardAndForward(new BackwardAndForward(null, new BackwardAndForward.ForwardTransitive())); } /** - * Method to create a compatibility policy of type full. Full means backward and forward compatibility check with + * Method to create a compatibility policy of type full. Full means backward and forward compatibility check with * previous schema version. Which means new schema can be used to read data written with previous schema and vice versa. - * - * @return Compatibility with Type.Full. + * + * @return Compatibility policy with Backward and Forward compatibility checks. */ public static Compatibility full() { - return new Compatibility(Type.Full); + return Compatibility.backwardAndForward(new BackwardAndForward(new BackwardAndForward.Backward(), new BackwardAndForward.Forward())); } /** - * Method to create a compatibility policy of type full transitive. - * Full transitive means backward and forward compatibility check with all previous schema version. + * Method to create a compatibility policy of type full transitive. + * Full transitive means backward and forward compatibility check with all previous schema versions. * This implies new schema can be used to read data written with any of the previous schemas and vice versa. * - * @return Compatibility with Type.FullTransitive. + * @return Compatibility policy with BackwardTransitive and ForwardTransitive checks. */ public static Compatibility fullTransitive() { - return new Compatibility(Type.FullTransitive); + return Compatibility.backwardAndForward(new BackwardAndForward(new BackwardAndForward.BackwardTransitive(), new BackwardAndForward.ForwardTransitive())); } /** - * Method to create a compatibility policy of type backward till and forward till. This is a combination of - * backward till and forward till policies.
- * All previous schemas till schema identified by version {@link Compatibility#forwardTill} + * Method to create a compatibility policy of type backward till and forward till. This is a combination of + * backward till and forward till policies. + * All previous schemas till the schema identified by the version specified with the {@link BackwardAndForward.ForwardTill} policy * can read data written by new schema. New schema can be used to read data written by any of previous schemas till schema - * identified by version {@link Compatibility#backwardTill}. - * - * @param backwardTill version till which backward compatibility is checked for. - * @param forwardTill version till which forward compatibility is checked for. - * @return Compatibility with Type.FullTransitive. + * identified by the version specified with the {@link BackwardAndForward.BackwardTill} policy. + * + * @param backwardTill version till which backward compatibility is checked for. + * @param forwardTill version till which forward compatibility is checked for. + * @return Compatibility policy with BackwardTill and ForwardTill checks. */ public static Compatibility backwardTillAndForwardTill(VersionInfo backwardTill, VersionInfo forwardTill) { - return new Compatibility(Type.BackwardAndForwardTill, backwardTill, forwardTill); + return Compatibility.backwardAndForward(new BackwardAndForward(new BackwardAndForward.BackwardTill(backwardTill), new BackwardAndForward.ForwardTill(forwardTill))); } /** - * Disable compatibility check and all any schema to be registered. Effectively declares all schemas as compatible. - * - * @return Compatibility with Type.AllowAny + * Method to create a compatibility policy of type backward one and forward till. + * + * All previous schemas till the schema identified by the version specified with the {@link BackwardAndForward.ForwardTill} policy + * can read data written by new schema. New schema can be used to read data written by the previous schema. + * + * @param forwardTill version till which forward compatibility is checked for. + * @return Compatibility policy with Backward and ForwardTill checks. */ - public static Compatibility allowAny() { - return new Compatibility(Type.AllowAny); + public static Compatibility backwardOneAndForwardTill(VersionInfo forwardTill) { + return Compatibility.backwardAndForward(new BackwardAndForward(new BackwardAndForward.Backward(), new BackwardAndForward.ForwardTill(forwardTill))); } /** - * Compatibility policy that disallows any new schema changes. Effecfively rejects all schemas and declares them incompatible. + * Method to create a compatibility policy of type backward till and forward one. + * + * The previous schema can read data written by new schema. New schema can be used to read data written by any of + * previous schemas till the schema identified by the version specified with the {@link BackwardAndForward.BackwardTill} policy. * - * @return Compatibility with Type.DenyAll + * @param backwardTill version till which backward compatibility is checked for. + * @return Compatibility policy with BackwardTill and Forward checks.
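Taken together, these factory methods replace the old Compatibility.Type values (Backward, Full, BackwardAndForwardTill and so on) with compositions of BackwardAndForward checks. A minimal usage sketch, illustrative only, assuming the SchemaRegistryClient interface from the client module and made-up group and schema names:

    import io.pravega.schemaregistry.client.SchemaRegistryClient;
    import io.pravega.schemaregistry.contract.data.Compatibility;
    import io.pravega.schemaregistry.contract.data.GroupProperties;
    import io.pravega.schemaregistry.contract.data.SerializationFormat;
    import io.pravega.schemaregistry.contract.data.VersionInfo;

    public class CompatibilitySketch {
        public static void createGroup(SchemaRegistryClient client) {
            // Simple policies check new schemas against the latest schema only.
            Compatibility backward = Compatibility.backward();
            Compatibility full = Compatibility.full();

            // "Till" variants pin a check to a specific schema version; the coordinates here are
            // made up and would normally come from a VersionInfo returned by the service.
            VersionInfo pinned = VersionInfo.builder().type("MyRecord").version(2).id(2).build();
            Compatibility window = Compatibility.backwardTillAndForwardTill(pinned, pinned);

            // GroupProperties now carries the Compatibility policy directly.
            GroupProperties properties = new GroupProperties(SerializationFormat.Avro, window, false);
            client.addGroup("mygroup", properties);
        }
    }

The builder call on VersionInfo assumes the Lombok-generated builder declared in this patch; in practice the pinned version would be obtained from addSchema or getGroupHistory rather than constructed by hand.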
*/ - public static Compatibility denyAll() { - return new Compatibility(Type.DenyAll); + public static Compatibility backwardTillAndForwardOne(VersionInfo backwardTill) { + return Compatibility.backwardAndForward(new BackwardAndForward(new BackwardAndForward.BackwardTill(backwardTill), new BackwardAndForward.Forward())); } public static class CompatibilityBuilder implements ObjectBuilder { diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/data/GroupHistoryRecord.java b/contract/src/main/java/io/pravega/schemaregistry/contract/data/GroupHistoryRecord.java index 9ae08dacc..0eee07c76 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/data/GroupHistoryRecord.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/data/GroupHistoryRecord.java @@ -12,7 +12,7 @@ import lombok.Data; /** - * Describes changes to the group and the validation rules {@link GroupHistoryRecord#rules} that were + * Describes changes to the group and the compatibility {@link GroupHistoryRecord#compatibility} that were * applied while registering {@link GroupHistoryRecord#schema} and the unique {@link GroupHistoryRecord#version} identifier * that was assigned to it. * It also has {@link GroupHistoryRecord#timestamp} when the schema was added and includes an optional @@ -30,9 +30,9 @@ public class GroupHistoryRecord { */ private final VersionInfo version; /** - * Validation rules applied at the time when the schema was registered. + * Compatibility applied at the time when the schema was registered. */ - private final SchemaValidationRules rules; + private final Compatibility compatibility; /** * Service's Time when the schema was registered. */ diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/data/GroupProperties.java b/contract/src/main/java/io/pravega/schemaregistry/contract/data/GroupProperties.java index 4002ceebb..b8e8d71e5 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/data/GroupProperties.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/data/GroupProperties.java @@ -17,7 +17,7 @@ * Different configuration choices for a group. * * {@link GroupProperties#serializationFormat} identifies the serialization format used to describe the schema. - * {@link GroupProperties#schemaValidationRules} sets the schema validation policy that needs to be enforced for evolving schemas. + * {@link GroupProperties#compatibility} sets the schema validation policy that needs to be enforced for evolving schemas. * {@link GroupProperties#allowMultipleTypes} that specifies if multiple schemas with distinct {@link SchemaInfo#type} * are allowed to coexist within the group. A schema describes an object and each object type is distinctly identified by * {@link SchemaInfo#type}. Registry service validates new schema with existing schema versions of the same name and versions @@ -37,9 +37,9 @@ public class GroupProperties { */ private final SerializationFormat serializationFormat; /** - * Schema validation rules to be applied for the group. + * Compatibility to be applied for the group. */ - private final SchemaValidationRules schemaValidationRules; + private final Compatibility compatibility; /** * Flag to indicate whether multiple types of schemas can be added to the group or not. If set to false, all schemas * added to the group should have the same {@link SchemaInfo#type}. 
@@ -50,24 +50,24 @@ public class GroupProperties { */ private final ImmutableMap properties; - public GroupProperties(SerializationFormat serializationFormat, SchemaValidationRules schemaValidationRules, boolean allowMultipleTypes) { - this(serializationFormat, schemaValidationRules, allowMultipleTypes, ImmutableMap.of()); + public GroupProperties(SerializationFormat serializationFormat, Compatibility compatibility, boolean allowMultipleTypes) { + this(serializationFormat, compatibility, allowMultipleTypes, ImmutableMap.of()); } - public GroupProperties(SerializationFormat serializationFormat, SchemaValidationRules schemaValidationRules, boolean allowMultipleTypes, ImmutableMap properties) { + public GroupProperties(SerializationFormat serializationFormat, Compatibility compatibility, boolean allowMultipleTypes, ImmutableMap properties) { this.serializationFormat = serializationFormat; - this.schemaValidationRules = schemaValidationRules; + this.compatibility = compatibility; this.allowMultipleTypes = allowMultipleTypes; this.properties = properties; } public static final class GroupPropertiesBuilder { - private SchemaValidationRules schemaValidationRules = SchemaValidationRules.of(Compatibility.fullTransitive()); + private Compatibility compatibility = Compatibility.fullTransitive(); private boolean allowMultipleTypes = false; private ImmutableMap properties = ImmutableMap.of(); - public GroupPropertiesBuilder compatibility(Compatibility compatibility) { - this.schemaValidationRules = SchemaValidationRules.of(compatibility); + public GroupPropertiesBuilder compatibility(Compatibility rule) { + this.compatibility = rule; return this; } } diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaValidationRule.java b/contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaValidationRule.java deleted file mode 100644 index c89670543..000000000 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaValidationRule.java +++ /dev/null @@ -1,23 +0,0 @@ -/** - * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - */ -package io.pravega.schemaregistry.contract.data; - -/** - * Base interface to define all schema validation rules. Schema validation rules are applied whenever new schemas are registered - * and only schemas that satisfy validation rules are accepted by the registry into the group. - */ -public interface SchemaValidationRule { - /** - * Name of the rule to identify it with. - * - * @return name of the rule. - */ - String getName(); -} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaValidationRules.java b/contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaValidationRules.java deleted file mode 100644 index 3725e0d0f..000000000 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaValidationRules.java +++ /dev/null @@ -1,50 +0,0 @@ -/** - * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - */ -package io.pravega.schemaregistry.contract.data; - -import io.pravega.common.ObjectBuilder; -import lombok.Builder; -import lombok.Data; - -import java.util.Collections; -import java.util.Map; - -/** - * Schema validation rules that are applied for checking if a schema is valid. - * This contains a set of rules {@link SchemaValidationRule}. Currently the only rule that is supported is {@link Compatibility}. - * The schema will be compared against one or more existing schemas in the group by checking it for satisfying each of the - * rules. - */ -@Data -@Builder -public class SchemaValidationRules { - /** - * Map of schema validation rule name to corresponding schema validation rule. - */ - private final Map rules; - - private SchemaValidationRules(Map rules) { - this.rules = rules; - } - - /** - * Method to create a rule for compatibility. - * - * @param compatibility compatibility policy to be used. - * @return A singleton rules map containing the compatibility rule. - */ - public static SchemaValidationRules of(Compatibility compatibility) { - return new SchemaValidationRules(Collections.singletonMap(compatibility.getName(), compatibility)); - } - - public static class SchemaValidationRulesBuilder implements ObjectBuilder { - } - -} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/data/SerializationFormat.java b/contract/src/main/java/io/pravega/schemaregistry/contract/data/SerializationFormat.java index cecb9b257..4d7523345 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/data/SerializationFormat.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/data/SerializationFormat.java @@ -18,7 +18,7 @@ * Registry supports Avro, Protobuf and Json serialization formats but any custom type could be used with the registry using custom type. * * If a serialization format is not present in the enum it can be specified using {@link SerializationFormat#custom} with {@link SerializationFormat#customTypeName}. - * Allowed values of {@link Compatibility} mode with custom type are AllowAny or DenyAll. + * Allowed values of {@link Compatibility} mode with custom type are AllowAny or DenyAll. */ public enum SerializationFormat { diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/data/VersionInfo.java b/contract/src/main/java/io/pravega/schemaregistry/contract/data/VersionInfo.java index 3edde4c11..a8be83da9 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/data/VersionInfo.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/data/VersionInfo.java @@ -17,14 +17,14 @@ /** * Version information object that encapsulates properties that uniquely identify a specific version of a schema within a group. * This is generated by the service when a new schema is added to the group. It identifies the schema that is added and includes - * the version and ordinal assigned by the service to the schema. + * the version and id assigned by the service to the schema. * * {@link VersionInfo#type} is same as {@link SchemaInfo#type} which represents the object type for which the version is computed. * {@link VersionInfo#version} the registry assigned monotonically increasing version number for the schema for specific object type. * Since the version number is per object type, so type and version number forms a unique pair.
- * {@link VersionInfo#ordinal} This is an Id that is assigned by the service and uniquely identifies the schema version within - * the group. The ordinals are assigned as monotonically increasing numbers for all schemas added to the group. - * So the ordinal also represents the absolute order of the schema for all schemas in the group. + * {@link VersionInfo#id} This is an Id that is assigned by the service and uniquely identifies the schema version within + * the group. The ids are assigned as monotonically increasing numbers for all schemas added to the group. + * So the id also represents the absolute order of the schema for all schemas in the group. */ @Data @Builder @@ -43,7 +43,7 @@ public class VersionInfo { * A position identifier that uniquely identifies the schema within a group and represents the order in which this * schema was included in the group. */ - private final int ordinal; + private final int id; public static class VersionInfoBuilder implements ObjectBuilder { } diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/AddedTo.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/AddedTo.java index 310f78ff8..1c8fa616b 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/AddedTo.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/AddedTo.java @@ -44,11 +44,11 @@ public AddedTo putGroupsItem(String key, VersionInfo groupsItem) { } /** - * Get groups + * Version for the schema in the group. * @return groups **/ @JsonProperty("groups") - @ApiModelProperty(required = true, value = "") + @ApiModelProperty(required = true, value = "Version for the schema in the group.") @NotNull public Map getGroups() { return groups; diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/Backward.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/Backward.java new file mode 100644 index 000000000..51ba0fe39 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/Backward.java @@ -0,0 +1,92 @@ +/* + * Pravega Schema Registry APIs + * REST APIs for Pravega Schema Registry. + * + * OpenAPI spec version: 0.0.1 + * + * + * NOTE: This class is auto generated by the swagger code generator program. + * https://github.com/swagger-api/swagger-codegen.git + * Do not edit the class manually. + */ + + +package io.pravega.schemaregistry.contract.generated.rest.model; + +import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonCreator; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import javax.validation.constraints.*; + +/** + * BackwardPolicy compatibility type which tells the service to check for backwardPolicy compatibility with latest schema.
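On the client side, the rename from updateSchemaValidationRules to updateCompatibility (exercised in the test changes earlier in this patch) reduces a policy update to a single call. A rough sketch, with an illustrative group id and the last argument left null as in those tests:

    import io.pravega.schemaregistry.client.SchemaRegistryClient;
    import io.pravega.schemaregistry.contract.data.Compatibility;
    import io.pravega.schemaregistry.contract.data.GroupProperties;

    public class UpdateCompatibilitySketch {
        public static void tighten(SchemaRegistryClient client) {
            // Switch the group to transitive forward checks; returns false if the update conflicts.
            boolean updated = client.updateCompatibility("mygroup", Compatibility.forwardTransitive(), null);

            // Read the effective policy back from the group's properties.
            GroupProperties properties = client.getGroupProperties("mygroup");
            System.out.println(updated + " -> " + properties.getCompatibility().getType());
        }
    }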
+ */ +@ApiModel(description = "BackwardPolicy compatibility type which tells the service to check for backwardPolicy compatibility with latest schema.") + +public class Backward { + @JsonProperty("name") + private String name = null; + + public Backward name(String name) { + this.name = name; + return this; + } + + /** + * Get name + * @return name + **/ + @JsonProperty("name") + @ApiModelProperty(required = true, value = "") + @NotNull + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + + @Override + public boolean equals(java.lang.Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Backward backward = (Backward) o; + return Objects.equals(this.name, backward.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class Backward {\n"); + + sb.append(" name: ").append(toIndentedString(name)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces + * (except the first line). + */ + private String toIndentedString(java.lang.Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} + diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/BackwardAndForward.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/BackwardAndForward.java new file mode 100644 index 000000000..4b1f8b177 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/BackwardAndForward.java @@ -0,0 +1,117 @@ +/* + * Pravega Schema Registry APIs + * REST APIs for Pravega Schema Registry. + * + * OpenAPI spec version: 0.0.1 + * + * + * NOTE: This class is auto generated by the swagger code generator program. + * https://github.com/swagger-api/swagger-codegen.git + * Do not edit the class manually. + */ + + +package io.pravega.schemaregistry.contract.generated.rest.model; + +import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonCreator; +import io.pravega.schemaregistry.contract.generated.rest.model.BackwardPolicy; +import io.pravega.schemaregistry.contract.generated.rest.model.ForwardPolicy; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import javax.validation.constraints.*; + +/** + * BackwardPolicy and forwardPolicy policy. + */ +@ApiModel(description = "BackwardPolicy and forwardPolicy policy.") + +public class BackwardAndForward { + @JsonProperty("backwardPolicy") + private BackwardPolicy backwardPolicy = null; + + @JsonProperty("forwardPolicy") + private ForwardPolicy forwardPolicy = null; + + public BackwardAndForward backwardPolicy(BackwardPolicy backwardPolicy) { + this.backwardPolicy = backwardPolicy; + return this; + } + + /** + * BackwardAndForward policy type that describes different types of BackwardPolicy policies like Backward, BackwardTransitive and BackwardTill. 
+ * @return backwardPolicy + **/ + @JsonProperty("backwardPolicy") + @ApiModelProperty(value = "BackwardAndForward policy type that describes different types of BackwardPolicy policies like Backward, BackwardTransitive and BackwardTill.") + public BackwardPolicy getBackwardPolicy() { + return backwardPolicy; + } + + public void setBackwardPolicy(BackwardPolicy backwardPolicy) { + this.backwardPolicy = backwardPolicy; + } + + public BackwardAndForward forwardPolicy(ForwardPolicy forwardPolicy) { + this.forwardPolicy = forwardPolicy; + return this; + } + + /** + * BackwardAndForward policy type that describes different types of ForwardPolicy policies like Forward, ForwardTransitive and ForwardTill. + * @return forwardPolicy + **/ + @JsonProperty("forwardPolicy") + @ApiModelProperty(value = "BackwardAndForward policy type that describes different types of ForwardPolicy policies like Forward, ForwardTransitive and ForwardTill.") + public ForwardPolicy getForwardPolicy() { + return forwardPolicy; + } + + public void setForwardPolicy(ForwardPolicy forwardPolicy) { + this.forwardPolicy = forwardPolicy; + } + + + @Override + public boolean equals(java.lang.Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + BackwardAndForward backwardAndForward = (BackwardAndForward) o; + return Objects.equals(this.backwardPolicy, backwardAndForward.backwardPolicy) && + Objects.equals(this.forwardPolicy, backwardAndForward.forwardPolicy); + } + + @Override + public int hashCode() { + return Objects.hash(backwardPolicy, forwardPolicy); + } + + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class BackwardAndForward {\n"); + + sb.append(" backwardPolicy: ").append(toIndentedString(backwardPolicy)).append("\n"); + sb.append(" forwardPolicy: ").append(toIndentedString(forwardPolicy)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces + * (except the first line). + */ + private String toIndentedString(java.lang.Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} + diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/BackwardPolicy.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/BackwardPolicy.java new file mode 100644 index 000000000..cba1d0446 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/BackwardPolicy.java @@ -0,0 +1,92 @@ +/* + * Pravega Schema Registry APIs + * REST APIs for Pravega Schema Registry. + * + * OpenAPI spec version: 0.0.1 + * + * + * NOTE: This class is auto generated by the swagger code generator program. + * https://github.com/swagger-api/swagger-codegen.git + * Do not edit the class manually. + */ + + +package io.pravega.schemaregistry.contract.generated.rest.model; + +import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonCreator; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import javax.validation.constraints.*; + +/** + * BackwardPolicy policy. 
+ */ +@ApiModel(description = "BackwardPolicy policy.") + +public class BackwardPolicy { + @JsonProperty("backwardPolicy") + private Object backwardPolicy = null; + + public BackwardPolicy backwardPolicy(Object backwardPolicy) { + this.backwardPolicy = backwardPolicy; + return this; + } + + /** + * BackwardAndForward type backwardPolicy. Can be one of Backward, backwardTill and backwardTransitive. + * @return backwardPolicy + **/ + @JsonProperty("backwardPolicy") + @ApiModelProperty(required = true, value = "BackwardAndForward type backwardPolicy. Can be one of Backward, backwardTill and backwardTransitive.") + @NotNull + public Object getBackwardPolicy() { + return backwardPolicy; + } + + public void setBackwardPolicy(Object backwardPolicy) { + this.backwardPolicy = backwardPolicy; + } + + + @Override + public boolean equals(java.lang.Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + BackwardPolicy backwardPolicy = (BackwardPolicy) o; + return Objects.equals(this.backwardPolicy, backwardPolicy.backwardPolicy); + } + + @Override + public int hashCode() { + return Objects.hash(backwardPolicy); + } + + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class BackwardPolicy {\n"); + + sb.append(" backwardPolicy: ").append(toIndentedString(backwardPolicy)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces + * (except the first line). + */ + private String toIndentedString(java.lang.Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} + diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/BackwardTill.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/BackwardTill.java new file mode 100644 index 000000000..f4e383a23 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/BackwardTill.java @@ -0,0 +1,118 @@ +/* + * Pravega Schema Registry APIs + * REST APIs for Pravega Schema Registry. + * + * OpenAPI spec version: 0.0.1 + * + * + * NOTE: This class is auto generated by the swagger code generator program. + * https://github.com/swagger-api/swagger-codegen.git + * Do not edit the class manually. + */ + + +package io.pravega.schemaregistry.contract.generated.rest.model; + +import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonCreator; +import io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import javax.validation.constraints.*; + +/** + * BackwardPolicy compatibility which tells the service to check for backwardPolicy compatibility with all previous schemas till specific version. 
+ */ +@ApiModel(description = "BackwardPolicy compatibility which tells the service to check for backwardPolicy compatibility with all previous schemas till specific version.") + +public class BackwardTill { + @JsonProperty("name") + private String name = null; + + @JsonProperty("version") + private VersionInfo version = null; + + public BackwardTill name(String name) { + this.name = name; + return this; + } + + /** + * Get name + * @return name + **/ + @JsonProperty("name") + @ApiModelProperty(required = true, value = "") + @NotNull + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public BackwardTill version(VersionInfo version) { + this.version = version; + return this; + } + + /** + * Whether given schema is valid with respect to existing group schemas against the configured compatibility. + * @return version + **/ + @JsonProperty("version") + @ApiModelProperty(required = true, value = "Whether given schema is valid with respect to existing group schemas against the configured compatibility.") + @NotNull + public VersionInfo getVersion() { + return version; + } + + public void setVersion(VersionInfo version) { + this.version = version; + } + + + @Override + public boolean equals(java.lang.Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + BackwardTill backwardTill = (BackwardTill) o; + return Objects.equals(this.name, backwardTill.name) && + Objects.equals(this.version, backwardTill.version); + } + + @Override + public int hashCode() { + return Objects.hash(name, version); + } + + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class BackwardTill {\n"); + + sb.append(" name: ").append(toIndentedString(name)).append("\n"); + sb.append(" version: ").append(toIndentedString(version)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces + * (except the first line). + */ + private String toIndentedString(java.lang.Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} + diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/BackwardTransitive.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/BackwardTransitive.java new file mode 100644 index 000000000..6dac3a55e --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/BackwardTransitive.java @@ -0,0 +1,92 @@ +/* + * Pravega Schema Registry APIs + * REST APIs for Pravega Schema Registry. + * + * OpenAPI spec version: 0.0.1 + * + * + * NOTE: This class is auto generated by the swagger code generator program. + * https://github.com/swagger-api/swagger-codegen.git + * Do not edit the class manually. + */ + + +package io.pravega.schemaregistry.contract.generated.rest.model; + +import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonCreator; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import javax.validation.constraints.*; + +/** + * BackwardPolicy compatibility type which tells the service to check for backwardPolicy compatibility with all previous schemas. 
+ */ +@ApiModel(description = "BackwardPolicy compatibility type which tells the service to check for backwardPolicy compatibility with all previous schemas.") + +public class BackwardTransitive { + @JsonProperty("name") + private String name = null; + + public BackwardTransitive name(String name) { + this.name = name; + return this; + } + + /** + * Get name + * @return name + **/ + @JsonProperty("name") + @ApiModelProperty(required = true, value = "") + @NotNull + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + + @Override + public boolean equals(java.lang.Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + BackwardTransitive backwardTransitive = (BackwardTransitive) o; + return Objects.equals(this.name, backwardTransitive.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class BackwardTransitive {\n"); + + sb.append(" name: ").append(toIndentedString(name)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces + * (except the first line). + */ + private String toIndentedString(java.lang.Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} + diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/CanRead.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/CanRead.java index 5f101741a..64d7db85f 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/CanRead.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/CanRead.java @@ -35,11 +35,11 @@ public CanRead compatible(Boolean compatible) { } /** - * Whether given schema is compatible and can be used for reads. Compatibility is checked against existing group schemas subject to group's configured compatibility policy. + * Whether given schema is compatible and can be used for reads. BackwardAndForward is checked against existing group schemas subject to group's configured compatibility policy. * @return compatible **/ @JsonProperty("compatible") - @ApiModelProperty(required = true, value = "Whether given schema is compatible and can be used for reads. Compatibility is checked against existing group schemas subject to group's configured compatibility policy.") + @ApiModelProperty(required = true, value = "Whether given schema is compatible and can be used for reads. 
BackwardAndForward is checked against existing group schemas subject to group's configured compatibility policy.") @NotNull public Boolean isCompatible() { return compatible; diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/Compatibility.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/Compatibility.java index 459893324..0f7805e31 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/Compatibility.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/Compatibility.java @@ -17,20 +17,17 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonValue; -import io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo; +import io.pravega.schemaregistry.contract.generated.rest.model.BackwardAndForward; import io.swagger.annotations.ApiModel; import io.swagger.annotations.ApiModelProperty; import javax.validation.constraints.*; /** - * Schema Compatibility validation rule. + * Compatibility policy. */ -@ApiModel(description = "Schema Compatibility validation rule.") +@ApiModel(description = "Compatibility policy.") public class Compatibility { - @JsonProperty("name") - private String name = null; - /** * Compatibility policy enum. */ @@ -39,23 +36,7 @@ public enum PolicyEnum { DENYALL("DenyAll"), - BACKWARD("Backward"), - - FORWARD("Forward"), - - FORWARDTRANSITIVE("ForwardTransitive"), - - BACKWARDTRANSITIVE("BackwardTransitive"), - - BACKWARDTILL("BackwardTill"), - - FORWARDTILL("ForwardTill"), - - BACKWARDANDFORWARDTILL("BackwardAndForwardTill"), - - FULL("Full"), - - FULLTRANSITIVE("FullTransitive"); + BACKWARDANDFORWARD("BackwardAndForward"); private String value; @@ -83,31 +64,8 @@ public static PolicyEnum fromValue(String text) { @JsonProperty("policy") private PolicyEnum policy = null; - @JsonProperty("backwardTill") - private VersionInfo backwardTill = null; - - @JsonProperty("forwardTill") - private VersionInfo forwardTill = null; - - public Compatibility name(String name) { - this.name = name; - return this; - } - - /** - * Name is used to identify the type of SchemaValidationRule. For Compatibility rule the name should be \"Compatibility\". - * @return name - **/ - @JsonProperty("name") - @ApiModelProperty(required = true, value = "Name is used to identify the type of SchemaValidationRule. For Compatibility rule the name should be \"Compatibility\".") - @NotNull - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } + @JsonProperty("backwardAndForward") + private BackwardAndForward backwardAndForward = null; public Compatibility policy(PolicyEnum policy) { this.policy = policy; @@ -129,42 +87,23 @@ public void setPolicy(PolicyEnum policy) { this.policy = policy; } - public Compatibility backwardTill(VersionInfo backwardTill) { - this.backwardTill = backwardTill; - return this; - } - - /** - * Version for backward till if policy is BackwardTill or BackwardAndForwardTill. 
- * @return backwardTill - **/ - @JsonProperty("backwardTill") - @ApiModelProperty(value = "Version for backward till if policy is BackwardTill or BackwardAndForwardTill.") - public VersionInfo getBackwardTill() { - return backwardTill; - } - - public void setBackwardTill(VersionInfo backwardTill) { - this.backwardTill = backwardTill; - } - - public Compatibility forwardTill(VersionInfo forwardTill) { - this.forwardTill = forwardTill; + public Compatibility backwardAndForward(BackwardAndForward backwardAndForward) { + this.backwardAndForward = backwardAndForward; return this; } /** - * Version for forward till if policy is ForwardTill or BackwardAndForwardTill. - * @return forwardTill + * Backward and forward policy details. + * @return backwardAndForward **/ - @JsonProperty("forwardTill") - @ApiModelProperty(value = "Version for forward till if policy is ForwardTill or BackwardAndForwardTill.") - public VersionInfo getForwardTill() { - return forwardTill; + @JsonProperty("backwardAndForward") + @ApiModelProperty(value = "Backward and forward policy details.") + public BackwardAndForward getBackwardAndForward() { + return backwardAndForward; } - public void setForwardTill(VersionInfo forwardTill) { - this.forwardTill = forwardTill; + public void setBackwardAndForward(BackwardAndForward backwardAndForward) { + this.backwardAndForward = backwardAndForward; } @@ -177,15 +116,13 @@ public boolean equals(java.lang.Object o) { return false; } Compatibility compatibility = (Compatibility) o; - return Objects.equals(this.name, compatibility.name) && - Objects.equals(this.policy, compatibility.policy) && - Objects.equals(this.backwardTill, compatibility.backwardTill) && - Objects.equals(this.forwardTill, compatibility.forwardTill); + return Objects.equals(this.policy, compatibility.policy) && + Objects.equals(this.backwardAndForward, compatibility.backwardAndForward); } @Override public int hashCode() { - return Objects.hash(name, policy, backwardTill, forwardTill); + return Objects.hash(policy, backwardAndForward); } @@ -194,10 +131,8 @@ public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class Compatibility {\n"); - sb.append(" name: ").append(toIndentedString(name)).append("\n"); sb.append(" policy: ").append(toIndentedString(policy)).append("\n"); - sb.append(" backwardTill: ").append(toIndentedString(backwardTill)).append("\n"); - sb.append(" forwardTill: ").append(toIndentedString(forwardTill)).append("\n"); + sb.append(" backwardAndForward: ").append(toIndentedString(backwardAndForward)).append("\n"); sb.append("}"); return sb.toString(); } diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/Forward.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/Forward.java new file mode 100644 index 000000000..9dcd5bfe3 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/Forward.java @@ -0,0 +1,92 @@ +/* + * Pravega Schema Registry APIs + * REST APIs for Pravega Schema Registry. + * + * OpenAPI spec version: 0.0.1 + * + * + * NOTE: This class is auto generated by the swagger code generator program. + * https://github.com/swagger-api/swagger-codegen.git + * Do not edit the class manually. 
+ */
+
+
+package io.pravega.schemaregistry.contract.generated.rest.model;
+
+import java.util.Objects;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import io.swagger.annotations.ApiModel;
+import io.swagger.annotations.ApiModelProperty;
+import javax.validation.constraints.*;
+
+/**
+ * ForwardPolicy compatibility type which tells the service to check for forwardPolicy compatibility with latest schema.
+ */
+@ApiModel(description = "ForwardPolicy compatibility type which tells the service to check for forwardPolicy compatibility with latest schema.")
+
+public class Forward {
+  @JsonProperty("name")
+  private String name = null;
+
+  public Forward name(String name) {
+    this.name = name;
+    return this;
+  }
+
+  /**
+   * Get name
+   * @return name
+   **/
+  @JsonProperty("name")
+  @ApiModelProperty(required = true, value = "")
+  @NotNull
+  public String getName() {
+    return name;
+  }
+
+  public void setName(String name) {
+    this.name = name;
+  }
+
+
+  @Override
+  public boolean equals(java.lang.Object o) {
+    if (this == o) {
+      return true;
+    }
+    if (o == null || getClass() != o.getClass()) {
+      return false;
+    }
+    Forward forward = (Forward) o;
+    return Objects.equals(this.name, forward.name);
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(name);
+  }
+
+
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder();
+    sb.append("class Forward {\n");
+
+    sb.append(" name: ").append(toIndentedString(name)).append("\n");
+    sb.append("}");
+    return sb.toString();
+  }
+
+  /**
+   * Convert the given object to string with each line indented by 4 spaces
+   * (except the first line).
+   */
+  private String toIndentedString(java.lang.Object o) {
+    if (o == null) {
+      return "null";
+    }
+    return o.toString().replace("\n", "\n ");
+  }
+}
+
diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/ForwardPolicy.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/ForwardPolicy.java
new file mode 100644
index 000000000..1803110c1
--- /dev/null
+++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/ForwardPolicy.java
@@ -0,0 +1,92 @@
+/*
+ * Pravega Schema Registry APIs
+ * REST APIs for Pravega Schema Registry.
+ *
+ * OpenAPI spec version: 0.0.1
+ *
+ *
+ * NOTE: This class is auto generated by the swagger code generator program.
+ * https://github.com/swagger-api/swagger-codegen.git
+ * Do not edit the class manually.
+ */
+
+
+package io.pravega.schemaregistry.contract.generated.rest.model;
+
+import java.util.Objects;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import io.swagger.annotations.ApiModel;
+import io.swagger.annotations.ApiModelProperty;
+import javax.validation.constraints.*;
+
+/**
+ * ForwardPolicy policy.
+ */
+@ApiModel(description = "ForwardPolicy policy.")
+
+public class ForwardPolicy {
+  @JsonProperty("forwardPolicy")
+  private Object forwardPolicy = null;
+
+  public ForwardPolicy forwardPolicy(Object forwardPolicy) {
+    this.forwardPolicy = forwardPolicy;
+    return this;
+  }
+
+  /**
+   * BackwardAndForward type forwardPolicy. Can be one of forward, forwardTill and forwardTransitive.
+   * @return forwardPolicy
+   **/
+  @JsonProperty("forwardPolicy")
+  @ApiModelProperty(required = true, value = "BackwardAndForward type forwardPolicy. Can be one of forward, forwardTill and forwardTransitive.")
+  @NotNull
+  public Object getForwardPolicy() {
+    return forwardPolicy;
+  }
+
+  public void setForwardPolicy(Object forwardPolicy) {
+    this.forwardPolicy = forwardPolicy;
+  }
+
+
+  @Override
+  public boolean equals(java.lang.Object o) {
+    if (this == o) {
+      return true;
+    }
+    if (o == null || getClass() != o.getClass()) {
+      return false;
+    }
+    ForwardPolicy forwardPolicy = (ForwardPolicy) o;
+    return Objects.equals(this.forwardPolicy, forwardPolicy.forwardPolicy);
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(forwardPolicy);
+  }
+
+
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder();
+    sb.append("class ForwardPolicy {\n");
+
+    sb.append(" forwardPolicy: ").append(toIndentedString(forwardPolicy)).append("\n");
+    sb.append("}");
+    return sb.toString();
+  }
+
+  /**
+   * Convert the given object to string with each line indented by 4 spaces
+   * (except the first line).
+   */
+  private String toIndentedString(java.lang.Object o) {
+    if (o == null) {
+      return "null";
+    }
+    return o.toString().replace("\n", "\n ");
+  }
+}
+
diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/ForwardTill.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/ForwardTill.java
new file mode 100644
index 000000000..e2dd0b840
--- /dev/null
+++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/ForwardTill.java
@@ -0,0 +1,118 @@
+/*
+ * Pravega Schema Registry APIs
+ * REST APIs for Pravega Schema Registry.
+ *
+ * OpenAPI spec version: 0.0.1
+ *
+ *
+ * NOTE: This class is auto generated by the swagger code generator program.
+ * https://github.com/swagger-api/swagger-codegen.git
+ * Do not edit the class manually.
+ */
+
+
+package io.pravega.schemaregistry.contract.generated.rest.model;
+
+import java.util.Objects;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo;
+import io.swagger.annotations.ApiModel;
+import io.swagger.annotations.ApiModelProperty;
+import javax.validation.constraints.*;
+
+/**
+ * ForwardPolicy compatibility which tells the service to check for forwardPolicy compatibility with all previous schemas till specific version.
+ */
+@ApiModel(description = "ForwardPolicy compatibility which tells the service to check for forwardPolicy compatibility with all previous schemas till specific version.")
+
+public class ForwardTill {
+  @JsonProperty("name")
+  private String name = null;
+
+  @JsonProperty("version")
+  private VersionInfo version = null;
+
+  public ForwardTill name(String name) {
+    this.name = name;
+    return this;
+  }
+
+  /**
+   * Get name
+   * @return name
+   **/
+  @JsonProperty("name")
+  @ApiModelProperty(required = true, value = "")
+  @NotNull
+  public String getName() {
+    return name;
+  }
+
+  public void setName(String name) {
+    this.name = name;
+  }
+
+  public ForwardTill version(VersionInfo version) {
+    this.version = version;
+    return this;
+  }
+
+  /**
+   * Version till which forward compatibility is checked.
+   * @return version
+   **/
+  @JsonProperty("version")
+  @ApiModelProperty(required = true, value = "Version till which forward compatibility is checked.")
+  @NotNull
+  public VersionInfo getVersion() {
+    return version;
+  }
+
+  public void setVersion(VersionInfo version) {
+    this.version = version;
+  }
+
+
+  @Override
+  public boolean equals(java.lang.Object o) {
+    if (this == o) {
+      return true;
+    }
+    if (o == null || getClass() != o.getClass()) {
+      return false;
+    }
+    ForwardTill forwardTill = (ForwardTill) o;
+    return Objects.equals(this.name, forwardTill.name) &&
+        Objects.equals(this.version, forwardTill.version);
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(name, version);
+  }
+
+
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder();
+    sb.append("class ForwardTill {\n");
+
+    sb.append(" name: ").append(toIndentedString(name)).append("\n");
+    sb.append(" version: ").append(toIndentedString(version)).append("\n");
+    sb.append("}");
+    return sb.toString();
+  }
+
+  /**
+   * Convert the given object to string with each line indented by 4 spaces
+   * (except the first line).
+   */
+  private String toIndentedString(java.lang.Object o) {
+    if (o == null) {
+      return "null";
+    }
+    return o.toString().replace("\n", "\n ");
+  }
+}
+
diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaValidationRule.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/ForwardTransitive.java
similarity index 54%
rename from contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaValidationRule.java
rename to contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/ForwardTransitive.java
index 9fb9ee11d..2adb2e41f 100644
--- a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaValidationRule.java
+++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/ForwardTransitive.java
@@ -21,32 +21,32 @@ import javax.validation.constraints.*;
 
 /**
- * Schema validation rule base class.
+ * ForwardPolicy compatibility type which tells the service to check for forwardPolicy compatibility with all previous schemas.
  */
-@ApiModel(description = "Schema validation rule base class.")
+@ApiModel(description = "ForwardPolicy compatibility type which tells the service to check for forwardPolicy compatibility with all previous schemas.")
 
-public class SchemaValidationRule {
-  @JsonProperty("rule")
-  private Object rule = null;
+public class ForwardTransitive {
+  @JsonProperty("name")
+  private String name = null;
 
-  public SchemaValidationRule rule(Object rule) {
-    this.rule = rule;
+  public ForwardTransitive name(String name) {
+    this.name = name;
     return this;
   }
 
   /**
-   * Specific schema validation rule. The only rule we have presently is Compatibility. The \"name\" is used to identify specific Rule type.
-   * @return rule
+   * Get name
+   * @return name
   **/
-  @JsonProperty("rule")
-  @ApiModelProperty(required = true, value = "Specific schema validation rule. The only rule we have presently is Compatibility. The \"name\" is used to identify specific Rule type.
The only rule supported in this is Compatibility.") + @JsonProperty("name") + @ApiModelProperty(required = true, value = "") @NotNull - public Object getRule() { - return rule; + public String getName() { + return name; } - public void setRule(Object rule) { - this.rule = rule; + public void setName(String name) { + this.name = name; } @@ -58,22 +58,22 @@ public boolean equals(java.lang.Object o) { if (o == null || getClass() != o.getClass()) { return false; } - SchemaValidationRule schemaValidationRule = (SchemaValidationRule) o; - return Objects.equals(this.rule, schemaValidationRule.rule); + ForwardTransitive forwardTransitive = (ForwardTransitive) o; + return Objects.equals(this.name, forwardTransitive.name); } @Override public int hashCode() { - return Objects.hash(rule); + return Objects.hash(name); } @Override public String toString() { StringBuilder sb = new StringBuilder(); - sb.append("class SchemaValidationRule {\n"); + sb.append("class ForwardTransitive {\n"); - sb.append(" rule: ").append(toIndentedString(rule)).append("\n"); + sb.append(" name: ").append(toIndentedString(name)).append("\n"); sb.append("}"); return sb.toString(); } diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/GroupHistoryRecord.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/GroupHistoryRecord.java index 6d7dd7476..d993a454c 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/GroupHistoryRecord.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/GroupHistoryRecord.java @@ -16,17 +16,17 @@ import java.util.Objects; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonCreator; +import io.pravega.schemaregistry.contract.generated.rest.model.Compatibility; import io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo; -import io.pravega.schemaregistry.contract.generated.rest.model.SchemaValidationRules; import io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo; import io.swagger.annotations.ApiModel; import io.swagger.annotations.ApiModelProperty; import javax.validation.constraints.*; /** - * Group History Record that describes each schema evolution - schema information, version generated for the schema, time and rules used for schema validation. + * Group History Record that describes each schema evolution - schema information, version generated for the schema, time and compatibility policy used for schema validation. 
*/ -@ApiModel(description = "Group History Record that describes each schema evolution - schema information, version generated for the schema, time and rules used for schema validation.") +@ApiModel(description = "Group History Record that describes each schema evolution - schema information, version generated for the schema, time and compatibility policy used for schema validation.") public class GroupHistoryRecord { @JsonProperty("schemaInfo") @@ -35,8 +35,8 @@ public class GroupHistoryRecord { @JsonProperty("version") private VersionInfo version = null; - @JsonProperty("validationRules") - private SchemaValidationRules validationRules = null; + @JsonProperty("compatibility") + private Compatibility compatibility = null; @JsonProperty("timestamp") private Long timestamp = null; @@ -84,24 +84,24 @@ public void setVersion(VersionInfo version) { this.version = version; } - public GroupHistoryRecord validationRules(SchemaValidationRules validationRules) { - this.validationRules = validationRules; + public GroupHistoryRecord compatibility(Compatibility compatibility) { + this.compatibility = compatibility; return this; } /** - * Schema validation rules applied. - * @return validationRules + * Schema compatibility applied. + * @return compatibility **/ - @JsonProperty("validationRules") - @ApiModelProperty(required = true, value = "Schema validation rules applied.") + @JsonProperty("compatibility") + @ApiModelProperty(required = true, value = "Schema compatibility applied.") @NotNull - public SchemaValidationRules getValidationRules() { - return validationRules; + public Compatibility getCompatibility() { + return compatibility; } - public void setValidationRules(SchemaValidationRules validationRules) { - this.validationRules = validationRules; + public void setCompatibility(Compatibility compatibility) { + this.compatibility = compatibility; } public GroupHistoryRecord timestamp(Long timestamp) { @@ -155,14 +155,14 @@ public boolean equals(java.lang.Object o) { GroupHistoryRecord groupHistoryRecord = (GroupHistoryRecord) o; return Objects.equals(this.schemaInfo, groupHistoryRecord.schemaInfo) && Objects.equals(this.version, groupHistoryRecord.version) && - Objects.equals(this.validationRules, groupHistoryRecord.validationRules) && + Objects.equals(this.compatibility, groupHistoryRecord.compatibility) && Objects.equals(this.timestamp, groupHistoryRecord.timestamp) && Objects.equals(this.schemaString, groupHistoryRecord.schemaString); } @Override public int hashCode() { - return Objects.hash(schemaInfo, version, validationRules, timestamp, schemaString); + return Objects.hash(schemaInfo, version, compatibility, timestamp, schemaString); } @@ -173,7 +173,7 @@ public String toString() { sb.append(" schemaInfo: ").append(toIndentedString(schemaInfo)).append("\n"); sb.append(" version: ").append(toIndentedString(version)).append("\n"); - sb.append(" validationRules: ").append(toIndentedString(validationRules)).append("\n"); + sb.append(" compatibility: ").append(toIndentedString(compatibility)).append("\n"); sb.append(" timestamp: ").append(toIndentedString(timestamp)).append("\n"); sb.append(" schemaString: ").append(toIndentedString(schemaString)).append("\n"); sb.append("}"); diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/GroupProperties.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/GroupProperties.java index 4bbb60b12..dac7067a6 100644 --- 
a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/GroupProperties.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/GroupProperties.java @@ -16,7 +16,7 @@ import java.util.Objects; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonCreator; -import io.pravega.schemaregistry.contract.generated.rest.model.SchemaValidationRules; +import io.pravega.schemaregistry.contract.generated.rest.model.Compatibility; import io.pravega.schemaregistry.contract.generated.rest.model.SerializationFormat; import io.swagger.annotations.ApiModel; import io.swagger.annotations.ApiModelProperty; @@ -34,8 +34,8 @@ public class GroupProperties { @JsonProperty("serializationFormat") private SerializationFormat serializationFormat = null; - @JsonProperty("schemaValidationRules") - private SchemaValidationRules schemaValidationRules = null; + @JsonProperty("compatibility") + private Compatibility compatibility = null; @JsonProperty("allowMultipleTypes") private Boolean allowMultipleTypes = null; @@ -63,24 +63,24 @@ public void setSerializationFormat(SerializationFormat serializationFormat) { this.serializationFormat = serializationFormat; } - public GroupProperties schemaValidationRules(SchemaValidationRules schemaValidationRules) { - this.schemaValidationRules = schemaValidationRules; + public GroupProperties compatibility(Compatibility compatibility) { + this.compatibility = compatibility; return this; } /** - * Validation rules to apply while registering new schema. - * @return schemaValidationRules + * Compatibility to apply while registering new schema. + * @return compatibility **/ - @JsonProperty("schemaValidationRules") - @ApiModelProperty(required = true, value = "Validation rules to apply while registering new schema.") + @JsonProperty("compatibility") + @ApiModelProperty(required = true, value = "Compatibility to apply while registering new schema.") @NotNull - public SchemaValidationRules getSchemaValidationRules() { - return schemaValidationRules; + public Compatibility getCompatibility() { + return compatibility; } - public void setSchemaValidationRules(SchemaValidationRules schemaValidationRules) { - this.schemaValidationRules = schemaValidationRules; + public void setCompatibility(Compatibility compatibility) { + this.compatibility = compatibility; } public GroupProperties allowMultipleTypes(Boolean allowMultipleTypes) { @@ -141,14 +141,14 @@ public boolean equals(java.lang.Object o) { } GroupProperties groupProperties = (GroupProperties) o; return Objects.equals(this.serializationFormat, groupProperties.serializationFormat) && - Objects.equals(this.schemaValidationRules, groupProperties.schemaValidationRules) && + Objects.equals(this.compatibility, groupProperties.compatibility) && Objects.equals(this.allowMultipleTypes, groupProperties.allowMultipleTypes) && Objects.equals(this.properties, groupProperties.properties); } @Override public int hashCode() { - return Objects.hash(serializationFormat, schemaValidationRules, allowMultipleTypes, properties); + return Objects.hash(serializationFormat, compatibility, allowMultipleTypes, properties); } @@ -158,7 +158,7 @@ public String toString() { sb.append("class GroupProperties {\n"); sb.append(" serializationFormat: ").append(toIndentedString(serializationFormat)).append("\n"); - sb.append(" schemaValidationRules: ").append(toIndentedString(schemaValidationRules)).append("\n"); + sb.append(" compatibility: 
").append(toIndentedString(compatibility)).append("\n"); sb.append(" allowMultipleTypes: ").append(toIndentedString(allowMultipleTypes)).append("\n"); sb.append(" properties: ").append(toIndentedString(properties)).append("\n"); sb.append("}"); diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaInfo.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaInfo.java index 2be4282ab..697c1ae1f 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaInfo.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaInfo.java @@ -142,7 +142,7 @@ public boolean equals(java.lang.Object o) { SchemaInfo schemaInfo = (SchemaInfo) o; return Objects.equals(this.type, schemaInfo.type) && Objects.equals(this.serializationFormat, schemaInfo.serializationFormat) && - Arrays.equals(this.schemaData, schemaInfo.schemaData) && + Arrays.equals(this.schemaData, schemaInfo.schemaData) && Objects.equals(this.properties, schemaInfo.properties); } diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaValidationRules.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaValidationRules.java deleted file mode 100644 index 0f9d7af0b..000000000 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaValidationRules.java +++ /dev/null @@ -1,103 +0,0 @@ -/* - * Pravega Schema Registry APIs - * REST APIs for Pravega Schema Registry. - * - * OpenAPI spec version: 0.0.1 - * - * - * NOTE: This class is auto generated by the swagger code generator program. - * https://github.com/swagger-api/swagger-codegen.git - * Do not edit the class manually. - */ - - -package io.pravega.schemaregistry.contract.generated.rest.model; - -import java.util.Objects; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonCreator; -import io.pravega.schemaregistry.contract.generated.rest.model.SchemaValidationRule; -import io.swagger.annotations.ApiModel; -import io.swagger.annotations.ApiModelProperty; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import javax.validation.constraints.*; - -/** - * Schema validation rules to be applied for new schema addition. Currently only one rule is supported - Compatibility. - */ -@ApiModel(description = "Schema validation rules to be applied for new schema addition. 
Currently only one rule is supported - Compatibility.") - -public class SchemaValidationRules { - @JsonProperty("rules") - private Map rules = null; - - public SchemaValidationRules rules(Map rules) { - this.rules = rules; - return this; - } - - public SchemaValidationRules putRulesItem(String key, SchemaValidationRule rulesItem) { - if (this.rules == null) { - this.rules = new HashMap(); - } - this.rules.put(key, rulesItem); - return this; - } - - /** - * Get rules - * @return rules - **/ - @JsonProperty("rules") - @ApiModelProperty(value = "") - public Map getRules() { - return rules; - } - - public void setRules(Map rules) { - this.rules = rules; - } - - - @Override - public boolean equals(java.lang.Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - SchemaValidationRules schemaValidationRules = (SchemaValidationRules) o; - return Objects.equals(this.rules, schemaValidationRules.rules); - } - - @Override - public int hashCode() { - return Objects.hash(rules); - } - - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append("class SchemaValidationRules {\n"); - - sb.append(" rules: ").append(toIndentedString(rules)).append("\n"); - sb.append("}"); - return sb.toString(); - } - - /** - * Convert the given object to string with each line indented by 4 spaces - * (except the first line). - */ - private String toIndentedString(java.lang.Object o) { - if (o == null) { - return "null"; - } - return o.toString().replace("\n", "\n "); - } -} - diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/UpdateCompatibilityRequest.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/UpdateCompatibilityRequest.java new file mode 100644 index 000000000..9b8e6720e --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/UpdateCompatibilityRequest.java @@ -0,0 +1,116 @@ +/* + * Pravega Schema Registry APIs + * REST APIs for Pravega Schema Registry. + * + * OpenAPI spec version: 0.0.1 + * + * + * NOTE: This class is auto generated by the swagger code generator program. + * https://github.com/swagger-api/swagger-codegen.git + * Do not edit the class manually. 
+ */ + + +package io.pravega.schemaregistry.contract.generated.rest.model; + +import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonCreator; +import io.pravega.schemaregistry.contract.generated.rest.model.Compatibility; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import javax.validation.constraints.*; + +/** + * UpdateCompatibilityRequest + */ + +public class UpdateCompatibilityRequest { + @JsonProperty("compatibility") + private Compatibility compatibility = null; + + @JsonProperty("previousCompatibility") + private Compatibility previousCompatibility = null; + + public UpdateCompatibilityRequest compatibility(Compatibility compatibility) { + this.compatibility = compatibility; + return this; + } + + /** + * Get compatibility + * @return compatibility + **/ + @JsonProperty("compatibility") + @ApiModelProperty(required = true, value = "") + @NotNull + public Compatibility getCompatibility() { + return compatibility; + } + + public void setCompatibility(Compatibility compatibility) { + this.compatibility = compatibility; + } + + public UpdateCompatibilityRequest previousCompatibility(Compatibility previousCompatibility) { + this.previousCompatibility = previousCompatibility; + return this; + } + + /** + * Get previousCompatibility + * @return previousCompatibility + **/ + @JsonProperty("previousCompatibility") + @ApiModelProperty(value = "") + public Compatibility getPreviousCompatibility() { + return previousCompatibility; + } + + public void setPreviousCompatibility(Compatibility previousCompatibility) { + this.previousCompatibility = previousCompatibility; + } + + + @Override + public boolean equals(java.lang.Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + UpdateCompatibilityRequest updateCompatibilityRequest = (UpdateCompatibilityRequest) o; + return Objects.equals(this.compatibility, updateCompatibilityRequest.compatibility) && + Objects.equals(this.previousCompatibility, updateCompatibilityRequest.previousCompatibility); + } + + @Override + public int hashCode() { + return Objects.hash(compatibility, previousCompatibility); + } + + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class UpdateCompatibilityRequest {\n"); + + sb.append(" compatibility: ").append(toIndentedString(compatibility)).append("\n"); + sb.append(" previousCompatibility: ").append(toIndentedString(previousCompatibility)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces + * (except the first line). + */ + private String toIndentedString(java.lang.Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} + diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/UpdateValidationRulesRequest.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/UpdateValidationRulesRequest.java deleted file mode 100644 index 92cdef2d9..000000000 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/UpdateValidationRulesRequest.java +++ /dev/null @@ -1,116 +0,0 @@ -/* - * Pravega Schema Registry APIs - * REST APIs for Pravega Schema Registry. - * - * OpenAPI spec version: 0.0.1 - * - * - * NOTE: This class is auto generated by the swagger code generator program. 
- * https://github.com/swagger-api/swagger-codegen.git - * Do not edit the class manually. - */ - - -package io.pravega.schemaregistry.contract.generated.rest.model; - -import java.util.Objects; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonCreator; -import io.pravega.schemaregistry.contract.generated.rest.model.SchemaValidationRules; -import io.swagger.annotations.ApiModel; -import io.swagger.annotations.ApiModelProperty; -import javax.validation.constraints.*; - -/** - * UpdateValidationRulesRequest - */ - -public class UpdateValidationRulesRequest { - @JsonProperty("validationRules") - private SchemaValidationRules validationRules = null; - - @JsonProperty("previousRules") - private SchemaValidationRules previousRules = null; - - public UpdateValidationRulesRequest validationRules(SchemaValidationRules validationRules) { - this.validationRules = validationRules; - return this; - } - - /** - * Get validationRules - * @return validationRules - **/ - @JsonProperty("validationRules") - @ApiModelProperty(required = true, value = "") - @NotNull - public SchemaValidationRules getValidationRules() { - return validationRules; - } - - public void setValidationRules(SchemaValidationRules validationRules) { - this.validationRules = validationRules; - } - - public UpdateValidationRulesRequest previousRules(SchemaValidationRules previousRules) { - this.previousRules = previousRules; - return this; - } - - /** - * Get previousRules - * @return previousRules - **/ - @JsonProperty("previousRules") - @ApiModelProperty(value = "") - public SchemaValidationRules getPreviousRules() { - return previousRules; - } - - public void setPreviousRules(SchemaValidationRules previousRules) { - this.previousRules = previousRules; - } - - - @Override - public boolean equals(java.lang.Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - UpdateValidationRulesRequest updateValidationRulesRequest = (UpdateValidationRulesRequest) o; - return Objects.equals(this.validationRules, updateValidationRulesRequest.validationRules) && - Objects.equals(this.previousRules, updateValidationRulesRequest.previousRules); - } - - @Override - public int hashCode() { - return Objects.hash(validationRules, previousRules); - } - - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append("class UpdateValidationRulesRequest {\n"); - - sb.append(" validationRules: ").append(toIndentedString(validationRules)).append("\n"); - sb.append(" previousRules: ").append(toIndentedString(previousRules)).append("\n"); - sb.append("}"); - return sb.toString(); - } - - /** - * Convert the given object to string with each line indented by 4 spaces - * (except the first line). 
- */ - private String toIndentedString(java.lang.Object o) { - if (o == null) { - return "null"; - } - return o.toString().replace("\n", "\n "); - } -} - diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/Valid.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/Valid.java index bde7b3f10..69a09ba93 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/Valid.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/Valid.java @@ -35,11 +35,11 @@ public Valid valid(Boolean valid) { } /** - * Whether given schema is valid with respect to existing group schemas against the configured validation rules. + * Whether given schema is valid with respect to existing group schemas against the configured compatibility. * @return valid **/ @JsonProperty("valid") - @ApiModelProperty(required = true, value = "Whether given schema is valid with respect to existing group schemas against the configured validation rules.") + @ApiModelProperty(required = true, value = "Whether given schema is valid with respect to existing group schemas against the configured compatibility.") @NotNull public Boolean isValid() { return valid; diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/ValidateRequest.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/ValidateRequest.java index 5daa183df..b6a4c974a 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/ValidateRequest.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/ValidateRequest.java @@ -16,8 +16,8 @@ import java.util.Objects; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonCreator; +import io.pravega.schemaregistry.contract.generated.rest.model.Compatibility; import io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo; -import io.pravega.schemaregistry.contract.generated.rest.model.SchemaValidationRules; import io.swagger.annotations.ApiModel; import io.swagger.annotations.ApiModelProperty; import javax.validation.constraints.*; @@ -30,8 +30,8 @@ public class ValidateRequest { @JsonProperty("schemaInfo") private SchemaInfo schemaInfo = null; - @JsonProperty("validationRules") - private SchemaValidationRules validationRules = null; + @JsonProperty("compatibility") + private Compatibility compatibility = null; public ValidateRequest schemaInfo(SchemaInfo schemaInfo) { this.schemaInfo = schemaInfo; @@ -53,23 +53,23 @@ public void setSchemaInfo(SchemaInfo schemaInfo) { this.schemaInfo = schemaInfo; } - public ValidateRequest validationRules(SchemaValidationRules validationRules) { - this.validationRules = validationRules; + public ValidateRequest compatibility(Compatibility compatibility) { + this.compatibility = compatibility; return this; } /** - * Get validationRules - * @return validationRules + * Get compatibility + * @return compatibility **/ - @JsonProperty("validationRules") + @JsonProperty("compatibility") @ApiModelProperty(value = "") - public SchemaValidationRules getValidationRules() { - return validationRules; + public Compatibility getCompatibility() { + return compatibility; } - public void setValidationRules(SchemaValidationRules validationRules) { - this.validationRules = validationRules; + public void setCompatibility(Compatibility compatibility) { + this.compatibility = compatibility; } @@ 
-83,12 +83,12 @@ public boolean equals(java.lang.Object o) { } ValidateRequest validateRequest = (ValidateRequest) o; return Objects.equals(this.schemaInfo, validateRequest.schemaInfo) && - Objects.equals(this.validationRules, validateRequest.validationRules); + Objects.equals(this.compatibility, validateRequest.compatibility); } @Override public int hashCode() { - return Objects.hash(schemaInfo, validationRules); + return Objects.hash(schemaInfo, compatibility); } @@ -98,7 +98,7 @@ public String toString() { sb.append("class ValidateRequest {\n"); sb.append(" schemaInfo: ").append(toIndentedString(schemaInfo)).append("\n"); - sb.append(" validationRules: ").append(toIndentedString(validationRules)).append("\n"); + sb.append(" compatibility: ").append(toIndentedString(compatibility)).append("\n"); sb.append("}"); return sb.toString(); } diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/VersionInfo.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/VersionInfo.java index 9b4c2603d..e3fc20e42 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/VersionInfo.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/VersionInfo.java @@ -32,8 +32,8 @@ public class VersionInfo { @JsonProperty("version") private Integer version = null; - @JsonProperty("ordinal") - private Integer ordinal = null; + @JsonProperty("id") + private Integer id = null; public VersionInfo type(String type) { this.type = type; @@ -75,24 +75,24 @@ public void setVersion(Integer version) { this.version = version; } - public VersionInfo ordinal(Integer ordinal) { - this.ordinal = ordinal; + public VersionInfo id(Integer id) { + this.id = id; return this; } /** - * Version ordinal that uniquely identifies the position of the corresponding schema across all schemas in the group. - * @return ordinal + * schema id that uniquely identifies schema version and describes the absolute order in which the schema was added to the group. 
+ * @return id **/ - @JsonProperty("ordinal") - @ApiModelProperty(required = true, value = "Version ordinal that uniquely identifies the position of the corresponding schema across all schemas in the group.") + @JsonProperty("id") + @ApiModelProperty(required = true, value = "schema id that uniquely identifies schema version and describes the absolute order in which the schema was added to the group.") @NotNull - public Integer getOrdinal() { - return ordinal; + public Integer getId() { + return id; } - public void setOrdinal(Integer ordinal) { - this.ordinal = ordinal; + public void setId(Integer id) { + this.id = id; } @@ -107,12 +107,12 @@ public boolean equals(java.lang.Object o) { VersionInfo versionInfo = (VersionInfo) o; return Objects.equals(this.type, versionInfo.type) && Objects.equals(this.version, versionInfo.version) && - Objects.equals(this.ordinal, versionInfo.ordinal); + Objects.equals(this.id, versionInfo.id); } @Override public int hashCode() { - return Objects.hash(type, version, ordinal); + return Objects.hash(type, version, id); } @@ -123,7 +123,7 @@ public String toString() { sb.append(" type: ").append(toIndentedString(type)).append("\n"); sb.append(" version: ").append(toIndentedString(version)).append("\n"); - sb.append(" ordinal: ").append(toIndentedString(ordinal)).append("\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); sb.append("}"); return sb.toString(); } diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/transform/ModelHelper.java b/contract/src/main/java/io/pravega/schemaregistry/contract/transform/ModelHelper.java index 47beb9d28..6e028b71d 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/transform/ModelHelper.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/transform/ModelHelper.java @@ -1,10 +1,10 @@ /** * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. - * + *

* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + *

* http://www.apache.org/licenses/LICENSE-2.0 */ package io.pravega.schemaregistry.contract.transform; @@ -12,29 +12,39 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableMap; +import io.pravega.schemaregistry.contract.generated.rest.model.Backward; +import io.pravega.schemaregistry.contract.generated.rest.model.BackwardAndForward; +import io.pravega.schemaregistry.contract.generated.rest.model.BackwardPolicy; +import io.pravega.schemaregistry.contract.generated.rest.model.BackwardTill; +import io.pravega.schemaregistry.contract.generated.rest.model.BackwardTransitive; import io.pravega.schemaregistry.contract.generated.rest.model.Compatibility; import io.pravega.schemaregistry.contract.generated.rest.model.EncodingId; import io.pravega.schemaregistry.contract.generated.rest.model.EncodingInfo; +import io.pravega.schemaregistry.contract.generated.rest.model.Forward; +import io.pravega.schemaregistry.contract.generated.rest.model.ForwardPolicy; +import io.pravega.schemaregistry.contract.generated.rest.model.ForwardTill; +import io.pravega.schemaregistry.contract.generated.rest.model.ForwardTransitive; import io.pravega.schemaregistry.contract.generated.rest.model.GroupHistoryRecord; import io.pravega.schemaregistry.contract.generated.rest.model.GroupProperties; import io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo; -import io.pravega.schemaregistry.contract.generated.rest.model.SchemaValidationRule; -import io.pravega.schemaregistry.contract.generated.rest.model.SchemaValidationRules; import io.pravega.schemaregistry.contract.generated.rest.model.SchemaWithVersion; import io.pravega.schemaregistry.contract.generated.rest.model.SerializationFormat; import io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo; -import org.apache.commons.lang3.NotImplementedException; import java.nio.ByteBuffer; -import java.util.List; import java.util.Map; -import java.util.stream.Collectors; /** * Provides translation (encode/decode) between the Model classes and its REST representation. 
*/ public class ModelHelper { private static final ObjectMapper MAPPER = new ObjectMapper(); + private static final String BACKWARD = Backward.class.getSimpleName(); + private static final String BACKWARD_TRANSITIVE = BackwardTransitive.class.getSimpleName(); + private static final String BACKWARD_TILL = BackwardTill.class.getSimpleName(); + private static final String FORWARD = Forward.class.getSimpleName(); + private static final String FORWARD_TILL = ForwardTill.class.getSimpleName(); + private static final String FORWARD_TRANSITIVE = ForwardTransitive.class.getSimpleName(); // region decode public static io.pravega.schemaregistry.contract.data.SchemaInfo decode(SchemaInfo schemaInfo) { @@ -59,53 +69,97 @@ public static io.pravega.schemaregistry.contract.data.SerializationFormat decode } } - public static io.pravega.schemaregistry.contract.data.SchemaValidationRules decode(SchemaValidationRules rules) { - Preconditions.checkArgument(rules != null && rules.getRules() != null && rules.getRules().size() == 1); - List list = rules.getRules().entrySet().stream().map(rule -> { - if (rule.getValue().getRule() instanceof Map) { - String name = (String) ((Map) rule.getValue().getRule()).get("name"); - Preconditions.checkArgument(name.equals(Compatibility.class.getSimpleName())); - - return decode(MAPPER.convertValue(rule.getValue().getRule(), Compatibility.class)); - } else if (rule.getValue().getRule() instanceof Compatibility) { - return decode((Compatibility) rule.getValue().getRule()); - } else { - throw new IllegalArgumentException("Rule not supported"); - } - }).collect(Collectors.toList()); - - return io.pravega.schemaregistry.contract.data.SchemaValidationRules.of( - (io.pravega.schemaregistry.contract.data.Compatibility) list.get(0)); + public static io.pravega.schemaregistry.contract.data.Compatibility decode(Compatibility compatibility) { + io.pravega.schemaregistry.contract.data.Compatibility.Type type = searchEnum(io.pravega.schemaregistry.contract.data.Compatibility.Type.class, compatibility.getPolicy().name()); + switch (type) { + case AllowAny: + return io.pravega.schemaregistry.contract.data.Compatibility.allowAny(); + case DenyAll: + return io.pravega.schemaregistry.contract.data.Compatibility.denyAll(); + case BackwardAndForward: + return io.pravega.schemaregistry.contract.data.Compatibility + .builder() + .type(type) + .backwardAndForward(decode(compatibility.getBackwardAndForward())).build(); + default: + throw new IllegalArgumentException(); + } } - public static io.pravega.schemaregistry.contract.data.Compatibility decode(Compatibility compatibility) { - Preconditions.checkArgument(compatibility.getName() != null); - Preconditions.checkArgument(compatibility.getPolicy() != null); - if (compatibility.getPolicy().equals(Compatibility.PolicyEnum.BACKWARDTILL)) { - Preconditions.checkArgument(compatibility.getBackwardTill() != null); + public static io.pravega.schemaregistry.contract.data.BackwardAndForward decode(BackwardAndForward compatibility) { + Preconditions.checkArgument(compatibility.getBackwardPolicy() != null || compatibility.getForwardPolicy() != null); + + io.pravega.schemaregistry.contract.data.BackwardAndForward.BackwardAndForwardBuilder builder = + io.pravega.schemaregistry.contract.data.BackwardAndForward.builder(); + if (compatibility.getBackwardPolicy() != null) { + builder.backwardPolicy(decode(compatibility.getBackwardPolicy())); } - if (compatibility.getPolicy().equals(Compatibility.PolicyEnum.FORWARDTILL)) { - 
Preconditions.checkArgument(compatibility.getForwardTill() != null);
+        if (compatibility.getForwardPolicy() != null) {
+            builder.forwardPolicy(decode(compatibility.getForwardPolicy()));
+        }
-        if (compatibility.getPolicy().equals(Compatibility.PolicyEnum.BACKWARDANDFORWARDTILL)) {
-            Preconditions.checkArgument(compatibility.getBackwardTill() != null);
-            Preconditions.checkArgument(compatibility.getForwardTill() != null);
+        return builder.build();
+    }
+
+    public static io.pravega.schemaregistry.contract.data.BackwardAndForward.BackwardPolicy decode(BackwardPolicy backward) {
+        Object obj = backward.getBackwardPolicy();
+        if (backward.getBackwardPolicy() instanceof Map) {
+            String name = (String) ((Map) backward.getBackwardPolicy()).get("name");
+            if (name.equals(BACKWARD)) {
+                obj = MAPPER.convertValue(backward.getBackwardPolicy(), Backward.class);
+            } else if (name.equals(BACKWARD_TRANSITIVE)) {
+                obj = MAPPER.convertValue(backward.getBackwardPolicy(), BackwardTransitive.class);
+            } else if (name.equals(BACKWARD_TILL)) {
+                obj = MAPPER.convertValue(backward.getBackwardPolicy(), BackwardTill.class);
+            } else {
+                throw new IllegalArgumentException();
+            }
+        }
-        io.pravega.schemaregistry.contract.data.VersionInfo backwardTill = compatibility.getBackwardTill() == null ? null : decode(compatibility.getBackwardTill());
-        io.pravega.schemaregistry.contract.data.VersionInfo forwardTill = compatibility.getForwardTill() == null ? null : decode(compatibility.getForwardTill());
+        if (obj instanceof Backward) {
+            return new io.pravega.schemaregistry.contract.data.BackwardAndForward.Backward();
+        } else if (obj instanceof BackwardTill) {
+            return new io.pravega.schemaregistry.contract.data.BackwardAndForward.BackwardTill(
+                    decode(((io.pravega.schemaregistry.contract.generated.rest.model.BackwardTill) obj).getVersion()));
+        } else if (obj instanceof BackwardTransitive) {
+            return new io.pravega.schemaregistry.contract.data.BackwardAndForward.BackwardTransitive();
+        } else {
+            throw new IllegalArgumentException("Rule not supported");
+        }
+    }
+
+    public static io.pravega.schemaregistry.contract.data.BackwardAndForward.ForwardPolicy decode(io.pravega.schemaregistry.contract.generated.rest.model.ForwardPolicy forward) {
+        Object obj = forward.getForwardPolicy();
+        if (forward.getForwardPolicy() instanceof Map) {
+            String name = (String) ((Map) forward.getForwardPolicy()).get("name");
+            if (name.equals(FORWARD)) {
+                obj = MAPPER.convertValue(forward.getForwardPolicy(), Forward.class);
+            } else if (name.equals(FORWARD_TRANSITIVE)) {
+                obj = MAPPER.convertValue(forward.getForwardPolicy(), ForwardTransitive.class);
+            } else if (name.equals(FORWARD_TILL)) {
+                obj = MAPPER.convertValue(forward.getForwardPolicy(), ForwardTill.class);
+            } else {
+                throw new IllegalArgumentException();
+            }
+        }
-        return io.pravega.schemaregistry.contract.data.Compatibility.builder().compatibility(
-                searchEnum(io.pravega.schemaregistry.contract.data.Compatibility.Type.class, compatibility.getPolicy().name()))
-                .backwardTill(backwardTill).forwardTill(forwardTill).build();
+        if (obj instanceof Forward) {
+            return new io.pravega.schemaregistry.contract.data.BackwardAndForward.Forward();
+        } else if (obj instanceof ForwardTill) {
+            return new io.pravega.schemaregistry.contract.data.BackwardAndForward.ForwardTill(
+                    decode(((io.pravega.schemaregistry.contract.generated.rest.model.ForwardTill) obj).getVersion()));
+        } else if (obj instanceof ForwardTransitive) {
+            return new
io.pravega.schemaregistry.contract.data.BackwardAndForward.ForwardTransitive(); + } else { + throw new IllegalArgumentException("Rule not supported"); + } } - + public static io.pravega.schemaregistry.contract.data.VersionInfo decode(VersionInfo versionInfo) { Preconditions.checkArgument(versionInfo != null); Preconditions.checkArgument(versionInfo.getType() != null); Preconditions.checkArgument(versionInfo.getVersion() != null); - Preconditions.checkArgument(versionInfo.getOrdinal() != null); - return new io.pravega.schemaregistry.contract.data.VersionInfo(versionInfo.getType(), versionInfo.getVersion(), versionInfo.getOrdinal()); + Preconditions.checkArgument(versionInfo.getId() != null); + return new io.pravega.schemaregistry.contract.data.VersionInfo(versionInfo.getType(), versionInfo.getVersion(), versionInfo.getId()); } public static io.pravega.schemaregistry.contract.data.EncodingInfo decode(EncodingInfo encodingInfo) { @@ -124,7 +178,7 @@ public static io.pravega.schemaregistry.contract.data.GroupHistoryRecord decode( Preconditions.checkArgument(schemaEvolution != null); return new io.pravega.schemaregistry.contract.data.GroupHistoryRecord(decode(schemaEvolution.getSchemaInfo()), - decode(schemaEvolution.getVersion()), decode(schemaEvolution.getValidationRules()), schemaEvolution.getTimestamp(), + decode(schemaEvolution.getVersion()), decode(schemaEvolution.getCompatibility()), schemaEvolution.getTimestamp(), schemaEvolution.getSchemaString()); } @@ -140,7 +194,7 @@ public static io.pravega.schemaregistry.contract.data.GroupProperties decode(Gro Preconditions.checkArgument(groupProperties.isAllowMultipleTypes() != null); return io.pravega.schemaregistry.contract.data.GroupProperties.builder().serializationFormat(decode(groupProperties.getSerializationFormat())) - .schemaValidationRules(decode(groupProperties.getSchemaValidationRules())).allowMultipleTypes(groupProperties.isAllowMultipleTypes()) + .compatibility(decode(groupProperties.getCompatibility())).allowMultipleTypes(groupProperties.isAllowMultipleTypes()) .properties(ImmutableMap.copyOf(groupProperties.getProperties())).build(); } // endregion @@ -149,45 +203,57 @@ public static io.pravega.schemaregistry.contract.data.GroupProperties decode(Gro public static GroupHistoryRecord encode(io.pravega.schemaregistry.contract.data.GroupHistoryRecord groupHistoryRecord) { return new GroupHistoryRecord().schemaInfo(encode(groupHistoryRecord.getSchema())) .version(encode(groupHistoryRecord.getVersion())) - .validationRules(encode(groupHistoryRecord.getRules())) + .compatibility(encode(groupHistoryRecord.getCompatibility())) .timestamp(groupHistoryRecord.getTimestamp()) .schemaString(groupHistoryRecord.getSchemaString()); } - public static SchemaValidationRules encode(io.pravega.schemaregistry.contract.data.SchemaValidationRules rules) { - Map map = rules.getRules().entrySet().stream().collect(Collectors.toMap(rule -> { - if (rule.getValue() instanceof io.pravega.schemaregistry.contract.data.Compatibility) { - return io.pravega.schemaregistry.contract.generated.rest.model.Compatibility.class.getSimpleName(); - } else { - throw new NotImplementedException("Rule not implemented"); - } - }, rule -> { - SchemaValidationRule schemaValidationRule; - if (rule.getValue() instanceof io.pravega.schemaregistry.contract.data.Compatibility) { - schemaValidationRule = new SchemaValidationRule().rule(encode((io.pravega.schemaregistry.contract.data.Compatibility) rule.getValue())); - } else { - throw new NotImplementedException("Rule not implemented"); 
- } - return schemaValidationRule; - })); - return new SchemaValidationRules().rules(map); - } - public static Compatibility encode(io.pravega.schemaregistry.contract.data.Compatibility compatibility) { Compatibility policy = new io.pravega.schemaregistry.contract.generated.rest.model.Compatibility() - .name(compatibility.getName()) - .policy(searchEnum(Compatibility.PolicyEnum.class, compatibility.getCompatibility().name())); - if (compatibility.getBackwardTill() != null) { - VersionInfo backwardTill = encode(compatibility.getBackwardTill()); - policy = policy.backwardTill(backwardTill); - } - if (compatibility.getForwardTill() != null) { - VersionInfo forwardTill = encode(compatibility.getForwardTill()); - policy = policy.forwardTill(forwardTill); + .policy(searchEnum(Compatibility.PolicyEnum.class, compatibility.getType().name())); + if (policy.getPolicy().equals(Compatibility.PolicyEnum.BACKWARDANDFORWARD)) { + policy.backwardAndForward(encode(compatibility.getBackwardAndForward())); } return policy; } + public static BackwardAndForward encode(io.pravega.schemaregistry.contract.data.BackwardAndForward backwardAndForward) { + BackwardAndForward retVal = new BackwardAndForward(); + if (backwardAndForward.getBackwardPolicy() != null) { + retVal.backwardPolicy(encode(backwardAndForward.getBackwardPolicy())); + } + if (backwardAndForward.getForwardPolicy() != null) { + retVal.forwardPolicy(encode(backwardAndForward.getForwardPolicy())); + } + return retVal; + } + + public static BackwardPolicy encode(io.pravega.schemaregistry.contract.data.BackwardAndForward.BackwardPolicy backwardPolicy) { + if (backwardPolicy instanceof io.pravega.schemaregistry.contract.data.BackwardAndForward.Backward) { + return new BackwardPolicy().backwardPolicy(new Backward().name(Backward.class.getSimpleName())); + } else if (backwardPolicy instanceof io.pravega.schemaregistry.contract.data.BackwardAndForward.BackwardTransitive) { + return new BackwardPolicy().backwardPolicy(new BackwardTransitive().name(BackwardTransitive.class.getSimpleName())); + } else if (backwardPolicy instanceof io.pravega.schemaregistry.contract.data.BackwardAndForward.BackwardTill) { + VersionInfo version = encode(((io.pravega.schemaregistry.contract.data.BackwardAndForward.BackwardTill) backwardPolicy).getVersionInfo()); + return new BackwardPolicy().backwardPolicy(new BackwardTill().name(BackwardTill.class.getSimpleName()).version(version)); + } else { + throw new IllegalArgumentException(); + } + } + + public static ForwardPolicy encode(io.pravega.schemaregistry.contract.data.BackwardAndForward.ForwardPolicy forwardPolicy) { + if (forwardPolicy instanceof io.pravega.schemaregistry.contract.data.BackwardAndForward.Forward) { + return new ForwardPolicy().forwardPolicy(new Forward().name(Forward.class.getSimpleName())); + } else if (forwardPolicy instanceof io.pravega.schemaregistry.contract.data.BackwardAndForward.ForwardTransitive) { + return new ForwardPolicy().forwardPolicy(new ForwardTransitive().name(ForwardTransitive.class.getSimpleName())); + } else if (forwardPolicy instanceof io.pravega.schemaregistry.contract.data.BackwardAndForward.ForwardTill) { + VersionInfo version = encode(((io.pravega.schemaregistry.contract.data.BackwardAndForward.ForwardTill) forwardPolicy).getVersionInfo()); + return new ForwardPolicy().forwardPolicy(new ForwardTill().name(ForwardTill.class.getSimpleName()).version(version)); + } else { + throw new IllegalArgumentException(); + } + } + public static SchemaWithVersion 
encode(io.pravega.schemaregistry.contract.data.SchemaWithVersion schemaWithVersion) { return new SchemaWithVersion().schemaInfo(encode(schemaWithVersion.getSchemaInfo())) .version(encode(schemaWithVersion.getVersionInfo())); @@ -198,11 +264,11 @@ public static GroupProperties encode(io.pravega.schemaregistry.contract.data.Gro .serializationFormat(encode(groupProperties.getSerializationFormat())) .properties(groupProperties.getProperties()) .allowMultipleTypes(groupProperties.isAllowMultipleTypes()) - .schemaValidationRules(encode(groupProperties.getSchemaValidationRules())); + .compatibility(encode(groupProperties.getCompatibility())); } public static VersionInfo encode(io.pravega.schemaregistry.contract.data.VersionInfo versionInfo) { - return new VersionInfo().type(versionInfo.getType()).version(versionInfo.getVersion()).ordinal(versionInfo.getOrdinal()); + return new VersionInfo().type(versionInfo.getType()).version(versionInfo.getVersion()).id(versionInfo.getId()); } public static SchemaInfo encode(io.pravega.schemaregistry.contract.data.SchemaInfo schemaInfo) { @@ -224,7 +290,7 @@ public static SerializationFormat encode(io.pravega.schemaregistry.contract.data public static EncodingId encode(io.pravega.schemaregistry.contract.data.EncodingId encodingId) { return new EncodingId().encodingId(encodingId.getId()); } - + public static EncodingInfo encode(io.pravega.schemaregistry.contract.data.EncodingInfo encodingInfo) { return new EncodingInfo().codecType(encodingInfo.getCodecType()) .versionInfo(encode(encodingInfo.getVersionInfo())) diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/v1/ApiV1.java b/contract/src/main/java/io/pravega/schemaregistry/contract/v1/ApiV1.java index 57026dc2e..06201971b 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/v1/ApiV1.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/v1/ApiV1.java @@ -22,7 +22,7 @@ import io.pravega.schemaregistry.contract.generated.rest.model.ListGroupsResponse; import io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo; import io.pravega.schemaregistry.contract.generated.rest.model.SchemaVersionsList; -import io.pravega.schemaregistry.contract.generated.rest.model.UpdateValidationRulesRequest; +import io.pravega.schemaregistry.contract.generated.rest.model.UpdateCompatibilityRequest; import io.pravega.schemaregistry.contract.generated.rest.model.Valid; import io.pravega.schemaregistry.contract.generated.rest.model.ValidateRequest; import io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo; @@ -85,7 +85,7 @@ Response addSchema(@ApiParam(value = "Group name", required = true) @PathParam(" @Path("/{groupName}/schemas/versions/canRead") @Consumes({"application/json"}) @Produces({"application/json"}) - @io.swagger.annotations.ApiOperation(value = "", notes = "Checks if given schema can be used for reads subject to compatibility policy in the schema validation rules.", response = CanRead.class, tags = {"Group", }) + @io.swagger.annotations.ApiOperation(value = "", notes = "Checks if given schema can be used for reads subject to compatibility policy in the compatibility.", response = CanRead.class, tags = {"Group", }) @io.swagger.annotations.ApiResponses(value = { @io.swagger.annotations.ApiResponse(code = 200, message = "Response to tell whether schema can be used to read existing schemas", response = CanRead.class), @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), @@ -187,31 
+187,31 @@ Response getEncodingId(@ApiParam(value = "Group name", required = true) @PathPar @ApiParam(value = "Get schema corresponding to the version", required = true) GetEncodingIdRequest getEncodingIdRequest); @DELETE - @Path("/{groupName}/schemas/versions/{versionOrdinal}") + @Path("/{groupName}/schemas/schema/{schemaId}") @Produces({"application/json"}) @io.swagger.annotations.ApiOperation(value = "", notes = "Delete schema version from the group.", response = Void.class, tags = {"Group", }) @io.swagger.annotations.ApiResponses(value = { @io.swagger.annotations.ApiResponse(code = 204, message = "Schema corresponding to the version deleted", response = Void.class), @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while deleting schema from group", response = Void.class)}) - Response deleteSchemaFromVersionOrdinal(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, - @ApiParam(value = "version ordinal", required = true) @PathParam("versionOrdinal") Integer version); + Response deleteSchemaForId(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "version id", required = true) @PathParam("schemaId") Integer version); @GET - @Path("/{groupName}/schemas/versions/{versionOrdinal}") + @Path("/{groupName}/schemas/schema/{schemaId}") @Produces({"application/json"}) - @io.swagger.annotations.ApiOperation(value = "", notes = "Get schema from the version ordinal that uniquely identifies the schema in the group.", response = SchemaInfo.class, tags = {"Group", }) + @io.swagger.annotations.ApiOperation(value = "", notes = "Get schema from the version id that uniquely identifies the schema in the group.", response = SchemaInfo.class, tags = {"Group", }) @io.swagger.annotations.ApiResponses(value = { @io.swagger.annotations.ApiResponse(code = 200, message = "Schema corresponding to the version", response = SchemaInfo.class), @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching schema from version", response = Void.class)}) - Response getSchemaFromVersionOrdinal(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, - @ApiParam(value = "version ordinal", required = true) @PathParam("versionOrdinal") Integer version); + Response getSchemaForId(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "version id", required = true) @PathParam("schemaId") Integer schemaId); @GET @Path("/{groupName}/schemas/{type}/versions/{version}") @Produces({"application/json"}) - @io.swagger.annotations.ApiOperation(value = "", notes = "Get schema from the version ordinal that uniquely identifies the schema in the group.", response = SchemaInfo.class, tags = {"Group", }) + @io.swagger.annotations.ApiOperation(value = "", notes = "Get schema from the version id that uniquely identifies the schema in the group.", response = SchemaInfo.class, tags = {"Group", }) @io.swagger.annotations.ApiResponses(value = { @io.swagger.annotations.ApiResponse(code = 200, message = "Schema corresponding to the version", response = SchemaInfo.class), @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response 
= Void.class), @@ -254,16 +254,16 @@ Response listGroups(@ApiParam(value = "Continuation token") @QueryParam("continu @ApiParam(value = "The numbers of items to return") @QueryParam("limit") Integer limit); @PUT - @Path("/{groupName}/rules") + @Path("/{groupName}/compatibility") @Consumes({"application/json"}) - @io.swagger.annotations.ApiOperation(value = "", notes = "update schema validation rules of an existing Group", response = Void.class, tags = {"Group", }) + @io.swagger.annotations.ApiOperation(value = "", notes = "update compatibility of an existing Group", response = Void.class, tags = {"Group", }) @io.swagger.annotations.ApiResponses(value = { @io.swagger.annotations.ApiResponse(code = 200, message = "Updated schema validation policy", response = Void.class), @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), @io.swagger.annotations.ApiResponse(code = 409, message = "Write conflict", response = Void.class), - @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while updating Group's schema validation rules", response = Void.class)}) - Response updateSchemaValidationRules(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, - @ApiParam(value = "update group policy", required = true) UpdateValidationRulesRequest updateValidationRulesRequest); + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while updating Group's compatibility", response = Void.class)}) + Response updateCompatibility(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "update group policy", required = true) UpdateCompatibilityRequest updateCompatibilityRequest); @POST @Path("/{groupName}/schemas/versions/validate") @@ -275,7 +275,7 @@ Response updateSchemaValidationRules(@ApiParam(value = "Group name", required = @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while trying to validate schema", response = Void.class)}) Response validate(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, - @ApiParam(value = "Checks if schema is valid with respect to supplied validation rules", required = true) ValidateRequest validateRequest); + @ApiParam(value = "Checks if schema is valid with respect to supplied compatibility", required = true) ValidateRequest validateRequest); } /** @@ -309,19 +309,21 @@ void addCodecType(@ApiParam(value = "Group name", required = true) @PathParam("g @io.swagger.annotations.ApiResponse(code = 417, message = "Invalid serialization format", response = Void.class), @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while adding a schema", response = Void.class)}) void addSchema(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, - @ApiParam(value = "Add new schema to group", required = true) SchemaInfo schemaInfo, @Suspended AsyncResponse asyncResponse); + @ApiParam(value = "Add new schema to group", required = true) SchemaInfo schemaInfo, + @Suspended AsyncResponse asyncResponse); @POST @Path("/{groupName}/schemas/versions/canRead") @Consumes({"application/json"}) @Produces({"application/json"}) - @io.swagger.annotations.ApiOperation(value = "", notes = "Checks if given schema can be used for reads subject to compatibility 
policy in the schema validation rules.", response = CanRead.class, tags = {"Group", }) + @io.swagger.annotations.ApiOperation(value = "", notes = "Checks if given schema can be used for reads subject to compatibility policy in the compatibility.", response = CanRead.class, tags = {"Group", }) @io.swagger.annotations.ApiResponses(value = { @io.swagger.annotations.ApiResponse(code = 200, message = "Response to tell whether schema can be used to read existing schemas", response = CanRead.class), @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while checking schema for readability", response = Void.class)}) void canRead(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, - @ApiParam(value = "Checks if schema can be used to read the data in the stream based on compatibility rules.", required = true) SchemaInfo schemaInfo, @Suspended AsyncResponse asyncResponse); + @ApiParam(value = "Checks if schema can be used to read the data in the stream based on compatibility policy.", required = true) SchemaInfo schemaInfo, + @Suspended AsyncResponse asyncResponse); @POST @Consumes({"application/json"}) @@ -330,7 +332,8 @@ void canRead(@ApiParam(value = "Group name", required = true) @PathParam("groupN @io.swagger.annotations.ApiResponse(code = 201, message = "Successfully added group", response = Void.class), @io.swagger.annotations.ApiResponse(code = 409, message = "Group with given name already exists", response = Void.class), @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while creating a Group", response = Void.class)}) - void createGroup(@ApiParam(value = "The Group configuration", required = true) CreateGroupRequest createGroupRequest, @Suspended AsyncResponse asyncResponse); + void createGroup(@ApiParam(value = "The Group configuration", required = true) CreateGroupRequest createGroupRequest, + @Suspended AsyncResponse asyncResponse); @DELETE @Path("/{groupName}") @@ -338,7 +341,8 @@ void canRead(@ApiParam(value = "Group name", required = true) @PathParam("groupN @io.swagger.annotations.ApiResponses(value = { @io.swagger.annotations.ApiResponse(code = 204, message = "Successfully deleted the Group", response = Void.class), @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while deleting the Group", response = Void.class)}) - void deleteGroup(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, @Suspended AsyncResponse asyncResponse); + void deleteGroup(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @Suspended AsyncResponse asyncResponse); @GET @Path("/{groupName}/codecTypes") @@ -348,7 +352,8 @@ void canRead(@ApiParam(value = "Group name", required = true) @PathParam("groupN @io.swagger.annotations.ApiResponse(code = 200, message = "Found CodecTypes", response = CodecTypesList.class), @io.swagger.annotations.ApiResponse(code = 404, message = "Group or encoding id with given name not found", response = Void.class), @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching codecTypes registered", response = Void.class)}) - void getCodecTypesList(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, @Suspended AsyncResponse asyncResponse); + void getCodecTypesList(@ApiParam(value = "Group name", 
required = true) @PathParam("groupName") String groupName, + @Suspended AsyncResponse asyncResponse); @GET @Path("/{groupName}/encodings/{encodingId}") @@ -359,7 +364,8 @@ void canRead(@ApiParam(value = "Group name", required = true) @PathParam("groupN @io.swagger.annotations.ApiResponse(code = 404, message = "Group or encoding id with given name not found", response = Void.class), @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while getting encoding info corresponding to encoding id", response = Void.class)}) void getEncodingInfo(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, - @ApiParam(value = "Encoding id that identifies a unique combination of schema and codecType", required = true) @PathParam("encodingId") Integer encodingId, @Suspended AsyncResponse asyncResponse); + @ApiParam(value = "Encoding id that identifies a unique combination of schema and codecType", required = true) @PathParam("encodingId") Integer encodingId, + @Suspended AsyncResponse asyncResponse); @GET @Path("/{groupName}") @@ -369,7 +375,8 @@ void getEncodingInfo(@ApiParam(value = "Group name", required = true) @PathParam @io.swagger.annotations.ApiResponse(code = 200, message = "Found Group properties", response = GroupProperties.class), @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching Group details", response = Void.class)}) - void getGroupProperties(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, @Suspended AsyncResponse asyncResponse); + void getGroupProperties(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @Suspended AsyncResponse asyncResponse); @GET @Path("/{groupName}/history") @@ -379,7 +386,8 @@ void getEncodingInfo(@ApiParam(value = "Group name", required = true) @PathParam @io.swagger.annotations.ApiResponse(code = 200, message = "Found Group history", response = GroupHistory.class), @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching Group history", response = Void.class)}) - void getGroupHistory(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, @Suspended AsyncResponse asyncResponse); + void getGroupHistory(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @Suspended AsyncResponse asyncResponse); @GET @Path("/{groupName}/schemas/versions") @@ -416,34 +424,37 @@ void getSchemas(@ApiParam(value = "Group name", required = true) @PathParam("gro @io.swagger.annotations.ApiResponse(code = 412, message = "Codec type not registered", response = Void.class), @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while getting encoding id", response = Void.class)}) void getEncodingId(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, - @ApiParam(value = "Get schema corresponding to the version", required = true) GetEncodingIdRequest getEncodingIdRequest, @Suspended AsyncResponse asyncResponse); + @ApiParam(value = "Get schema corresponding to the version", required = true) GetEncodingIdRequest getEncodingIdRequest, + @Suspended AsyncResponse asyncResponse); @GET - 
@Path("/{groupName}/schemas/versions/{versionOrdinal}") + @Path("/{groupName}/schemas/schema/{schemaId}") @Produces({"application/json"}) - @io.swagger.annotations.ApiOperation(value = "", notes = "Get schema from the version ordinal that uniquely identifies the schema in the group.", response = SchemaInfo.class, tags = {"Group", }) + @io.swagger.annotations.ApiOperation(value = "", notes = "Get schema from the version id that uniquely identifies the schema in the group.", response = SchemaInfo.class, tags = {"Group", }) @io.swagger.annotations.ApiResponses(value = { @io.swagger.annotations.ApiResponse(code = 200, message = "Schema corresponding to the version", response = SchemaInfo.class), @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching schema from version", response = Void.class)}) - void getSchemaFromVersionOrdinal(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, - @ApiParam(value = "version ordinal", required = true) @PathParam("versionOrdinal") Integer version, @Suspended AsyncResponse asyncResponse); + void getSchemaForId(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "version id", required = true) @PathParam("schemaId") Integer schemaId, + @Suspended AsyncResponse asyncResponse); @DELETE - @Path("/{groupName}/schemas/versions/{versionOrdinal}") + @Path("/{groupName}/schemas/schema/{schemaId}") @Produces({"application/json"}) @io.swagger.annotations.ApiOperation(value = "", notes = "Delete schema version from the group.", response = Void.class, tags = {"Group", }) @io.swagger.annotations.ApiResponses(value = { @io.swagger.annotations.ApiResponse(code = 204, message = "Schema corresponding to the version", response = Void.class), @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while deleting schema from group", response = Void.class)}) - void deleteSchemaFromVersionOrdinal(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, - @ApiParam(value = "version ordinal", required = true) @PathParam("versionOrdinal") Integer version, @Suspended AsyncResponse asyncResponse); + void deleteSchemaForId(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "version id", required = true) @PathParam("schemaId") Integer version, + @Suspended AsyncResponse asyncResponse); @GET @Path("/{groupName}/schemas/{type}/versions/{version}") @Produces({"application/json"}) - @io.swagger.annotations.ApiOperation(value = "", notes = "Get schema from the version ordinal that uniquely identifies the schema in the group.", response = SchemaInfo.class, tags = {"Group", }) + @io.swagger.annotations.ApiOperation(value = "", notes = "Get schema from the version id that uniquely identifies the schema in the group.", response = SchemaInfo.class, tags = {"Group", }) @io.swagger.annotations.ApiResponses(value = { @io.swagger.annotations.ApiResponse(code = 200, message = "Schema corresponding to the version", response = SchemaInfo.class), @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), @@ -486,19 +497,21 @@ void getSchemaVersion(@ApiParam(value = 
"Group name", required = true) @PathPara @io.swagger.annotations.ApiResponse(code = 200, message = "List of all groups", response = ListGroupsResponse.class), @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while fetching the list of Groups", response = Void.class)}) void listGroups(@ApiParam(value = "Continuation token") @QueryParam("continuationToken") String continuationToken, - @ApiParam(value = "The numbers of items to return") @QueryParam("limit") Integer limit, @Suspended AsyncResponse asyncResponse); + @ApiParam(value = "The numbers of items to return") @QueryParam("limit") Integer limit, + @Suspended AsyncResponse asyncResponse); @PUT - @Path("/{groupName}/rules") + @Path("/{groupName}/compatibility") @Consumes({"application/json"}) - @io.swagger.annotations.ApiOperation(value = "", notes = "update schema validation rules of an existing Group", response = Void.class, tags = {"Group", }) + @io.swagger.annotations.ApiOperation(value = "", notes = "update compatibility of an existing Group", response = Void.class, tags = {"Group", }) @io.swagger.annotations.ApiResponses(value = { @io.swagger.annotations.ApiResponse(code = 200, message = "Updated schema validation policy", response = Void.class), @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), @io.swagger.annotations.ApiResponse(code = 409, message = "Write conflict", response = Void.class), - @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while updating Group's schema validation rules", response = Void.class)}) - void updateSchemaValidationRules(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, - @ApiParam(value = "update group policy", required = true) UpdateValidationRulesRequest updateValidationRulesRequest, @Suspended AsyncResponse asyncResponse); + @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while updating Group's compatibility", response = Void.class)}) + void updateCompatibility(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, + @ApiParam(value = "update group policy", required = true) UpdateCompatibilityRequest updateCompatibilityRequest, + @Suspended AsyncResponse asyncResponse); @POST @Path("/{groupName}/schemas/versions/validate") @@ -510,7 +523,8 @@ void updateSchemaValidationRules(@ApiParam(value = "Group name", required = true @io.swagger.annotations.ApiResponse(code = 404, message = "Group with given name not found", response = Void.class), @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while trying to validate schema", response = Void.class)}) void validate(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, - @ApiParam(value = "Checks if schema is valid with respect to supplied validation rules", required = true) ValidateRequest validateRequest, @Suspended AsyncResponse asyncResponse); + @ApiParam(value = "Checks if schema is valid with respect to supplied compatibility", required = true) ValidateRequest validateRequest, + @Suspended AsyncResponse asyncResponse); } /** diff --git a/contract/src/main/swagger/README.md b/contract/src/main/swagger/README.md index bc196acf3..37d0c295d 100644 --- a/contract/src/main/swagger/README.md +++ b/contract/src/main/swagger/README.md @@ -40,5 +40,5 @@ https://jcenter.bintray.com/io/github/swagger2markup/swagger2markup-cli/1.3.3/sw ### Generate and save the 
markup documentation ``` -java -Dswagger2markup.markupLanguage=MARKDOWN -Dswagger2markup.generatedExamplesEnabled=true -jar swagger2markup-cli-1.3.3.jar convert -i /contract/src/main/swagger/schemaregistry.yaml -f /documentation/src/docs/io.pravega.rest/restapis +java -Dswagger2markup.markupLanguage=MARKDOWN -Dswagger2markup.generatedExamplesEnabled=true -jar swagger2markup-cli-1.3.3.jar convert -i /contract/src/main/swagger/SchemaRegistry.yaml -f /documentation/src/docs/io.pravega.rest/restapis ``` diff --git a/contract/src/main/swagger/SchemaRegistry.yaml b/contract/src/main/swagger/SchemaRegistry.yaml index 92dba5fd1..105d65bdc 100644 --- a/contract/src/main/swagger/SchemaRegistry.yaml +++ b/contract/src/main/swagger/SchemaRegistry.yaml @@ -137,7 +137,7 @@ paths: description: Group with given name not found 500: description: Internal server error while fetching Group history - /groups/{groupName}/rules: + /groups/{groupName}/compatibility: parameters: - in: path name: groupName @@ -147,34 +147,34 @@ paths: put: tags: - "Group" - operationId: updateSchemaValidationRules - description: update schema validation rules of an existing Group + operationId: updateCompatibility + description: update schema compatibility of an existing Group consumes: - application/json parameters: - in: body - name: UpdateValidationRulesRequest + name: UpdateCompatibilityRequest description: update group policy required: true schema: type: object properties: - validationRules: - $ref: "#/definitions/SchemaValidationRules" - previousRules: - $ref: "#/definitions/SchemaValidationRules" + compatibility: + $ref: "#/definitions/Compatibility" + previousCompatibility: + $ref: "#/definitions/Compatibility" nullable: true required: - - validationRules + - compatibility responses: 200: - description: Updated schema validation policy + description: Updated schema compatibility policy 404: description: Group with given name not found 409: description: Write conflict 500: - description: Internal server error while updating Group's schema validation rules + description: Internal server error while updating Group's schema compatibility /groups/{groupName}/schemas: parameters: - in: path @@ -291,7 +291,7 @@ paths: description: Group with given name not found 500: description: Internal server error fetching version for schema - /groups/{groupName}/schemas/versions/{versionOrdinal}: + /groups/{groupName}/schemas/schema/{schemaId}: parameters: - in: path name: groupName @@ -299,16 +299,16 @@ paths: required: true type: string - in: path - name: versionOrdinal - description: Version ordinal + name: schemaId + description: Schema Id required: true type: integer format: int32 get: tags: - "Group" - operationId: getSchemaFromVersionOrdinal - description: Get schema from the version ordinal that uniquely identifies the schema in the group. + operationId: getSchemaForId + description: Get schema from the schema id that uniquely identifies the schema in the group. produces: - application/json responses: @@ -323,7 +323,7 @@ paths: delete: tags: - "Group" - operationId: deleteSchemaVersionOrinal + operationId: deleteSchemaForId description: Delete schema identified by version from the group. produces: - application/json @@ -356,7 +356,7 @@ paths: tags: - "Group" operationId: getSchemaFromVersion - description: Get schema from the version ordinal that uniquely identifies the schema in the group. + description: Get schema from the version number that uniquely identifies the schema in the group. 
produces: - application/json responses: @@ -399,15 +399,15 @@ paths: parameters: - in: body name: ValidateRequest - description: Checks if schema is valid with respect to supplied validation rules + description: Checks if schema is valid with respect to supplied compatibility required: true schema: type: object properties: schemaInfo: $ref: "#/definitions/SchemaInfo" - validationRules: - $ref: "#/definitions/SchemaValidationRules" + compatibility: + $ref: "#/definitions/Compatibility" required: - schemaInfo produces: @@ -432,13 +432,13 @@ paths: tags: - "Group" operationId: canRead - description: Checks if given schema can be used for reads subject to compatibility policy in the schema validation rules. + description: Checks if given schema can be used for reads subject to the compatibility policy configured for the group. consumes: - application/json parameters: - in: body name: schemaInfo - description: Checks if schema can be used to read the data in the stream based on compatibility rules. + description: Checks if schema can be used to read the data in the stream based on compatibility policy. required: true schema: $ref: "#/definitions/SchemaInfo" @@ -572,7 +572,7 @@ paths: parameters: post: tags: - - "Schema" + - "Schemas" operationId: getSchemaReferences description: Gets a map of groups to version info for the groups in which the schema is registered. SchemaInfo#properties is ignored while comparing the schema. consumes: @@ -616,9 +616,9 @@ definitions: serializationFormat: description: serialization format for the group. $ref: "#/definitions/SerializationFormat" - schemaValidationRules: - description: Validation rules to apply while registering new schema. - $ref: "#/definitions/SchemaValidationRules" + compatibility: + description: Compatibility to apply while registering a new schema. + $ref: "#/definitions/Compatibility" allowMultipleTypes: description: Flag to indicate whether to allow multiple schemas representing distinct objects to be registered in the group. type: boolean @@ -632,7 +632,7 @@ definitions: required: - serializationFormat - allowMultipleTypes - - schemaValidationRules + - compatibility SerializationFormat: type: object description: Serialization format enum that lists different serialization formats supported by the service. To use additional formats, use serializationFormat.Custom and supply customTypeName. @@ -685,14 +685,14 @@ definitions: description: Version number that uniquely identifies the schema version among all schemas in the group that share the same Type. type: integer format: int32 - ordinal: - description: Version ordinal that uniquely identifies the position of the corresponding schema across all schemas in the group. + id: + description: Schema id that uniquely identifies the schema version and describes the absolute order in which the schema was added to the group. type: integer format: int32 required: - type - version - - ordinal + - id SchemaWithVersion: type: object description: Object that encapsulates SchemaInfo and its corresponding VersionInfo objects. @@ -744,57 +744,122 @@ definitions: - codecType Compatibility: type: object - description: Schema Compatibility validation rule. - required: - - name - - policy + description: Compatibility policy. properties: - name: - type: string - description: Name is used to identify the type of SchemaValidationRule. For Compatibility rule the name should be "Compatibility". policy: description: Compatibility policy enum.
type: string enum: - AllowAny - DenyAll - - Backward - - Forward - - ForwardTransitive - - BackwardTransitive - - BackwardTill - - ForwardTill - - BackwardAndForwardTill - - Full - - FullTransitive - backwardTill: - description: Version for backward till if policy is BackwardTill or BackwardAndForwardTill. - $ref: "#/definitions/VersionInfo" - forwardTill: - description: Version for forward till if policy is ForwardTill or BackwardAndForwardTill. - $ref: "#/definitions/VersionInfo" - SchemaValidationRules: + - BackwardAndForward + backwardAndForward: + description: Backward and forward policy details. + $ref: "#/definitions/BackwardAndForward" + required: + - policy + BackwardAndForward: type: object - description: Schema validation rules to be applied for new schema addition. Currently only one rule is supported - Compatibility. + description: Combined backward and forward compatibility policy. properties: - rules: - type: object - additionalProperties: - $ref: "#/definitions/SchemaValidationRule" - SchemaValidationRule: + backwardPolicy: + description: Backward policy type that describes different backward policies like Backward, BackwardTransitive and BackwardTill. + $ref: "#/definitions/BackwardPolicy" + forwardPolicy: + description: Forward policy type that describes different forward policies like Forward, ForwardTransitive and ForwardTill. + $ref: "#/definitions/ForwardPolicy" + BackwardPolicy: type: object - description: Schema validation rule base class. + description: Backward compatibility policy. required: - - rule + - backwardPolicy properties: - rule: - description: Specific schema validation rule. The only rule we have presently is Compatibility. The "name" is used to identify specific Rule type. The only rule supported in this is Compatibility. + backwardPolicy: + description: Backward policy. Can be one of Backward, BackwardTill and BackwardTransitive. oneOf: - - $ref: '#/definitions/Compatibility' + - $ref: '#/definitions/Backward' + - $ref: '#/definitions/BackwardTill' + - $ref: '#/definitions/BackwardTransitive' discriminator: propertyName: name mapping: - Compatibility: '#/definitions/Compatibility' + Backward: "#/definitions/Backward" + BackwardTill: "#/definitions/BackwardTill" + BackwardTransitive: "#/definitions/BackwardTransitive" + ForwardPolicy: + type: object + description: Forward compatibility policy. + required: + - forwardPolicy + properties: + forwardPolicy: + description: Forward policy. Can be one of Forward, ForwardTill and ForwardTransitive. + oneOf: + - $ref: "#/definitions/Forward" + - $ref: "#/definitions/ForwardTill" + - $ref: "#/definitions/ForwardTransitive" + discriminator: + propertyName: name + mapping: + Forward: "#/definitions/Forward" + ForwardTill: "#/definitions/ForwardTill" + ForwardTransitive: "#/definitions/ForwardTransitive" + Backward: + type: object + description: Backward compatibility type which tells the service to check for backward compatibility with the latest schema. + properties: + name: + type: string + required: + - name + Forward: + type: object + description: Forward compatibility type which tells the service to check for forward compatibility with the latest schema. + properties: + name: + type: string + required: + - name + BackwardTransitive: + type: object + description: Backward compatibility type which tells the service to check for backward compatibility with all previous schemas.
+ properties: + name: + type: string + required: + - name + ForwardTransitive: + type: object + description: Forward compatibility type which tells the service to check for forward compatibility with all previous schemas. + properties: + name: + type: string + required: + - name + BackwardTill: + type: object + description: Backward compatibility type which tells the service to check for backward compatibility with all previous schemas till the specified version. + properties: + name: + type: string + version: + description: Version till which backward compatibility is checked. + $ref: "#/definitions/VersionInfo" + required: + - name + - version + ForwardTill: + type: object + description: Forward compatibility type which tells the service to check for forward compatibility with all previous schemas till the specified version. + properties: + name: + type: string + version: + description: Version till which forward compatibility is checked. + $ref: "#/definitions/VersionInfo" + required: + - name + - version CodecTypesList: type: object description: Response object for listCodecTypes. @@ -809,7 +874,7 @@ definitions: description: Response object for validateSchema api. properties: valid: - description: Whether given schema is valid with respect to existing group schemas against the configured validation rules. + description: Whether given schema is valid with respect to existing group schemas against the configured compatibility. type: boolean required: - valid @@ -818,13 +883,13 @@ definitions: description: Response object for canRead api. properties: compatible: - description: Whether given schema is compatible and can be used for reads. Compatibility is checked against existing group schemas subject to group's configured compatibility policy. + description: Whether given schema is compatible and can be used for reads. Compatibility is checked against existing group schemas subject to the group's configured compatibility policy. type: boolean required: - compatible GroupHistoryRecord: type: object - description: Group History Record that describes each schema evolution - schema information, version generated for the schema, time and rules used for schema validation. + description: Group History Record that describes each schema evolution - schema information, version generated for the schema, time and compatibility policy used for schema validation. properties: schemaInfo: description: Schema information object. @@ -832,9 +897,9 @@ definitions: version: description: Schema version information object. $ref: "#/definitions/VersionInfo" - validationRules: - description: Schema validation rules applied. - $ref: "#/definitions/SchemaValidationRules" + compatibility: + description: Schema compatibility applied. + $ref: "#/definitions/Compatibility" timestamp: description: Timestamp when the schema was added. type: integer @@ -845,7 +910,7 @@ definitions: required: - schemaInfo - version - - validationRules + - compatibility - timestamp GroupHistory: type: object @@ -861,6 +926,7 @@ definitions: properties: groups: type: object + description: Version of the schema in each group where it is registered.
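A minimal sketch of building a backward-till/forward-till policy with the generated REST models described by the definitions above, using only the fluent setters that the updated ModelHelperTest in this patch exercises; the wrapper class and factory method below are hypothetical and not part of the change set:

```
import io.pravega.schemaregistry.contract.generated.rest.model.BackwardAndForward;
import io.pravega.schemaregistry.contract.generated.rest.model.BackwardPolicy;
import io.pravega.schemaregistry.contract.generated.rest.model.BackwardTill;
import io.pravega.schemaregistry.contract.generated.rest.model.Compatibility;
import io.pravega.schemaregistry.contract.generated.rest.model.ForwardPolicy;
import io.pravega.schemaregistry.contract.generated.rest.model.ForwardTill;
import io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo;

public class CompatibilityModelSketch {
    // Hypothetical factory: a BackwardTill/ForwardTill pair wrapped under the BackwardAndForward policy enum value.
    public static Compatibility backwardTillAndForwardTill(VersionInfo backwardTill, VersionInfo forwardTill) {
        return new Compatibility()
                .policy(Compatibility.PolicyEnum.BACKWARDANDFORWARD)
                .backwardAndForward(new BackwardAndForward()
                        .backwardPolicy(new BackwardPolicy().backwardPolicy(
                                new BackwardTill().name(BackwardTill.class.getSimpleName()).version(backwardTill)))
                        .forwardPolicy(new ForwardPolicy().forwardPolicy(
                                new ForwardTill().name(ForwardTill.class.getSimpleName()).version(forwardTill))));
    }
}
```

Setting `name` to the concrete type's simple name is what the `discriminator` mapping in the definitions above keys on.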
additionalProperties: $ref: "#/definitions/VersionInfo" required: diff --git a/contract/src/test/java/io/pravega/schemaregistry/contract/transform/ModelHelperTest.java b/contract/src/test/java/io/pravega/schemaregistry/contract/transform/ModelHelperTest.java index c9be6894f..c229b26dd 100644 --- a/contract/src/test/java/io/pravega/schemaregistry/contract/transform/ModelHelperTest.java +++ b/contract/src/test/java/io/pravega/schemaregistry/contract/transform/ModelHelperTest.java @@ -1,23 +1,27 @@ /** * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. - * + *

* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + *

* http://www.apache.org/licenses/LICENSE-2.0 */ package io.pravega.schemaregistry.contract.transform; import com.google.common.collect.ImmutableMap; -import io.pravega.schemaregistry.contract.data.GroupHistoryRecord; +import io.pravega.schemaregistry.contract.generated.rest.model.Backward; +import io.pravega.schemaregistry.contract.generated.rest.model.BackwardAndForward; +import io.pravega.schemaregistry.contract.generated.rest.model.BackwardPolicy; +import io.pravega.schemaregistry.contract.generated.rest.model.BackwardTill; import io.pravega.schemaregistry.contract.generated.rest.model.Compatibility; import io.pravega.schemaregistry.contract.generated.rest.model.EncodingId; import io.pravega.schemaregistry.contract.generated.rest.model.EncodingInfo; +import io.pravega.schemaregistry.contract.generated.rest.model.ForwardPolicy; +import io.pravega.schemaregistry.contract.generated.rest.model.ForwardTill; +import io.pravega.schemaregistry.contract.generated.rest.model.GroupHistoryRecord; import io.pravega.schemaregistry.contract.generated.rest.model.GroupProperties; import io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo; -import io.pravega.schemaregistry.contract.generated.rest.model.SchemaValidationRule; -import io.pravega.schemaregistry.contract.generated.rest.model.SchemaValidationRules; import io.pravega.schemaregistry.contract.generated.rest.model.SchemaWithVersion; import io.pravega.schemaregistry.contract.generated.rest.model.SerializationFormat; import io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo; @@ -33,14 +37,19 @@ public class ModelHelperTest { @Test public void testDecode() { SerializationFormat type = new SerializationFormat().serializationFormat(SerializationFormat.SerializationFormatEnum.CUSTOM).customTypeName("a"); - SchemaValidationRules rules = new SchemaValidationRules().rules(Collections.singletonMap(Compatibility.class.getSimpleName(), - new SchemaValidationRule().rule(new Compatibility().name(Compatibility.class.getSimpleName()) - .policy(Compatibility.PolicyEnum.BACKWARD)))); + Compatibility backward = new Compatibility() + .policy(Compatibility.PolicyEnum.BACKWARDANDFORWARD) + .backwardAndForward(new BackwardAndForward().backwardPolicy(new BackwardPolicy() + .backwardPolicy(new Backward().name(Backward.class.getSimpleName())))); SchemaInfo schema = new SchemaInfo() .type("a").serializationFormat(type).schemaData(new byte[0]).properties(Collections.emptyMap()); - VersionInfo version = new VersionInfo().type("a").version(1).ordinal(1); - Compatibility compatibility = new Compatibility().name(Compatibility.class.getSimpleName()) - .policy(Compatibility.PolicyEnum.BACKWARDANDFORWARDTILL).backwardTill(version).forwardTill(version); + VersionInfo version = new VersionInfo().type("a").version(1).id(1); + Compatibility backwardTillForwardTill = new Compatibility() + .policy(Compatibility.PolicyEnum.BACKWARDANDFORWARD) + .backwardAndForward(new BackwardAndForward().backwardPolicy(new BackwardPolicy() + .backwardPolicy(new BackwardTill().name(BackwardTill.class.getSimpleName()).version(version))) + .forwardPolicy(new ForwardPolicy().forwardPolicy(new ForwardTill().name(ForwardTill.class.getSimpleName()).version(version))) + ); String codecType = "custom"; // decodes @@ -54,13 +63,20 @@ public void testDecode() { assertNotNull(schemaInfo.getSchemaData()); assertNotNull(schemaInfo.getProperties()); - io.pravega.schemaregistry.contract.data.Compatibility compatibilityDecoded = ModelHelper.decode(compatibility); - 
assertEquals(compatibilityDecoded.getCompatibility(), io.pravega.schemaregistry.contract.data.Compatibility.Type.BackwardAndForwardTill); + io.pravega.schemaregistry.contract.data.Compatibility compatibilityDecoded = ModelHelper.decode(backwardTillForwardTill); + assertNotNull(compatibilityDecoded.getBackwardAndForward()); + io.pravega.schemaregistry.contract.data.BackwardAndForward backwardAndForward = + compatibilityDecoded.getBackwardAndForward(); + assertTrue(backwardAndForward.getBackwardPolicy() instanceof io.pravega.schemaregistry.contract.data.BackwardAndForward.BackwardTill); + assertTrue(backwardAndForward.getForwardPolicy() instanceof io.pravega.schemaregistry.contract.data.BackwardAndForward.ForwardTill); + + io.pravega.schemaregistry.contract.data.Compatibility backwardDecoded = ModelHelper.decode(backwardTillForwardTill); + assertNotNull(backwardDecoded.getBackwardAndForward()); + io.pravega.schemaregistry.contract.data.BackwardAndForward backwardAndForwardDecoded = + backwardDecoded.getBackwardAndForward(); + assertTrue(backwardAndForwardDecoded.getBackwardPolicy() instanceof io.pravega.schemaregistry.contract.data.BackwardAndForward.BackwardTill); + assertTrue(backwardAndForwardDecoded.getForwardPolicy() instanceof io.pravega.schemaregistry.contract.data.BackwardAndForward.ForwardTill); - io.pravega.schemaregistry.contract.data.SchemaValidationRules rulesDecoded = ModelHelper.decode(rules); - assertEquals(rulesDecoded.getRules().size(), 1); - assertEquals(rulesDecoded.getRules().values().iterator().next().getName(), Compatibility.class.getSimpleName()); - io.pravega.schemaregistry.contract.data.VersionInfo versionInfo = ModelHelper.decode(version); assertEquals(versionInfo.getType(), version.getType()); assertEquals(versionInfo.getVersion(), version.getVersion().intValue()); @@ -83,12 +99,13 @@ public void testEncode() { io.pravega.schemaregistry.contract.data.SchemaInfo schemaInfo = new io.pravega.schemaregistry.contract.data.SchemaInfo( "name", serializationFormat, ByteBuffer.wrap(new byte[0]), ImmutableMap.of()); io.pravega.schemaregistry.contract.data.VersionInfo versionInfo = new io.pravega.schemaregistry.contract.data.VersionInfo("a", 0, 1); - io.pravega.schemaregistry.contract.data.Compatibility rule = io.pravega.schemaregistry.contract.data.Compatibility.backwardTillAndForwardTill( - new io.pravega.schemaregistry.contract.data.VersionInfo("a", 0, 0), - new io.pravega.schemaregistry.contract.data.VersionInfo("a", 1, 1)); - io.pravega.schemaregistry.contract.data.SchemaValidationRules schemaValidationRules = io.pravega.schemaregistry.contract.data.SchemaValidationRules.of(rule); + io.pravega.schemaregistry.contract.data.Compatibility compatibility = io.pravega.schemaregistry.contract.data.Compatibility + .backwardTillAndForwardTill( + new io.pravega.schemaregistry.contract.data.VersionInfo("a", 0, 0), + new io.pravega.schemaregistry.contract.data.VersionInfo("a", 1, 1)); + io.pravega.schemaregistry.contract.data.GroupProperties prop = io.pravega.schemaregistry.contract.data.GroupProperties - .builder().serializationFormat(serializationFormat).schemaValidationRules(schemaValidationRules) + .builder().serializationFormat(serializationFormat).compatibility(compatibility) .allowMultipleTypes(true).properties(ImmutableMap.of()).build(); String codecType = "codecType"; @@ -108,35 +125,104 @@ public void testEncode() { EncodingId encodingId = ModelHelper.encode(new io.pravega.schemaregistry.contract.data.EncodingId(0)); assertEquals(encodingId.getEncodingId().intValue(), 0); 
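The round-trip test added further below drives the data-model factories on io.pravega.schemaregistry.contract.data.Compatibility through ModelHelper.encode and ModelHelper.decode. A minimal sketch of that contract, assuming only the factory methods and constructors the test itself uses; the helper class and roundTrips method are hypothetical, and the comment on what full() combines is an interpretation of the model rather than something this patch states:

```
import io.pravega.schemaregistry.contract.data.Compatibility;
import io.pravega.schemaregistry.contract.data.VersionInfo;
import io.pravega.schemaregistry.contract.transform.ModelHelper;

public class CompatibilityRoundTripSketch {
    // Hypothetical helper: encode a data-model Compatibility into the generated REST model and
    // decode it back, returning whether the round trip preserves equality -- the property the test asserts.
    static boolean roundTrips(Compatibility compatibility) {
        io.pravega.schemaregistry.contract.generated.rest.model.Compatibility encoded = ModelHelper.encode(compatibility);
        return compatibility.equals(ModelHelper.decode(encoded));
    }

    public static void main(String[] args) {
        VersionInfo v = new VersionInfo("a", 1, 1);
        System.out.println(roundTrips(Compatibility.backward()));
        // full() presumably combines Backward and Forward checks against the latest schema.
        System.out.println(roundTrips(Compatibility.full()));
        System.out.println(roundTrips(Compatibility.backwardTillAndForwardTill(v, v)));
    }
}
```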
- + EncodingInfo encodingInfo = ModelHelper.encode(new io.pravega.schemaregistry.contract.data.EncodingInfo(versionInfo, schemaInfo, codecType)); assertEquals(encodingInfo.getCodecType(), codecType); assertEquals(encodingInfo.getVersionInfo(), version); assertEquals(encodingInfo.getSchemaInfo(), schema); - SchemaValidationRules rules = ModelHelper.encode(schemaValidationRules); - assertEquals(rules.getRules().size(), 1); + Compatibility rules = ModelHelper.encode(compatibility); + assertEquals(rules.getPolicy(), Compatibility.PolicyEnum.BACKWARDANDFORWARD); + assertTrue(rules.getBackwardAndForward().getBackwardPolicy().getBackwardPolicy() instanceof BackwardTill); + assertTrue(rules.getBackwardAndForward().getForwardPolicy().getForwardPolicy() instanceof ForwardTill); - io.pravega.schemaregistry.contract.generated.rest.model.GroupHistoryRecord schemaEvolution = ModelHelper.encode(new GroupHistoryRecord( - schemaInfo, versionInfo, schemaValidationRules, 100L, "")); + GroupHistoryRecord schemaEvolution = ModelHelper.encode(new io.pravega.schemaregistry.contract.data.GroupHistoryRecord( + schemaInfo, versionInfo, compatibility, 100L, "")); assertEquals(schemaEvolution.getSchemaInfo(), schema); - assertEquals(schemaEvolution.getValidationRules(), rules); + assertEquals(schemaEvolution.getCompatibility(), rules); assertEquals(schemaEvolution.getVersion(), version); assertEquals(schemaEvolution.getTimestamp().longValue(), 100L); assertEquals(schemaEvolution.getSchemaString(), ""); - - Compatibility compatibility = ModelHelper.encode(rule); - assertEquals(compatibility.getPolicy(), Compatibility.PolicyEnum.BACKWARDANDFORWARDTILL); - + SchemaWithVersion schemaWithVersion = ModelHelper.encode(new io.pravega.schemaregistry.contract.data.SchemaWithVersion(schemaInfo, versionInfo)); assertEquals(schemaWithVersion.getSchemaInfo(), schema); assertEquals(schemaWithVersion.getVersion(), version); GroupProperties groupProperties = ModelHelper.encode(prop); assertEquals(groupProperties.getSerializationFormat(), type); - assertEquals(groupProperties.getSchemaValidationRules(), rules); + assertEquals(groupProperties.getCompatibility(), rules); assertEquals(groupProperties.isAllowMultipleTypes(), prop.isAllowMultipleTypes()); assertEquals(groupProperties.getProperties(), prop.getProperties()); } + @Test + public void testEncodeAndDecodeCompatibility() { + io.pravega.schemaregistry.contract.data.Compatibility compatibility = + io.pravega.schemaregistry.contract.data.Compatibility.allowAny(); + Compatibility encoded = ModelHelper.encode(compatibility); + io.pravega.schemaregistry.contract.data.Compatibility decoded = ModelHelper.decode(encoded); + assertEquals(compatibility, decoded); + + compatibility = io.pravega.schemaregistry.contract.data.Compatibility.denyAll(); + encoded = ModelHelper.encode(compatibility); + decoded = ModelHelper.decode(encoded); + assertEquals(compatibility, decoded); + + compatibility = io.pravega.schemaregistry.contract.data.Compatibility.backward(); + encoded = ModelHelper.encode(compatibility); + decoded = ModelHelper.decode(encoded); + assertEquals(compatibility, decoded); + + compatibility = io.pravega.schemaregistry.contract.data.Compatibility.forward(); + encoded = ModelHelper.encode(compatibility); + decoded = ModelHelper.decode(encoded); + assertEquals(compatibility, decoded); + + compatibility = io.pravega.schemaregistry.contract.data.Compatibility.backwardTransitive(); + encoded = ModelHelper.encode(compatibility); + decoded = ModelHelper.decode(encoded); + 
assertEquals(compatibility, decoded); + + compatibility = io.pravega.schemaregistry.contract.data.Compatibility.forwardTransitive(); + encoded = ModelHelper.encode(compatibility); + decoded = ModelHelper.decode(encoded); + assertEquals(compatibility, decoded); + + compatibility = io.pravega.schemaregistry.contract.data.Compatibility.full(); + encoded = ModelHelper.encode(compatibility); + decoded = ModelHelper.decode(encoded); + assertEquals(compatibility, decoded); + + compatibility = io.pravega.schemaregistry.contract.data.Compatibility.fullTransitive(); + encoded = ModelHelper.encode(compatibility); + decoded = ModelHelper.decode(encoded); + assertEquals(compatibility, decoded); + + io.pravega.schemaregistry.contract.data.VersionInfo versionInfo = new io.pravega.schemaregistry.contract.data.VersionInfo("a", 1, 1); + compatibility = io.pravega.schemaregistry.contract.data.Compatibility.backwardTill(versionInfo); + encoded = ModelHelper.encode(compatibility); + decoded = ModelHelper.decode(encoded); + assertEquals(compatibility, decoded); + + compatibility = io.pravega.schemaregistry.contract.data.Compatibility.forwardTill(versionInfo); + encoded = ModelHelper.encode(compatibility); + decoded = ModelHelper.decode(encoded); + assertEquals(compatibility, decoded); + + compatibility = io.pravega.schemaregistry.contract.data.Compatibility.backwardTillAndForwardTill(versionInfo, versionInfo); + encoded = ModelHelper.encode(compatibility); + decoded = ModelHelper.decode(encoded); + assertEquals(compatibility, decoded); + + compatibility = io.pravega.schemaregistry.contract.data.Compatibility.backwardOneAndForwardTill(versionInfo); + encoded = ModelHelper.encode(compatibility); + decoded = ModelHelper.decode(encoded); + assertEquals(compatibility, decoded); + + compatibility = io.pravega.schemaregistry.contract.data.Compatibility.backwardTillAndForwardOne(versionInfo); + encoded = ModelHelper.encode(compatibility); + decoded = ModelHelper.decode(encoded); + assertEquals(compatibility, decoded); + + } } From 0b6230a9ca4e222e7d2db46e13ac83a629d23af2 Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Thu, 18 Jun 2020 05:07:10 -0700 Subject: [PATCH 17/70] license Signed-off-by: Shivesh Ranjan --- .../schemaregistry/client/SchemaRegistryClientImpl.java | 2 +- .../schemaregistry/client/TestSchemaRegistryClient.java | 2 +- .../schemaregistry/contract/data/BackwardAndForward.java | 6 +++--- .../pravega/schemaregistry/contract/data/Compatibility.java | 6 +++--- .../io/pravega/schemaregistry/contract/data/SchemaInfo.java | 2 +- .../schemaregistry/contract/data/SerializationFormat.java | 2 +- .../schemaregistry/contract/transform/ModelHelper.java | 6 +++--- .../java/io/pravega/schemaregistry/contract/v1/ApiV1.java | 2 +- .../schemaregistry/contract/transform/ModelHelperTest.java | 6 +++--- 9 files changed, 17 insertions(+), 17 deletions(-) diff --git a/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientImpl.java b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientImpl.java index 5314e89db..113f7894e 100644 --- a/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientImpl.java +++ b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientImpl.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 */ package io.pravega.schemaregistry.client; diff --git a/client/src/test/java/io/pravega/schemaregistry/client/TestSchemaRegistryClient.java b/client/src/test/java/io/pravega/schemaregistry/client/TestSchemaRegistryClient.java index d80b4ce84..5ccc3ee11 100644 --- a/client/src/test/java/io/pravega/schemaregistry/client/TestSchemaRegistryClient.java +++ b/client/src/test/java/io/pravega/schemaregistry/client/TestSchemaRegistryClient.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 */ package io.pravega.schemaregistry.client; diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/data/BackwardAndForward.java b/contract/src/main/java/io/pravega/schemaregistry/contract/data/BackwardAndForward.java index 9b3c2fe4c..460111e7a 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/data/BackwardAndForward.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/data/BackwardAndForward.java @@ -1,11 +1,11 @@ /** * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. - *

+ * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 + * + * http://www.apache.org/licenses/LICENSE-2.0 */ package io.pravega.schemaregistry.contract.data; diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/data/Compatibility.java b/contract/src/main/java/io/pravega/schemaregistry/contract/data/Compatibility.java index 1f6b262a4..e2d8da0ac 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/data/Compatibility.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/data/Compatibility.java @@ -1,11 +1,11 @@ /** * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. - *

+ * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 + * + * http://www.apache.org/licenses/LICENSE-2.0 */ package io.pravega.schemaregistry.contract.data; diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaInfo.java b/contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaInfo.java index bc4ed9f62..c71f655b2 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaInfo.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/data/SchemaInfo.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 */ package io.pravega.schemaregistry.contract.data; diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/data/SerializationFormat.java b/contract/src/main/java/io/pravega/schemaregistry/contract/data/SerializationFormat.java index 4d7523345..2cee9f4fc 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/data/SerializationFormat.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/data/SerializationFormat.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 */ package io.pravega.schemaregistry.contract.data; diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/transform/ModelHelper.java b/contract/src/main/java/io/pravega/schemaregistry/contract/transform/ModelHelper.java index 6e028b71d..caa030eff 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/transform/ModelHelper.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/transform/ModelHelper.java @@ -1,11 +1,11 @@ /** * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. - *

+ * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 + * + * http://www.apache.org/licenses/LICENSE-2.0 */ package io.pravega.schemaregistry.contract.transform; diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/v1/ApiV1.java b/contract/src/main/java/io/pravega/schemaregistry/contract/v1/ApiV1.java index 06201971b..5f693e218 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/v1/ApiV1.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/v1/ApiV1.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 */ package io.pravega.schemaregistry.contract.v1; diff --git a/contract/src/test/java/io/pravega/schemaregistry/contract/transform/ModelHelperTest.java b/contract/src/test/java/io/pravega/schemaregistry/contract/transform/ModelHelperTest.java index c229b26dd..73b3e58d0 100644 --- a/contract/src/test/java/io/pravega/schemaregistry/contract/transform/ModelHelperTest.java +++ b/contract/src/test/java/io/pravega/schemaregistry/contract/transform/ModelHelperTest.java @@ -1,11 +1,11 @@ /** * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. - *

+ * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 + * + * http://www.apache.org/licenses/LICENSE-2.0 */ package io.pravega.schemaregistry.contract.transform; From 911f793a37c50c02685d271f65be304d64164a5a Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Thu, 18 Jun 2020 05:08:17 -0700 Subject: [PATCH 18/70] merge with contract Signed-off-by: Shivesh Ranjan --- .../serializers/SerializerConfig.java | 16 ++++++++-------- .../testobjs/SchemaDefinitions.java | 2 +- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java index 276d71943..9755468ba 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java @@ -1,11 +1,11 @@ /** * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. - * + *

* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * http://www.apache.org/licenses/LICENSE-2.0 */ package io.pravega.schemaregistry.serializers; @@ -18,7 +18,6 @@ import io.pravega.schemaregistry.contract.data.Compatibility; import io.pravega.schemaregistry.contract.data.EncodingInfo; import io.pravega.schemaregistry.contract.data.GroupProperties; -import io.pravega.schemaregistry.contract.data.SchemaValidationRules; import io.pravega.schemaregistry.contract.data.SerializationFormat; import lombok.AccessLevel; import lombok.Builder; @@ -43,6 +42,7 @@ public class SerializerConfig { private final static Codec GZIP = CodecFactory.gzip(); private final static Codec SNAPPY = CodecFactory.snappy(); + /** * Name of the group. */ @@ -113,21 +113,21 @@ public SerializerConfigBuilder createGroup(SerializationFormat serializationForm } public SerializerConfigBuilder createGroup(SerializationFormat serializationFormat, boolean allowMultipleTypes) { - return createGroup(serializationFormat, SchemaValidationRules.of(Compatibility.fullTransitive()), allowMultipleTypes); + return createGroup(serializationFormat, Compatibility.fullTransitive(), allowMultipleTypes); } - public SerializerConfigBuilder createGroup(SerializationFormat serializationFormat, SchemaValidationRules rules, boolean allowMultipleTypes) { + public SerializerConfigBuilder createGroup(SerializationFormat serializationFormat, Compatibility rules, boolean allowMultipleTypes) { this.createGroup = true; this.groupProperties = new GroupProperties(serializationFormat, rules, allowMultipleTypes); return this; } - + public SerializerConfigBuilder registryClient(SchemaRegistryClient client) { Preconditions.checkArgument(client != null); this.registryConfigOrClient = Either.right(client); return this; } - + public SerializerConfigBuilder registryConfig(SchemaRegistryClientConfig config) { Preconditions.checkArgument(config != null); this.registryConfigOrClient = Either.left(config); diff --git a/serializers/src/test/java/io/pravega/schemaregistry/testobjs/SchemaDefinitions.java b/serializers/src/test/java/io/pravega/schemaregistry/testobjs/SchemaDefinitions.java index ba4fa50fe..fd5830720 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/testobjs/SchemaDefinitions.java +++ b/serializers/src/test/java/io/pravega/schemaregistry/testobjs/SchemaDefinitions.java @@ -29,7 +29,7 @@ public class SchemaDefinitions { .noDefault() .name("b") .type(Schema.create(Schema.Type.STRING)) - .withDefault("backward compatible with schema1") + .withDefault("backwardPolicy compatible with schema1") .endRecord(); public static final Schema SCHEMA3 = SchemaBuilder From c9f9d3f5541dd1a0bfb6a26eaaa34f1c9a9e19f0 Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Thu, 18 Jun 2020 05:10:57 -0700 Subject: [PATCH 19/70] license Signed-off-by: Shivesh Ranjan --- .../java/io/pravega/schemaregistry/common/HashUtil.java | 2 +- .../java/io/pravega/schemaregistry/common/NameUtil.java | 2 +- .../serializers/MultipleFormatJsonStringDeserializer.java | 2 +- .../schemaregistry/serializers/PravegaDeserializer.java | 2 +- .../schemaregistry/serializers/PravegaSerializer.java | 2 +- .../schemaregistry/serializers/SerializerConfig.java | 6 +++--- .../schemaregistry/serializers/SerializerFactory.java | 2 +- .../pravega/schemaregistry/serializers/SerializerTest.java | 2 +- .../io/pravega/schemaregistry/testobjs/DerivedUser1.java | 2 +- .../io/pravega/schemaregistry/testobjs/DerivedUser2.java | 2 +- .../test/java/io/pravega/schemaregistry/testobjs/User.java | 2 +- 11 files changed, 13 insertions(+), 13 deletions(-) diff 
--git a/common/src/main/java/io/pravega/schemaregistry/common/HashUtil.java b/common/src/main/java/io/pravega/schemaregistry/common/HashUtil.java index b3597bf1c..e03b29981 100644 --- a/common/src/main/java/io/pravega/schemaregistry/common/HashUtil.java +++ b/common/src/main/java/io/pravega/schemaregistry/common/HashUtil.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 */ package io.pravega.schemaregistry.common; diff --git a/common/src/main/java/io/pravega/schemaregistry/common/NameUtil.java b/common/src/main/java/io/pravega/schemaregistry/common/NameUtil.java index 4bfe715f3..72ef5e158 100644 --- a/common/src/main/java/io/pravega/schemaregistry/common/NameUtil.java +++ b/common/src/main/java/io/pravega/schemaregistry/common/NameUtil.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 */ package io.pravega.schemaregistry.common; diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatJsonStringDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatJsonStringDeserializer.java index f77a4a084..10a816293 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatJsonStringDeserializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatJsonStringDeserializer.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 */ package io.pravega.schemaregistry.serializers; diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/PravegaDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/PravegaDeserializer.java index 96dbfd944..cfb286bcd 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/PravegaDeserializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/PravegaDeserializer.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 */ package io.pravega.schemaregistry.serializers; diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/PravegaSerializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/PravegaSerializer.java index fa1b3fe38..6afe07cc1 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/PravegaSerializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/PravegaSerializer.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 */ package io.pravega.schemaregistry.serializers; diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java index 9755468ba..7f526aeef 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java @@ -1,11 +1,11 @@ /** * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. - *

+ * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 + * + * http://www.apache.org/licenses/LICENSE-2.0 */ package io.pravega.schemaregistry.serializers; diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java index 3335127c0..ab1b30982 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 */ package io.pravega.schemaregistry.serializers; diff --git a/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java b/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java index 79abb783e..dc492abfb 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java +++ b/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 */ package io.pravega.schemaregistry.serializers; diff --git a/serializers/src/test/java/io/pravega/schemaregistry/testobjs/DerivedUser1.java b/serializers/src/test/java/io/pravega/schemaregistry/testobjs/DerivedUser1.java index 3e448b694..bbf8651e2 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/testobjs/DerivedUser1.java +++ b/serializers/src/test/java/io/pravega/schemaregistry/testobjs/DerivedUser1.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 */ package io.pravega.schemaregistry.testobjs; diff --git a/serializers/src/test/java/io/pravega/schemaregistry/testobjs/DerivedUser2.java b/serializers/src/test/java/io/pravega/schemaregistry/testobjs/DerivedUser2.java index 5d8a7aef2..111caef30 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/testobjs/DerivedUser2.java +++ b/serializers/src/test/java/io/pravega/schemaregistry/testobjs/DerivedUser2.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 */ package io.pravega.schemaregistry.testobjs; diff --git a/serializers/src/test/java/io/pravega/schemaregistry/testobjs/User.java b/serializers/src/test/java/io/pravega/schemaregistry/testobjs/User.java index 6ff4f2322..fb7129bf3 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/testobjs/User.java +++ b/serializers/src/test/java/io/pravega/schemaregistry/testobjs/User.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 */ package io.pravega.schemaregistry.testobjs; From 5d9157478c092b587bb1567084f5778e31064b45 Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Thu, 18 Jun 2020 06:07:47 -0700 Subject: [PATCH 20/70] javadoc statement Signed-off-by: Shivesh Ranjan --- .../pravega/schemaregistry/client/SchemaRegistryClient.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java index 238b4b80f..3b9e629ab 100644 --- a/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java +++ b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java @@ -63,8 +63,8 @@ public interface SchemaRegistryClient { * List all groups that the user is authorized on. This returns an iterator where each element is a pair of group * name and group properties. * This iterator can be used to iterate over each element until all elements are exhausted. - * The implementation should guarantee that all groups added before and until the iterator returns - * {@link Iterator#hasNext()} = true can be iterated over. + * The implementation should guarantee that all groups added before and till the iterator continues to return + * {@link Iterator#hasNext()} = true should be available for iteration. * * @return map of names of groups with corresponding group properties for all groups. * @throws UnauthorizedException if the user is unauthorized. From 98a9c6b62aa3572c01980fba77e27d0bf3e04bad Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Thu, 18 Jun 2020 06:37:31 -0700 Subject: [PATCH 21/70] javadoc Signed-off-by: Shivesh Ranjan --- .../client/SchemaRegistryClient.java | 27 ++++++++++--------- .../client/SchemaRegistryClientImpl.java | 6 ++--- .../client/exceptions/RegistryExceptions.java | 2 +- .../contract/data/Compatibility.java | 24 ++++++++--------- 4 files changed, 30 insertions(+), 29 deletions(-) diff --git a/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java index 3b9e629ab..8234ec569 100644 --- a/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java +++ b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java @@ -39,7 +39,7 @@ public interface SchemaRegistryClient { * Add group is idempotent. If the group by the same id already exists the api will return false. * * @param groupId Id for the group that uniquely identifies the group. - * @param groupProperties groupProperties Group properties for the group. These include serialization format, validation rules, + * @param groupProperties groupProperties Group properties for the group. These include serialization format, compatibility policy, * and flag to declare whether multiple schemas representing distinct object types can be * registered with the group. Type identify objects of same type. Schema compatibility checks * are always performed for schemas that share same {@link SchemaInfo#type}. @@ -88,19 +88,19 @@ public interface SchemaRegistryClient { GroupProperties getGroupProperties(String groupId) throws ResourceNotFoundException, UnauthorizedException; /** - * Update group's schema validation policy. 
If previous rules are not supplied, then the update to the rules will be - * performed unconditionally. However, if previous rules are supplied, then the update will be performed if and only if - * existing {@link GroupProperties#compatibility} match previous rules. + * Update group's schema validation policy. If previous compatibility policy are not supplied, then the update to the policy will be + * performed unconditionally. However, if previous compatibility policy are supplied, then the update will be performed if and only if + * existing {@link GroupProperties#compatibility} match previous compatibility policy. * * @param groupId Id for the group. - * @param validationRules New Compatibility for the group. - * @param previousRules Previous compatibility. + * @param compatibility New Compatibility for the group. + * @param previous Previous compatibility. * @return true if the update was accepted by the service, false if it was rejected because of precondition failure. - * Precondition failure can occur if previous rules were specified and they do not match the rules set on the group. + * Precondition failure can occur if previous compatibility policy were specified and they do not match the policy set on the group. * @throws ResourceNotFoundException if group is not found. * @throws UnauthorizedException if the user is unauthorized. */ - boolean updateCompatibility(String groupId, Compatibility validationRules, @Nullable Compatibility previousRules) + boolean updateCompatibility(String groupId, Compatibility compatibility, @Nullable Compatibility previous) throws ResourceNotFoundException, UnauthorizedException; /** @@ -281,18 +281,18 @@ SchemaWithVersion getLatestSchemaVersion(String groupId, @Nullable String schema * * @param groupId Id for the group. * @param schemaType type of object identified by {@link SchemaInfo#type}. - * @return Ordered list of schemas with versions and validation rules for all schemas in the group. + * @return Ordered list of schemas with versions and compatibility policy for all schemas in the group. * @throws ResourceNotFoundException if group is not found. * @throws UnauthorizedException if the user is unauthorized. */ List getSchemaVersions(String groupId, @Nullable String schemaType) throws ResourceNotFoundException, UnauthorizedException; /** - * Checks whether given schema is valid by applying validation rules against previous schemas in the group + * Checks whether given schema is valid by applying compatibility policy against previous schemas in the group * subject to current {@link GroupProperties#compatibility} policy. * The invocation of this method will perform exactly the same validations as {@link SchemaRegistryClient#addSchema(String, SchemaInfo)} * but without registering the schema. This is primarily intended to be used during schema development phase to validate that - * the changes to schema are in compliance with validation rules for the group. + * the changes to schema are in compliance with compatibility policy for the group. * * @param groupId Id for the group. * @param schemaInfo Schema to check for validity. @@ -336,14 +336,15 @@ SchemaWithVersion getLatestSchemaVersion(String groupId, @Nullable String schema void addCodecType(String groupId, String codecType) throws ResourceNotFoundException, UnauthorizedException; /** - * Gets complete schema evolution history of the group with schemas, versions, rules and time for the group. 
+ * Gets complete schema evolution history of the group with schemas, versions, compatibility policy and + * time when the schema was added to the group. * The order in the list matches the order in which schemas were evolved within the group. * This call is atomic and will get a consistent view at the time when the request is processed on the service. * So all schemas that were added before this call are returned and all schemas that were deleted before this call * are excluded. * * @param groupId Id for the group. - * @return Ordered list of schemas with versions and validation rules for all schemas in the group. + * @return Ordered list of schemas with versions and compatibility policy for all schemas in the group. * @throws ResourceNotFoundException if group is not found. * @throws UnauthorizedException if the user is unauthorized. */ diff --git a/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientImpl.java b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientImpl.java index 113f7894e..d7a5da869 100644 --- a/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientImpl.java +++ b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientImpl.java @@ -155,12 +155,12 @@ public GroupProperties getGroupProperties(String groupId) { } @Override - public boolean updateCompatibility(String groupId, Compatibility compatibility, @Nullable Compatibility previousRules) { + public boolean updateCompatibility(String groupId, Compatibility compatibility, @Nullable Compatibility previous) { return withRetry(() -> { UpdateCompatibilityRequest request = new UpdateCompatibilityRequest() .compatibility(ModelHelper.encode(compatibility)); - if (previousRules != null) { - request.setPreviousCompatibility(ModelHelper.encode(previousRules)); + if (previous != null) { + request.setPreviousCompatibility(ModelHelper.encode(previous)); } Response response = groupProxy.updateCompatibility(groupId, request); diff --git a/client/src/main/java/io/pravega/schemaregistry/client/exceptions/RegistryExceptions.java b/client/src/main/java/io/pravega/schemaregistry/client/exceptions/RegistryExceptions.java index 7bbb28966..57d0cbe46 100644 --- a/client/src/main/java/io/pravega/schemaregistry/client/exceptions/RegistryExceptions.java +++ b/client/src/main/java/io/pravega/schemaregistry/client/exceptions/RegistryExceptions.java @@ -140,7 +140,7 @@ public MalformedSchemaException(String errorMessage) { } /** - * The schema validation failed as it was validated against the ValidationRules set for the group. + * The schema validation failed as it was validated against the Compatibility set for the group. 
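To show how the revised updateCompatibility contract above is meant to be used, here is a minimal client-side sketch; the wrapper class, method name and group id parameter are illustrative assumptions, not part of the patch:

    import io.pravega.schemaregistry.client.SchemaRegistryClient;
    import io.pravega.schemaregistry.contract.data.Compatibility;

    // Hypothetical helper, for illustration only.
    public class UpdateCompatibilityExample {
        // Tighten a group's policy to full-transitive checks, using the policy
        // currently set on the group as the precondition (compare-and-set style).
        static boolean tightenPolicy(SchemaRegistryClient client, String groupId) {
            Compatibility current = client.getGroupProperties(groupId).getCompatibility();
            // Returns false if the precondition no longer matches, i.e. someone
            // else changed the group's compatibility in the meantime.
            return client.updateCompatibility(groupId, Compatibility.fullTransitive(), current);
        }
    }
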
*/ public static class SchemaValidationFailedException extends RegistryExceptions { public SchemaValidationFailedException(String errorMessage) { diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/data/Compatibility.java b/contract/src/main/java/io/pravega/schemaregistry/contract/data/Compatibility.java index e2d8da0ac..9cd60be43 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/data/Compatibility.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/data/Compatibility.java @@ -67,10 +67,6 @@ public static Compatibility allowAny() { public static Compatibility denyAll() { return new Compatibility(Type.DenyAll); } - - private static Compatibility backwardAndForward(BackwardAndForward backwardAndForward) { - return new Compatibility(Type.BackwardAndForward, backwardAndForward); - } /** * Method to create a compatibility policy of type backwardPolicy. BackwardPolicy policy implies new schema will be validated @@ -157,14 +153,14 @@ public static Compatibility fullTransitive() { } /** - * Method to create a schemaValidationRules policy of type backwardPolicy till and forwardOne till. This is a combination of - * backwardPolicy till and forwardOne till policies. + * Method to create a schema compatibility policy of type backwardPolicy till and forward till. This is a combination of + * backwardPolicy till and forward till policies. * All previous schemas till schema identified by version specified with {@link BackwardAndForward.BackwardTill} policy * can read data written by new schema. New schema can be used to read data written by any of previous schemas till schema * identified by version {@link BackwardAndForward.ForwardTill}. * - * @param backwardTill version till which backwardPolicy schemaValidationRules is checked for. - * @param forwardTill version till which forwardOne schemaValidationRules is checked for. + * @param backwardTill version till which backwardPolicy schema compatibility is checked for. + * @param forwardTill version till which forwardOne schema compatibility is checked for. * @return Compatibility policy with backwardTill check And ForwardTill check. */ public static Compatibility backwardTillAndForwardTill(VersionInfo backwardTill, VersionInfo forwardTill) { @@ -172,12 +168,12 @@ public static Compatibility backwardTillAndForwardTill(VersionInfo backwardTill, } /** - * Method to create a schemaValidationRules policy of type backwardPolicy one and forwardOne till. + * Method to create a schema compatibility policy of type backwardPolicy one and forward till. * * All previous schemas till schema identified by version {@link BackwardAndForward.ForwardTill} * can read data written by new schema. New schema can be used to read data written by previous schema. * - * @param forwardTill version till which forwardTill schemaValidationRules is checked for. + * @param forwardTill version till which forwardTill schema compatibility is checked for. * @return Compatibility policy that describes backward check And ForwardTill check. */ public static Compatibility backwardOneAndForwardTill(VersionInfo forwardTill) { @@ -185,18 +181,22 @@ public static Compatibility backwardOneAndForwardTill(VersionInfo forwardTill) { } /** - * Method to create a schemaValidationRules policy of type backwardPolicy till one and forwardOne one. + * Method to create a schema compatibility policy of type backwardPolicy till one and forwardOne one. 
* * All previous schemas till schema identified by version {@link BackwardAndForward.BackwardTill} * can read data written by new schema. New schema can be used to read data written by previous schema. * - * @param backwardTill version till which backwardTill schemaValidationRules is checked for. + * @param backwardTill version till which backwardTill schema compatibility is checked for. * @return BackwardAndForward with backwardTill check And Forward check. */ public static Compatibility backwardTillAndForwardOne(VersionInfo backwardTill) { return Compatibility.backwardAndForward(new BackwardAndForward(new BackwardAndForward.BackwardTill(backwardTill), new BackwardAndForward.Forward())); } + private static Compatibility backwardAndForward(BackwardAndForward backwardAndForward) { + return new Compatibility(Type.BackwardAndForward, backwardAndForward); + } + public static class CompatibilityBuilder implements ObjectBuilder { } } From 28660e4bb9ebae6381a725f94f5eaad4d58bfa10 Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Thu, 18 Jun 2020 08:21:04 -0700 Subject: [PATCH 22/70] deserializerAsJson name fix Signed-off-by: Shivesh Ranjan --- .../serializers/JsonGenericDeserlizer.java | 5 +++++ .../schemaregistry/serializers/JsonSerializer.java | 9 ++++++--- .../schemaregistry/serializers/SerializerFactory.java | 2 +- .../schemaregistry/serializers/SerializerTest.java | 2 +- 4 files changed, 13 insertions(+), 5 deletions(-) diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericDeserlizer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericDeserlizer.java index bc9dfce51..be0240c03 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericDeserlizer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericDeserlizer.java @@ -9,6 +9,8 @@ */ package io.pravega.schemaregistry.serializers; +import com.fasterxml.jackson.annotation.JsonAutoDetect; +import com.fasterxml.jackson.annotation.PropertyAccessor; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.module.jsonSchema.JsonSchema; @@ -34,6 +36,9 @@ class JsonGenericDeserlizer extends AbstractPravegaDeserializer() { @Override public JsonSchema load(SchemaInfo schemaInfo) throws Exception { diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializer.java index dbf1c0676..facd15dc5 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializer.java @@ -9,8 +9,9 @@ */ package io.pravega.schemaregistry.serializers; +import com.fasterxml.jackson.annotation.JsonAutoDetect; +import com.fasterxml.jackson.annotation.PropertyAccessor; import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.base.Charsets; import io.pravega.schemaregistry.client.SchemaRegistryClient; import io.pravega.schemaregistry.codec.Codec; import io.pravega.schemaregistry.contract.data.SchemaInfo; @@ -25,13 +26,15 @@ class JsonSerializer extends AbstractPravegaSerializer { Codec codec, boolean registerSchema) { super(groupId, client, schema, codec, registerSchema); objectMapper = new ObjectMapper(); + objectMapper.setVisibility(PropertyAccessor.ALL, JsonAutoDetect.Visibility.ANY); + 
objectMapper.setVisibility(PropertyAccessor.FIELD, JsonAutoDetect.Visibility.ANY); + objectMapper.setVisibility(PropertyAccessor.CREATOR, JsonAutoDetect.Visibility.ANY); } @SneakyThrows @Override protected void serialize(T var, SchemaInfo schemaInfo, OutputStream outputStream) { - String json = objectMapper.writeValueAsString(var); - outputStream.write(json.getBytes(Charsets.UTF_8)); + objectMapper.writeValue(outputStream, var); outputStream.flush(); } } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java index ab1b30982..f66a502ba 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java @@ -626,7 +626,7 @@ public static Serializer multiFormatGenericDeserializer(SerializerConfig * @param config serializer config * @return a deserializer that can deserialize protobuf, json or avro events into java objects. */ - public static Serializer deserializerAsJsonString(SerializerConfig config) { + public static Serializer deserializeAsJsonString(SerializerConfig config) { String groupId = config.getGroupId(); SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : diff --git a/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java b/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java index dc492abfb..79b8c6c0e 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java +++ b/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java @@ -301,7 +301,7 @@ public void testMultiformatDeserializers() throws IOException { deserialized = deserializer.deserialize(serializedJson); assertTrue(deserialized instanceof JSonGenericObject); - Serializer jsonStringDeserializer = SerializerFactory.deserializerAsJsonString(config); + Serializer jsonStringDeserializer = SerializerFactory.deserializeAsJsonString(config); serializedAvro.position(0); String jsonString = jsonStringDeserializer.deserialize(serializedAvro); assertNotNull(jsonString); From 53b8988bff341705263abbf4a7689e58464504d3 Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Thu, 18 Jun 2020 21:37:19 -0700 Subject: [PATCH 23/70] compatibility enum reversal Signed-off-by: Shivesh Ranjan --- .../client/TestSchemaRegistryClient.java | 7 +-- .../contract/data/BackwardAndForward.java | 5 +- .../contract/data/Compatibility.java | 56 ++++++++++--------- .../generated/rest/model/Compatibility.java | 40 ++++++++----- .../contract/transform/ModelHelper.java | 50 ++++++++++++----- contract/src/main/swagger/SchemaRegistry.yaml | 11 +++- .../contract/transform/ModelHelperTest.java | 14 ++--- 7 files changed, 116 insertions(+), 67 deletions(-) diff --git a/client/src/test/java/io/pravega/schemaregistry/client/TestSchemaRegistryClient.java b/client/src/test/java/io/pravega/schemaregistry/client/TestSchemaRegistryClient.java index 5ccc3ee11..10824420c 100644 --- a/client/src/test/java/io/pravega/schemaregistry/client/TestSchemaRegistryClient.java +++ b/client/src/test/java/io/pravega/schemaregistry/client/TestSchemaRegistryClient.java @@ -11,7 +11,6 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; -import 
io.pravega.schemaregistry.contract.data.BackwardAndForward; import io.pravega.schemaregistry.contract.data.Compatibility; import io.pravega.schemaregistry.contract.data.EncodingId; import io.pravega.schemaregistry.contract.data.EncodingInfo; @@ -92,7 +91,7 @@ public void testGroup() { Map.Entry group = groups.stream().filter(x -> x.getKey().equals(groupName)).findAny().orElseThrow(RuntimeException::new); assertEquals(group.getValue().getSerializationFormat(), SerializationFormat.Any); - assertTrue(group.getValue().getCompatibility().getBackwardAndForward().getBackwardPolicy() instanceof BackwardAndForward.Backward); + assertEquals(group.getValue().getCompatibility(), Compatibility.backward()); reset(response); } @@ -125,7 +124,7 @@ public void testListGroup() { Map.Entry group = groups.stream().filter(x -> x.getKey().equals(groupId)).findAny().orElseThrow(RuntimeException::new); assertEquals(group.getValue().getSerializationFormat(), SerializationFormat.Any); - assertTrue(group.getValue().getCompatibility().getBackwardAndForward().getBackwardPolicy() instanceof BackwardAndForward.Backward); + assertEquals(group.getValue().getCompatibility(), Compatibility.backward()); // Runtime Exception doReturn(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode()).when(response).getStatus(); @@ -166,7 +165,7 @@ public void testGetGroupProperties() { doReturn(mygroup).when(response).readEntity(eq(GroupProperties.class)); io.pravega.schemaregistry.contract.data.GroupProperties groupProperties = client.getGroupProperties("mygroup"); assertEquals(groupProperties.getSerializationFormat(), SerializationFormat.Any); - assertTrue(groupProperties.getCompatibility().getBackwardAndForward().getBackwardPolicy() instanceof BackwardAndForward.Backward); + assertEquals(groupProperties.getCompatibility(), Compatibility.backward()); // ResourceNotFoundException doReturn(Response.Status.NOT_FOUND.getStatusCode()).when(response).getStatus(); AssertExtensions.assertThrows("An exception should have been thrown", () -> client.getGroupProperties( diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/data/BackwardAndForward.java b/contract/src/main/java/io/pravega/schemaregistry/contract/data/BackwardAndForward.java index 460111e7a..0af2bf9e2 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/data/BackwardAndForward.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/data/BackwardAndForward.java @@ -31,7 +31,10 @@ @Data @Builder public class BackwardAndForward { - + public static final BackwardPolicy BACKWARD = new Backward(); + public static final BackwardPolicy BACKWARD_TRANSITIVE = new BackwardTransitive(); + public static final ForwardPolicy FORWARD = new Forward(); + public static final ForwardPolicy FORWARD_TRANSITIVE = new ForwardTransitive(); private final BackwardPolicy backwardPolicy; private final ForwardPolicy forwardPolicy; diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/data/Compatibility.java b/contract/src/main/java/io/pravega/schemaregistry/contract/data/Compatibility.java index 9cd60be43..11ba771ba 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/data/Compatibility.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/data/Compatibility.java @@ -1,10 +1,10 @@ /** * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. - * + * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 */ package io.pravega.schemaregistry.contract.data; @@ -34,7 +34,7 @@ private Compatibility(Type type) { } private Compatibility(Type type, BackwardAndForward backwardAndForward) { - Preconditions.checkArgument(!type.equals(Type.BackwardAndForward) || backwardAndForward != null); + Preconditions.checkArgument(!type.equals(Type.Advanced) || backwardAndForward != null); this.type = type; this.backwardAndForward = backwardAndForward; } @@ -42,12 +42,18 @@ private Compatibility(Type type, BackwardAndForward backwardAndForward) { /** * {@link Type#AllowAny}: allow any changes to schema without any checks performed by the registry. * {@link Type#DenyAll}: disables any changes to the schema for the group. - * {@link Type#BackwardAndForward}: + * {@link Type#Advanced}: */ public enum Type { AllowAny, DenyAll, - BackwardAndForward, + Backward, + Forward, + BackwardTransitive, + ForwardTransitive, + Full, + FullTransitive, + Advanced, } /** @@ -67,7 +73,7 @@ public static Compatibility allowAny() { public static Compatibility denyAll() { return new Compatibility(Type.DenyAll); } - + /** * Method to create a compatibility policy of type backwardPolicy. BackwardPolicy policy implies new schema will be validated * to be capable of reading data written using the previous schema. @@ -75,7 +81,7 @@ public static Compatibility denyAll() { * @return Compatibility policy with Backward check. */ public static Compatibility backward() { - return backwardAndForward(new BackwardAndForward(new BackwardAndForward.Backward(), null)); + return new Compatibility(Type.Backward); } /** @@ -86,7 +92,7 @@ public static Compatibility backward() { * @return Compatibility policy with BackwardTill version check. */ public static Compatibility backwardTill(VersionInfo backwardTill) { - return Compatibility.backwardAndForward(new BackwardAndForward(new BackwardAndForward.BackwardTill(backwardTill), null)); + return backwardAndForward(new BackwardAndForward(new BackwardAndForward.BackwardTill(backwardTill), null)); } /** @@ -96,7 +102,7 @@ public static Compatibility backwardTill(VersionInfo backwardTill) { * @return Compatibility policy with BackwardTransitive check. */ public static Compatibility backwardTransitive() { - return Compatibility.backwardAndForward(new BackwardAndForward(new BackwardAndForward.BackwardTransitive(), null)); + return new Compatibility(Type.BackwardTransitive); } /** @@ -106,7 +112,7 @@ public static Compatibility backwardTransitive() { * @return Compatibility policy with Forward compatibility check. */ public static Compatibility forward() { - return Compatibility.backwardAndForward(new BackwardAndForward(null, new BackwardAndForward.Forward())); + return new Compatibility(Type.Forward); } /** @@ -117,7 +123,7 @@ public static Compatibility forward() { * @return Compatibility policy with ForwardTill check. */ public static Compatibility forwardTill(VersionInfo forwardTill) { - return Compatibility.backwardAndForward(new BackwardAndForward(null, new BackwardAndForward.ForwardTill(forwardTill))); + return backwardAndForward(new BackwardAndForward(null, new BackwardAndForward.ForwardTill(forwardTill))); } /** @@ -128,7 +134,7 @@ public static Compatibility forwardTill(VersionInfo forwardTill) { * @return Compatibility policy with ForwardTransitive check. 
*/ public static Compatibility forwardTransitive() { - return Compatibility.backwardAndForward(new BackwardAndForward(null, new BackwardAndForward.ForwardTransitive())); + return new Compatibility(Type.ForwardTransitive); } /** @@ -138,7 +144,7 @@ public static Compatibility forwardTransitive() { * @return Compatibility policy with Backward and Forward compatibility checks. */ public static Compatibility full() { - return Compatibility.backwardAndForward(new BackwardAndForward(new BackwardAndForward.Backward(), new BackwardAndForward.Forward())); + return new Compatibility(Type.Full); } /** @@ -149,52 +155,52 @@ public static Compatibility full() { * @return Compatibility policy of type Backward Transitive and Forward Transitive checks. */ public static Compatibility fullTransitive() { - return Compatibility.backwardAndForward(new BackwardAndForward(new BackwardAndForward.BackwardTransitive(), new BackwardAndForward.ForwardTransitive())); + return new Compatibility(Type.FullTransitive); } /** - * Method to create a schema compatibility policy of type backwardPolicy till and forward till. This is a combination of + * Method to create a compatibility policy of type backwardPolicy till and forward till. This is a combination of * backwardPolicy till and forward till policies. * All previous schemas till schema identified by version specified with {@link BackwardAndForward.BackwardTill} policy * can read data written by new schema. New schema can be used to read data written by any of previous schemas till schema * identified by version {@link BackwardAndForward.ForwardTill}. * - * @param backwardTill version till which backwardPolicy schema compatibility is checked for. - * @param forwardTill version till which forwardOne schema compatibility is checked for. + * @param backwardTill version till which backwardPolicy compatibility is checked for. + * @param forwardTill version till which forward compatibility is checked for. * @return Compatibility policy with backwardTill check And ForwardTill check. */ public static Compatibility backwardTillAndForwardTill(VersionInfo backwardTill, VersionInfo forwardTill) { - return Compatibility.backwardAndForward(new BackwardAndForward(new BackwardAndForward.BackwardTill(backwardTill), new BackwardAndForward.ForwardTill(forwardTill))); + return backwardAndForward(new BackwardAndForward(new BackwardAndForward.BackwardTill(backwardTill), new BackwardAndForward.ForwardTill(forwardTill))); } /** - * Method to create a schema compatibility policy of type backwardPolicy one and forward till. + * Method to create a compatibility policy of type backwardPolicy one and forward till. * * All previous schemas till schema identified by version {@link BackwardAndForward.ForwardTill} * can read data written by new schema. New schema can be used to read data written by previous schema. * - * @param forwardTill version till which forwardTill schema compatibility is checked for. + * @param forwardTill version till which forwardTill compatibility is checked for. * @return Compatibility policy that describes backward check And ForwardTill check. 
*/ public static Compatibility backwardOneAndForwardTill(VersionInfo forwardTill) { - return Compatibility.backwardAndForward(new BackwardAndForward(new BackwardAndForward.Backward(), new BackwardAndForward.ForwardTill(forwardTill))); + return backwardAndForward(new BackwardAndForward(BackwardAndForward.BACKWARD, new BackwardAndForward.ForwardTill(forwardTill))); } /** - * Method to create a schema compatibility policy of type backwardPolicy till one and forwardOne one. + * Method to create a compatibility policy of type backwardPolicy till one and forwardOne one. * * All previous schemas till schema identified by version {@link BackwardAndForward.BackwardTill} * can read data written by new schema. New schema can be used to read data written by previous schema. * - * @param backwardTill version till which backwardTill schema compatibility is checked for. + * @param backwardTill version till which backwardTill compatibility is checked for. * @return BackwardAndForward with backwardTill check And Forward check. */ public static Compatibility backwardTillAndForwardOne(VersionInfo backwardTill) { - return Compatibility.backwardAndForward(new BackwardAndForward(new BackwardAndForward.BackwardTill(backwardTill), new BackwardAndForward.Forward())); + return backwardAndForward(new BackwardAndForward(new BackwardAndForward.BackwardTill(backwardTill), BackwardAndForward.FORWARD)); } private static Compatibility backwardAndForward(BackwardAndForward backwardAndForward) { - return new Compatibility(Type.BackwardAndForward, backwardAndForward); + return new Compatibility(Type.Advanced, backwardAndForward); } public static class CompatibilityBuilder implements ObjectBuilder { diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/Compatibility.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/Compatibility.java index 0f7805e31..8e1096651 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/Compatibility.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/Compatibility.java @@ -36,7 +36,19 @@ public enum PolicyEnum { DENYALL("DenyAll"), - BACKWARDANDFORWARD("BackwardAndForward"); + FORWARD("Forward"), + + BACKWARD("Backward"), + + BACKWARDTRANSITIVE("BackwardTransitive"), + + FORWARDTRANSITIVE("ForwardTransitive"), + + FULL("Full"), + + FULLTRANSITIVE("FullTransitive"), + + ADVANCED("Advanced"); private String value; @@ -64,8 +76,8 @@ public static PolicyEnum fromValue(String text) { @JsonProperty("policy") private PolicyEnum policy = null; - @JsonProperty("backwardAndForward") - private BackwardAndForward backwardAndForward = null; + @JsonProperty("advanced") + private BackwardAndForward advanced = null; public Compatibility policy(PolicyEnum policy) { this.policy = policy; @@ -87,23 +99,23 @@ public void setPolicy(PolicyEnum policy) { this.policy = policy; } - public Compatibility backwardAndForward(BackwardAndForward backwardAndForward) { - this.backwardAndForward = backwardAndForward; + public Compatibility advanced(BackwardAndForward advanced) { + this.advanced = advanced; return this; } /** * Backward and forward policy details. 
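As a usage sketch for the reworked Compatibility type above (the class and method names here are assumed for illustration), the simple policies map directly onto the new enum values, while the version-bounded variants fall under Advanced:

    import io.pravega.schemaregistry.contract.data.Compatibility;
    import io.pravega.schemaregistry.contract.data.GroupProperties;
    import io.pravega.schemaregistry.contract.data.SerializationFormat;

    // Hypothetical helper, for illustration only.
    public class CompatibilityChoices {
        // Avro group holding a single object type, with backward checks on every new version.
        static GroupProperties avroGroupWithBackwardChecks() {
            return new GroupProperties(SerializationFormat.Avro, Compatibility.backward(), false);
        }

        // Group accepting any format and multiple object types, with no checks at all.
        static GroupProperties anythingGoesGroup() {
            return new GroupProperties(SerializationFormat.Any, Compatibility.allowAny(), true);
        }

        // Version-bounded policies such as Compatibility.backwardTill(version) or
        // backwardTillAndForwardTill(v1, v2) are the ones represented by Type.Advanced.
    }
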
- * @return backwardAndForward + * @return advanced **/ - @JsonProperty("backwardAndForward") + @JsonProperty("advanced") @ApiModelProperty(value = "Backward and forward policy details.") - public BackwardAndForward getBackwardAndForward() { - return backwardAndForward; + public BackwardAndForward getAdvanced() { + return advanced; } - public void setBackwardAndForward(BackwardAndForward backwardAndForward) { - this.backwardAndForward = backwardAndForward; + public void setAdvanced(BackwardAndForward advanced) { + this.advanced = advanced; } @@ -117,12 +129,12 @@ public boolean equals(java.lang.Object o) { } Compatibility compatibility = (Compatibility) o; return Objects.equals(this.policy, compatibility.policy) && - Objects.equals(this.backwardAndForward, compatibility.backwardAndForward); + Objects.equals(this.advanced, compatibility.advanced); } @Override public int hashCode() { - return Objects.hash(policy, backwardAndForward); + return Objects.hash(policy, advanced); } @@ -132,7 +144,7 @@ public String toString() { sb.append("class Compatibility {\n"); sb.append(" policy: ").append(toIndentedString(policy)).append("\n"); - sb.append(" backwardAndForward: ").append(toIndentedString(backwardAndForward)).append("\n"); + sb.append(" advanced: ").append(toIndentedString(advanced)).append("\n"); sb.append("}"); return sb.toString(); } diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/transform/ModelHelper.java b/contract/src/main/java/io/pravega/schemaregistry/contract/transform/ModelHelper.java index caa030eff..4a63928e7 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/transform/ModelHelper.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/transform/ModelHelper.java @@ -1,10 +1,10 @@ /** * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. - * + * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 */ package io.pravega.schemaregistry.contract.transform; @@ -70,20 +70,42 @@ public static io.pravega.schemaregistry.contract.data.SerializationFormat decode } public static io.pravega.schemaregistry.contract.data.Compatibility decode(Compatibility compatibility) { - io.pravega.schemaregistry.contract.data.Compatibility.Type type = searchEnum(io.pravega.schemaregistry.contract.data.Compatibility.Type.class, compatibility.getPolicy().name()); - switch (type) { - case AllowAny: - return io.pravega.schemaregistry.contract.data.Compatibility.allowAny(); - case DenyAll: - return io.pravega.schemaregistry.contract.data.Compatibility.denyAll(); - case BackwardAndForward: - return io.pravega.schemaregistry.contract.data.Compatibility + io.pravega.schemaregistry.contract.data.Compatibility decoded; + switch (compatibility.getPolicy()) { + case ALLOWANY: + decoded = io.pravega.schemaregistry.contract.data.Compatibility.allowAny(); + break; + case BACKWARD: + decoded = io.pravega.schemaregistry.contract.data.Compatibility.backward(); + break; + case BACKWARDTRANSITIVE: + decoded = io.pravega.schemaregistry.contract.data.Compatibility.backwardTransitive(); + break; + case FORWARD: + decoded = io.pravega.schemaregistry.contract.data.Compatibility.forward(); + break; + case FORWARDTRANSITIVE: + decoded = io.pravega.schemaregistry.contract.data.Compatibility.forwardTransitive(); + break; + case FULL: + decoded = io.pravega.schemaregistry.contract.data.Compatibility.full(); + break; + case FULLTRANSITIVE: + decoded = io.pravega.schemaregistry.contract.data.Compatibility.fullTransitive(); + break; + case DENYALL: + decoded = io.pravega.schemaregistry.contract.data.Compatibility.denyAll(); + break; + case ADVANCED: + decoded = io.pravega.schemaregistry.contract.data.Compatibility .builder() - .type(type) - .backwardAndForward(decode(compatibility.getBackwardAndForward())).build(); + .type(io.pravega.schemaregistry.contract.data.Compatibility.Type.Advanced) + .backwardAndForward(decode(compatibility.getAdvanced())).build(); + break; default: throw new IllegalArgumentException(); } + return decoded; } public static io.pravega.schemaregistry.contract.data.BackwardAndForward decode(BackwardAndForward compatibility) { @@ -211,8 +233,8 @@ public static GroupHistoryRecord encode(io.pravega.schemaregistry.contract.data. 
public static Compatibility encode(io.pravega.schemaregistry.contract.data.Compatibility compatibility) { Compatibility policy = new io.pravega.schemaregistry.contract.generated.rest.model.Compatibility() .policy(searchEnum(Compatibility.PolicyEnum.class, compatibility.getType().name())); - if (policy.getPolicy().equals(Compatibility.PolicyEnum.BACKWARDANDFORWARD)) { - policy.backwardAndForward(encode(compatibility.getBackwardAndForward())); + if (policy.getPolicy().equals(Compatibility.PolicyEnum.ADVANCED)) { + policy.advanced(encode(compatibility.getBackwardAndForward())); } return policy; } diff --git a/contract/src/main/swagger/SchemaRegistry.yaml b/contract/src/main/swagger/SchemaRegistry.yaml index 105d65bdc..b3be3c254 100644 --- a/contract/src/main/swagger/SchemaRegistry.yaml +++ b/contract/src/main/swagger/SchemaRegistry.yaml @@ -752,8 +752,14 @@ definitions: enum: - AllowAny - DenyAll - - BackwardAndForward - backwardAndForward: + - Forward + - Backward + - BackwardTransitive + - ForwardTransitive + - Full + - FullTransitive + - Advanced + advanced: description: Backward and forward policy details. $ref: "#/definitions/BackwardAndForward" required: @@ -929,5 +935,6 @@ definitions: description: Version for the schema in the group. additionalProperties: $ref: "#/definitions/VersionInfo" + required: - groups diff --git a/contract/src/test/java/io/pravega/schemaregistry/contract/transform/ModelHelperTest.java b/contract/src/test/java/io/pravega/schemaregistry/contract/transform/ModelHelperTest.java index 73b3e58d0..d984e938e 100644 --- a/contract/src/test/java/io/pravega/schemaregistry/contract/transform/ModelHelperTest.java +++ b/contract/src/test/java/io/pravega/schemaregistry/contract/transform/ModelHelperTest.java @@ -38,15 +38,15 @@ public class ModelHelperTest { public void testDecode() { SerializationFormat type = new SerializationFormat().serializationFormat(SerializationFormat.SerializationFormatEnum.CUSTOM).customTypeName("a"); Compatibility backward = new Compatibility() - .policy(Compatibility.PolicyEnum.BACKWARDANDFORWARD) - .backwardAndForward(new BackwardAndForward().backwardPolicy(new BackwardPolicy() + .policy(Compatibility.PolicyEnum.ADVANCED) + .advanced(new BackwardAndForward().backwardPolicy(new BackwardPolicy() .backwardPolicy(new Backward().name(Backward.class.getSimpleName())))); SchemaInfo schema = new SchemaInfo() .type("a").serializationFormat(type).schemaData(new byte[0]).properties(Collections.emptyMap()); VersionInfo version = new VersionInfo().type("a").version(1).id(1); Compatibility backwardTillForwardTill = new Compatibility() - .policy(Compatibility.PolicyEnum.BACKWARDANDFORWARD) - .backwardAndForward(new BackwardAndForward().backwardPolicy(new BackwardPolicy() + .policy(Compatibility.PolicyEnum.ADVANCED) + .advanced(new BackwardAndForward().backwardPolicy(new BackwardPolicy() .backwardPolicy(new BackwardTill().name(BackwardTill.class.getSimpleName()).version(version))) .forwardPolicy(new ForwardPolicy().forwardPolicy(new ForwardTill().name(ForwardTill.class.getSimpleName()).version(version))) ); @@ -132,9 +132,9 @@ public void testEncode() { assertEquals(encodingInfo.getSchemaInfo(), schema); Compatibility rules = ModelHelper.encode(compatibility); - assertEquals(rules.getPolicy(), Compatibility.PolicyEnum.BACKWARDANDFORWARD); - assertTrue(rules.getBackwardAndForward().getBackwardPolicy().getBackwardPolicy() instanceof BackwardTill); - assertTrue(rules.getBackwardAndForward().getForwardPolicy().getForwardPolicy() instanceof ForwardTill); + 
assertEquals(rules.getPolicy(), Compatibility.PolicyEnum.ADVANCED); + assertTrue(rules.getAdvanced().getBackwardPolicy().getBackwardPolicy() instanceof BackwardTill); + assertTrue(rules.getAdvanced().getForwardPolicy().getForwardPolicy() instanceof ForwardTill); GroupHistoryRecord schemaEvolution = ModelHelper.encode(new io.pravega.schemaregistry.contract.data.GroupHistoryRecord( schemaInfo, versionInfo, compatibility, 100L, "")); From ce1aa98c2a963f3f59e5c8417ef2eb326e31d489 Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Fri, 19 Jun 2020 06:34:25 -0700 Subject: [PATCH 24/70] addschema Signed-off-by: Shivesh Ranjan --- .../schemaregistry/contract/v1/ApiV1.java | 4 +- contract/src/main/swagger/SchemaRegistry.yaml | 62 +++++++++---------- 2 files changed, 30 insertions(+), 36 deletions(-) diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/v1/ApiV1.java b/contract/src/main/java/io/pravega/schemaregistry/contract/v1/ApiV1.java index 5f693e218..7c4ead830 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/v1/ApiV1.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/v1/ApiV1.java @@ -68,7 +68,7 @@ Response addCodecType(@ApiParam(value = "Group name", required = true) @PathPara @ApiParam(value = "The codec type", required = true) String codecType); @POST - @Path("/{groupName}/schemas/versions") + @Path("/{groupName}/schemas") @Consumes({"application/json"}) @Produces({"application/json"}) @io.swagger.annotations.ApiOperation(value = "", notes = "Adds a new schema to the group", response = VersionInfo.class, tags = {"Group", }) @@ -298,7 +298,7 @@ void addCodecType(@ApiParam(value = "Group name", required = true) @PathParam("g @ApiParam(value = "Add codec type", required = true) String codecType, @Suspended AsyncResponse asyncResponse); @POST - @Path("/{groupName}/schemas/versions") + @Path("/{groupName}/schemas") @Consumes({"application/json"}) @Produces({"application/json"}) @io.swagger.annotations.ApiOperation(value = "", notes = "Adds a new schema to the group", response = VersionInfo.class, tags = {"Group", }) diff --git a/contract/src/main/swagger/SchemaRegistry.yaml b/contract/src/main/swagger/SchemaRegistry.yaml index b3be3c254..464f99b1d 100644 --- a/contract/src/main/swagger/SchemaRegistry.yaml +++ b/contract/src/main/swagger/SchemaRegistry.yaml @@ -43,8 +43,6 @@ paths: name: limit type: integer description: The numbers of items to return - required: - - limit responses: 200: description: List of all groups @@ -163,9 +161,6 @@ paths: $ref: "#/definitions/Compatibility" previousCompatibility: $ref: "#/definitions/Compatibility" - nullable: true - required: - - compatibility responses: 200: description: Updated schema compatibility policy @@ -202,34 +197,6 @@ paths: description: Group with given name not found 500: description: Internal server error while fetching Group's latest schemas - /groups/{groupName}/schemas/versions: - parameters: - - in: path - name: groupName - description: Group name - required: true - type: string - get: - tags: - - "Group" - operationId: getSchemaVersions - description: Get all schema versions for the group - parameters: - - in: query - name: type - type: string - description: Type of object the schema describes. 
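For orientation, a client-side sketch of the add-schema flow served by the relocated POST /groups/{groupName}/schemas endpoint; the wrapper method is an assumption, and passing a null schema type simply asks for the latest schema across the whole group:

    import io.pravega.schemaregistry.client.SchemaRegistryClient;
    import io.pravega.schemaregistry.contract.data.SchemaInfo;
    import io.pravega.schemaregistry.contract.data.SchemaWithVersion;

    // Hypothetical helper, for illustration only.
    public class AddSchemaExample {
        // Registers a schema and reads back the latest version stored for the group.
        static SchemaWithVersion register(SchemaRegistryClient client, String groupId, SchemaInfo schema) {
            // Rejected by the service if it violates the group's compatibility policy.
            client.addSchema(groupId, schema);
            return client.getLatestSchemaVersion(groupId, null);
        }
    }
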
- produces: - - application/json - responses: - 200: - description: Versioned history of schemas registered under the group - schema: - $ref: "#/definitions/SchemaVersionsList" - 404: - description: Group with given name not found - 500: - description: Internal server error while fetching Group schema versions post: tags: - "Group" @@ -259,6 +226,34 @@ paths: description: Invalid serialization format 500: description: Internal server error while adding schema to group + /groups/{groupName}/schemas/versions: + parameters: + - in: path + name: groupName + description: Group name + required: true + type: string + get: + tags: + - "Group" + operationId: getSchemaVersions + description: Get all schema versions for the group + parameters: + - in: query + name: type + type: string + description: Type of object the schema describes. + produces: + - application/json + responses: + 200: + description: Versioned history of schemas registered under the group + schema: + $ref: "#/definitions/SchemaVersionsList" + 404: + description: Group with given name not found + 500: + description: Internal server error while fetching Group schema versions /groups/{groupName}/schemas/versions/find: parameters: - in: path @@ -569,7 +564,6 @@ paths: 500: description: Internal server error while registering codectype to a Group /schemas/addedTo: - parameters: post: tags: - "Schemas" From bffd0f7883c8532e22a3d2e64638c3bf210295aa Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Sun, 21 Jun 2020 20:31:57 -0700 Subject: [PATCH 25/70] javadoc Signed-off-by: Shivesh Ranjan --- .../io/pravega/schemaregistry/client/SchemaRegistryClient.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java index 8234ec569..448c86fc1 100644 --- a/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java +++ b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java @@ -326,7 +326,8 @@ SchemaWithVersion getLatestSchemaVersion(String groupId, @Nullable String schema List getCodecTypes(String groupId) throws ResourceNotFoundException, UnauthorizedException; /** - * Add new codec type to be used in encoding in the group. + * Add new codec type to be used in encoding in the group. Adding a new codectype is backward incompatible. + * Make sure all readers are upgraded to use the new codec before any writers use the codec to encode the data. * * @param groupId Id for the group. * @param codecType codec type. From 049fb6df65bb5c76e6d0da643790a5344ce342b2 Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Mon, 22 Jun 2020 01:10:33 -0700 Subject: [PATCH 26/70] group id generator Signed-off-by: Shivesh Ranjan --- .../io/pravega/schemaregistry/GroupIdGenerator.java | 12 +++--------- 1 file changed, 3 insertions(+), 9 deletions(-) diff --git a/serializers/src/main/java/io/pravega/schemaregistry/GroupIdGenerator.java b/serializers/src/main/java/io/pravega/schemaregistry/GroupIdGenerator.java index 36a1488fb..2a4b4da4c 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/GroupIdGenerator.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/GroupIdGenerator.java @@ -10,12 +10,12 @@ package io.pravega.schemaregistry; import com.google.common.base.Preconditions; +import io.pravega.shared.NameUtils; import lombok.SneakyThrows; /** * Defines strategies for generating groupId for stream. 
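A hypothetical usage sketch of the simplified generator below (the scope and stream names are made up, and Type is assumed to be the nested enum consumed by getGroupId):

    import io.pravega.schemaregistry.GroupIdGenerator;

    // Illustration only.
    public class GroupIdExample {
        public static void main(String[] args) {
            // With this change the id is the scoped stream name rather than the
            // earlier "pravega://scope/stream/" style URI.
            String groupId = GroupIdGenerator.getGroupId(
                    GroupIdGenerator.Type.QualifiedStreamName, "myScope", "myStream");
            System.out.println(groupId); // expected: myScope/myStream
        }
    }
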
- * Currently there is only one naming strategy that uses streams fully qualified scoped stream name and encodes it using - * URL encoder. + * Currently there is only one naming strategy that uses streams fully qualified scoped stream name. */ public class GroupIdGenerator { private GroupIdGenerator() { @@ -27,13 +27,7 @@ public static String getGroupId(Type type, String... args) { case QualifiedStreamName: Preconditions.checkNotNull(args); Preconditions.checkArgument(args.length == 2); - StringBuilder qualifiedNameBuilder = new StringBuilder(); - qualifiedNameBuilder.append("pravega://"); - for (String arg : args) { - qualifiedNameBuilder.append(arg); - qualifiedNameBuilder.append("/"); - } - return qualifiedNameBuilder.toString(); + return NameUtils.getScopedStreamName(args[0], args[1]); default: throw new IllegalArgumentException(); } From 910f8702404f1541d086d589f2545ffd6f92d7cd Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Mon, 22 Jun 2020 02:06:04 -0700 Subject: [PATCH 27/70] add error message in model helper Signed-off-by: Shivesh Ranjan --- .../client/TestSchemaRegistryClient.java | 8 +- .../contract/data/GroupHistoryRecord.java | 20 ++-- .../generated/rest/model/BackwardTill.java | 29 +++--- .../generated/rest/model/ForwardTill.java | 29 +++--- .../rest/model/GroupHistoryRecord.java | 29 +++--- .../rest/model/SchemaWithVersion.java | 29 +++--- .../model/UpdateCompatibilityRequest.java | 3 +- .../contract/transform/ModelHelper.java | 99 +++++++++++-------- contract/src/main/swagger/SchemaRegistry.yaml | 8 +- .../contract/transform/ModelHelperTest.java | 10 +- 10 files changed, 139 insertions(+), 125 deletions(-) diff --git a/client/src/test/java/io/pravega/schemaregistry/client/TestSchemaRegistryClient.java b/client/src/test/java/io/pravega/schemaregistry/client/TestSchemaRegistryClient.java index 10824420c..52360606e 100644 --- a/client/src/test/java/io/pravega/schemaregistry/client/TestSchemaRegistryClient.java +++ b/client/src/test/java/io/pravega/schemaregistry/client/TestSchemaRegistryClient.java @@ -216,7 +216,7 @@ public void testSchemasApi() { SchemaInfo schemaInfo = new SchemaInfo("schema1", serializationFormat, schemaData, ImmutableMap.of()); VersionInfo versionInfo = new VersionInfo("schema1", 5, 5); io.pravega.schemaregistry.contract.generated.rest.model.SchemaWithVersion schemaVersion = new io.pravega.schemaregistry.contract.generated.rest.model.SchemaWithVersion() - .schemaInfo(ModelHelper.encode(schemaInfo)).version(ModelHelper.encode(versionInfo)); + .schemaInfo(ModelHelper.encode(schemaInfo)).versionInfo(ModelHelper.encode(versionInfo)); SchemaVersionsList schemaList = new SchemaVersionsList(); schemaList.addSchemasItem(schemaVersion); doReturn(schemaList).when(response).readEntity(SchemaVersionsList.class); @@ -421,7 +421,7 @@ public void testGroupEvolutionHistory() { SchemaInfo schemaInfo = new SchemaInfo("schema1", serializationFormat, schemaData, ImmutableMap.of()); Compatibility compatibility = Compatibility.backward(); GroupHistoryRecord groupHistoryRecord = new io.pravega.schemaregistry.contract.generated.rest.model.GroupHistoryRecord() - .schemaInfo(ModelHelper.encode(schemaInfo)).version(ModelHelper.encode(versionInfo)) + .schemaInfo(ModelHelper.encode(schemaInfo)).versionInfo(ModelHelper.encode(versionInfo)) .compatibility(ModelHelper.encode(compatibility)).timestamp(100L).schemaString(""); GroupHistory history = new GroupHistory(); history.addHistoryItem(groupHistoryRecord); @@ -429,8 +429,8 @@ public void testGroupEvolutionHistory() { List 
groupHistoryList = client.getGroupHistory("mygroup"); assertEquals(1, groupHistoryList.size()); assertEquals(compatibility, groupHistoryList.get(0).getCompatibility()); - assertEquals(schemaInfo, groupHistoryList.get(0).getSchema()); - assertEquals(versionInfo, groupHistoryList.get(0).getVersion()); + assertEquals(schemaInfo, groupHistoryList.get(0).getSchemaInfo()); + assertEquals(versionInfo, groupHistoryList.get(0).getVersionInfo()); assertEquals(100L, groupHistoryList.get(0).getTimestamp()); assertEquals("", groupHistoryList.get(0).getSchemaString()); //NotFound Exception diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/data/GroupHistoryRecord.java b/contract/src/main/java/io/pravega/schemaregistry/contract/data/GroupHistoryRecord.java index 0eee07c76..d333508f8 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/data/GroupHistoryRecord.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/data/GroupHistoryRecord.java @@ -13,32 +13,32 @@ /** * Describes changes to the group and the compatibility {@link GroupHistoryRecord#compatibility} that were - * applied while registering {@link GroupHistoryRecord#schema} and the unique {@link GroupHistoryRecord#version} identifier + * applied while registering {@link GroupHistoryRecord#schemaInfo} and the unique {@link GroupHistoryRecord#versionInfo} identifier * that was assigned to it. - * It also has {@link GroupHistoryRecord#timestamp} when the schema was added and includes an optional + * It also has {@link GroupHistoryRecord#timestamp} when the schemaInfo was added and includes an optional * {@link GroupHistoryRecord#schemaString} which is populated only if serialization format is one of {@link SerializationFormat#Avro} - * {@link SerializationFormat#Json} or {@link SerializationFormat#Protobuf}. This string is just to help make the schema human readable. + * {@link SerializationFormat#Json} or {@link SerializationFormat#Protobuf}. This string is just to help make the schemaInfo human readable. */ @Data public class GroupHistoryRecord { /** - * Schema information object for the schema that was added to the group. + * Schema information object for the schemaInfo that was added to the group. */ - private final SchemaInfo schema; + private final SchemaInfo schemaInfo; /** - * Version information object that uniquely identifies the schema in the group. + * Version information object that uniquely identifies the schemaInfo in the group. */ - private final VersionInfo version; + private final VersionInfo versionInfo; /** - * Compatibility applied at the time when the schema was registered. + * Compatibility applied at the time when the schemaInfo was registered. */ private final Compatibility compatibility; /** - * Service's Time when the schema was registered. + * Service's Time when the schemaInfo was registered. */ private final long timestamp; /** - * A json format string representing the schema. This string will be populated only for serialization formats + * A json format string representing the schemaInfo. This string will be populated only for serialization formats * that the service can parse. 
*/ private final String schemaString; diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/BackwardTill.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/BackwardTill.java index f4e383a23..44ca296b8 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/BackwardTill.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/BackwardTill.java @@ -30,8 +30,8 @@ public class BackwardTill { @JsonProperty("name") private String name = null; - @JsonProperty("version") - private VersionInfo version = null; + @JsonProperty("versionInfo") + private VersionInfo versionInfo = null; public BackwardTill name(String name) { this.name = name; @@ -53,24 +53,23 @@ public void setName(String name) { this.name = name; } - public BackwardTill version(VersionInfo version) { - this.version = version; + public BackwardTill versionInfo(VersionInfo versionInfo) { + this.versionInfo = versionInfo; return this; } /** * Whether given schema is valid with respect to existing group schemas against the configured compatibility. - * @return version + * @return versionInfo **/ - @JsonProperty("version") - @ApiModelProperty(required = true, value = "Whether given schema is valid with respect to existing group schemas against the configured compatibility.") - @NotNull - public VersionInfo getVersion() { - return version; + @JsonProperty("versionInfo") + @ApiModelProperty(value = "Whether given schema is valid with respect to existing group schemas against the configured compatibility.") + public VersionInfo getVersionInfo() { + return versionInfo; } - public void setVersion(VersionInfo version) { - this.version = version; + public void setVersionInfo(VersionInfo versionInfo) { + this.versionInfo = versionInfo; } @@ -84,12 +83,12 @@ public boolean equals(java.lang.Object o) { } BackwardTill backwardTill = (BackwardTill) o; return Objects.equals(this.name, backwardTill.name) && - Objects.equals(this.version, backwardTill.version); + Objects.equals(this.versionInfo, backwardTill.versionInfo); } @Override public int hashCode() { - return Objects.hash(name, version); + return Objects.hash(name, versionInfo); } @@ -99,7 +98,7 @@ public String toString() { sb.append("class BackwardTill {\n"); sb.append(" name: ").append(toIndentedString(name)).append("\n"); - sb.append(" version: ").append(toIndentedString(version)).append("\n"); + sb.append(" versionInfo: ").append(toIndentedString(versionInfo)).append("\n"); sb.append("}"); return sb.toString(); } diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/ForwardTill.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/ForwardTill.java index e2dd0b840..6176540c6 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/ForwardTill.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/ForwardTill.java @@ -30,8 +30,8 @@ public class ForwardTill { @JsonProperty("name") private String name = null; - @JsonProperty("version") - private VersionInfo version = null; + @JsonProperty("versionInfo") + private VersionInfo versionInfo = null; public ForwardTill name(String name) { this.name = name; @@ -53,24 +53,23 @@ public void setName(String name) { this.name = name; } - public ForwardTill version(VersionInfo version) { - this.version = version; + public ForwardTill versionInfo(VersionInfo versionInfo) { + 
this.versionInfo = versionInfo; return this; } /** * Whether given schema is valid with respect to existing group schemas against the configured compatibility. - * @return version + * @return versionInfo **/ - @JsonProperty("version") - @ApiModelProperty(required = true, value = "Whether given schema is valid with respect to existing group schemas against the configured compatibility.") - @NotNull - public VersionInfo getVersion() { - return version; + @JsonProperty("versionInfo") + @ApiModelProperty(value = "Whether given schema is valid with respect to existing group schemas against the configured compatibility.") + public VersionInfo getVersionInfo() { + return versionInfo; } - public void setVersion(VersionInfo version) { - this.version = version; + public void setVersionInfo(VersionInfo versionInfo) { + this.versionInfo = versionInfo; } @@ -84,12 +83,12 @@ public boolean equals(java.lang.Object o) { } ForwardTill forwardTill = (ForwardTill) o; return Objects.equals(this.name, forwardTill.name) && - Objects.equals(this.version, forwardTill.version); + Objects.equals(this.versionInfo, forwardTill.versionInfo); } @Override public int hashCode() { - return Objects.hash(name, version); + return Objects.hash(name, versionInfo); } @@ -99,7 +98,7 @@ public String toString() { sb.append("class ForwardTill {\n"); sb.append(" name: ").append(toIndentedString(name)).append("\n"); - sb.append(" version: ").append(toIndentedString(version)).append("\n"); + sb.append(" versionInfo: ").append(toIndentedString(versionInfo)).append("\n"); sb.append("}"); return sb.toString(); } diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/GroupHistoryRecord.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/GroupHistoryRecord.java index d993a454c..3b3f84e9a 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/GroupHistoryRecord.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/GroupHistoryRecord.java @@ -32,8 +32,8 @@ public class GroupHistoryRecord { @JsonProperty("schemaInfo") private SchemaInfo schemaInfo = null; - @JsonProperty("version") - private VersionInfo version = null; + @JsonProperty("versionInfo") + private VersionInfo versionInfo = null; @JsonProperty("compatibility") private Compatibility compatibility = null; @@ -64,24 +64,23 @@ public void setSchemaInfo(SchemaInfo schemaInfo) { this.schemaInfo = schemaInfo; } - public GroupHistoryRecord version(VersionInfo version) { - this.version = version; + public GroupHistoryRecord versionInfo(VersionInfo versionInfo) { + this.versionInfo = versionInfo; return this; } /** * Schema version information object. 
- * @return version + * @return versionInfo **/ - @JsonProperty("version") - @ApiModelProperty(required = true, value = "Schema version information object.") - @NotNull - public VersionInfo getVersion() { - return version; + @JsonProperty("versionInfo") + @ApiModelProperty(value = "Schema version information object.") + public VersionInfo getVersionInfo() { + return versionInfo; } - public void setVersion(VersionInfo version) { - this.version = version; + public void setVersionInfo(VersionInfo versionInfo) { + this.versionInfo = versionInfo; } public GroupHistoryRecord compatibility(Compatibility compatibility) { @@ -154,7 +153,7 @@ public boolean equals(java.lang.Object o) { } GroupHistoryRecord groupHistoryRecord = (GroupHistoryRecord) o; return Objects.equals(this.schemaInfo, groupHistoryRecord.schemaInfo) && - Objects.equals(this.version, groupHistoryRecord.version) && + Objects.equals(this.versionInfo, groupHistoryRecord.versionInfo) && Objects.equals(this.compatibility, groupHistoryRecord.compatibility) && Objects.equals(this.timestamp, groupHistoryRecord.timestamp) && Objects.equals(this.schemaString, groupHistoryRecord.schemaString); @@ -162,7 +161,7 @@ public boolean equals(java.lang.Object o) { @Override public int hashCode() { - return Objects.hash(schemaInfo, version, compatibility, timestamp, schemaString); + return Objects.hash(schemaInfo, versionInfo, compatibility, timestamp, schemaString); } @@ -172,7 +171,7 @@ public String toString() { sb.append("class GroupHistoryRecord {\n"); sb.append(" schemaInfo: ").append(toIndentedString(schemaInfo)).append("\n"); - sb.append(" version: ").append(toIndentedString(version)).append("\n"); + sb.append(" versionInfo: ").append(toIndentedString(versionInfo)).append("\n"); sb.append(" compatibility: ").append(toIndentedString(compatibility)).append("\n"); sb.append(" timestamp: ").append(toIndentedString(timestamp)).append("\n"); sb.append(" schemaString: ").append(toIndentedString(schemaString)).append("\n"); diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaWithVersion.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaWithVersion.java index bc0687fff..23cc08d6b 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaWithVersion.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaWithVersion.java @@ -31,8 +31,8 @@ public class SchemaWithVersion { @JsonProperty("schemaInfo") private SchemaInfo schemaInfo = null; - @JsonProperty("version") - private VersionInfo version = null; + @JsonProperty("versionInfo") + private VersionInfo versionInfo = null; public SchemaWithVersion schemaInfo(SchemaInfo schemaInfo) { this.schemaInfo = schemaInfo; @@ -54,24 +54,23 @@ public void setSchemaInfo(SchemaInfo schemaInfo) { this.schemaInfo = schemaInfo; } - public SchemaWithVersion version(VersionInfo version) { - this.version = version; + public SchemaWithVersion versionInfo(VersionInfo versionInfo) { + this.versionInfo = versionInfo; return this; } /** * Version information. 
- * @return version + * @return versionInfo **/ - @JsonProperty("version") - @ApiModelProperty(required = true, value = "Version information.") - @NotNull - public VersionInfo getVersion() { - return version; + @JsonProperty("versionInfo") + @ApiModelProperty(value = "Version information.") + public VersionInfo getVersionInfo() { + return versionInfo; } - public void setVersion(VersionInfo version) { - this.version = version; + public void setVersionInfo(VersionInfo versionInfo) { + this.versionInfo = versionInfo; } @@ -85,12 +84,12 @@ public boolean equals(java.lang.Object o) { } SchemaWithVersion schemaWithVersion = (SchemaWithVersion) o; return Objects.equals(this.schemaInfo, schemaWithVersion.schemaInfo) && - Objects.equals(this.version, schemaWithVersion.version); + Objects.equals(this.versionInfo, schemaWithVersion.versionInfo); } @Override public int hashCode() { - return Objects.hash(schemaInfo, version); + return Objects.hash(schemaInfo, versionInfo); } @@ -100,7 +99,7 @@ public String toString() { sb.append("class SchemaWithVersion {\n"); sb.append(" schemaInfo: ").append(toIndentedString(schemaInfo)).append("\n"); - sb.append(" version: ").append(toIndentedString(version)).append("\n"); + sb.append(" versionInfo: ").append(toIndentedString(versionInfo)).append("\n"); sb.append("}"); return sb.toString(); } diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/UpdateCompatibilityRequest.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/UpdateCompatibilityRequest.java index 9b8e6720e..602baed69 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/UpdateCompatibilityRequest.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/UpdateCompatibilityRequest.java @@ -42,8 +42,7 @@ public UpdateCompatibilityRequest compatibility(Compatibility compatibility) { * @return compatibility **/ @JsonProperty("compatibility") - @ApiModelProperty(required = true, value = "") - @NotNull + @ApiModelProperty(value = "") public Compatibility getCompatibility() { return compatibility; } diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/transform/ModelHelper.java b/contract/src/main/java/io/pravega/schemaregistry/contract/transform/ModelHelper.java index 4a63928e7..b7f40de4d 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/transform/ModelHelper.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/transform/ModelHelper.java @@ -48,21 +48,22 @@ public class ModelHelper { // region decode public static io.pravega.schemaregistry.contract.data.SchemaInfo decode(SchemaInfo schemaInfo) { - Preconditions.checkArgument(schemaInfo != null); - Preconditions.checkArgument(schemaInfo.getType() != null); - Preconditions.checkArgument(schemaInfo.getSerializationFormat() != null); - Preconditions.checkArgument(schemaInfo.getProperties() != null); - Preconditions.checkArgument(schemaInfo.getSchemaData() != null); + Preconditions.checkArgument(schemaInfo != null, "SchemaInfo cannot be null"); + Preconditions.checkArgument(schemaInfo.getType() != null, "SchemaInfo type cannot be null"); + Preconditions.checkArgument(schemaInfo.getSerializationFormat() != null, "Serialization format cannot be null"); + Preconditions.checkArgument(schemaInfo.getSchemaData() != null, "schema data cannot be null"); io.pravega.schemaregistry.contract.data.SerializationFormat serializationFormat = 
decode(schemaInfo.getSerializationFormat()); + ImmutableMap properties = schemaInfo.getProperties() == null ? ImmutableMap.of() : + ImmutableMap.copyOf(schemaInfo.getProperties()); return new io.pravega.schemaregistry.contract.data.SchemaInfo(schemaInfo.getType(), - serializationFormat, ByteBuffer.wrap(schemaInfo.getSchemaData()), ImmutableMap.copyOf(schemaInfo.getProperties())); + serializationFormat, ByteBuffer.wrap(schemaInfo.getSchemaData()), properties); } public static io.pravega.schemaregistry.contract.data.SerializationFormat decode(SerializationFormat serializationFormat) { - Preconditions.checkArgument(serializationFormat != null); + Preconditions.checkArgument(serializationFormat != null, "serialization format cannot be null"); switch (serializationFormat.getSerializationFormat()) { case CUSTOM: - Preconditions.checkArgument(serializationFormat.getCustomTypeName() != null); + Preconditions.checkArgument(serializationFormat.getCustomTypeName() != null, "Custom name not supplied"); return io.pravega.schemaregistry.contract.data.SerializationFormat.custom(serializationFormat.getCustomTypeName()); default: return searchEnum(io.pravega.schemaregistry.contract.data.SerializationFormat.class, serializationFormat.getSerializationFormat().name()); @@ -103,13 +104,14 @@ public static io.pravega.schemaregistry.contract.data.Compatibility decode(Compa .backwardAndForward(decode(compatibility.getAdvanced())).build(); break; default: - throw new IllegalArgumentException(); + throw new IllegalArgumentException("Unknown compatibility type"); } return decoded; } public static io.pravega.schemaregistry.contract.data.BackwardAndForward decode(BackwardAndForward compatibility) { - Preconditions.checkArgument(compatibility.getBackwardPolicy() != null || compatibility.getForwardPolicy() != null); + Preconditions.checkArgument(compatibility.getBackwardPolicy() != null || compatibility.getForwardPolicy() != null, + "At least one of Backward or Forward policy needs to be supplied for Advanced Compatibility"); io.pravega.schemaregistry.contract.data.BackwardAndForward.BackwardAndForwardBuilder builder = io.pravega.schemaregistry.contract.data.BackwardAndForward.builder(); @@ -123,6 +125,7 @@ public static io.pravega.schemaregistry.contract.data.BackwardAndForward decode( } public static io.pravega.schemaregistry.contract.data.BackwardAndForward.BackwardPolicy decode(BackwardPolicy backward) { + Preconditions.checkArgument(backward != null, "backward policy cannot be null"); Object obj = backward.getBackwardPolicy(); if (backward.getBackwardPolicy() instanceof Map) { String name = (String) ((Map) backward.getBackwardPolicy()).get("name"); @@ -133,7 +136,7 @@ public static io.pravega.schemaregistry.contract.data.BackwardAndForward.Backwar } else if (name.equals(BACKWARD_TILL)) { obj = MAPPER.convertValue(backward.getBackwardPolicy(), BackwardTill.class); } else { - throw new IllegalArgumentException(); + throw new IllegalArgumentException("Backward policy needs to be one of Backward, BackwardTill or BackwardTransitive"); } } @@ -141,15 +144,17 @@ public static io.pravega.schemaregistry.contract.data.BackwardAndForward.Backwar return new io.pravega.schemaregistry.contract.data.BackwardAndForward.Backward(); } else if (obj instanceof BackwardTill) { return new io.pravega.schemaregistry.contract.data.BackwardAndForward.BackwardTill( - decode(((io.pravega.schemaregistry.contract.generated.rest.model.BackwardTill) backward.getBackwardPolicy()).getVersion())); + 
decode(((io.pravega.schemaregistry.contract.generated.rest.model.BackwardTill) backward.getBackwardPolicy()).getVersionInfo())); } else if (obj instanceof BackwardTransitive) { return new io.pravega.schemaregistry.contract.data.BackwardAndForward.BackwardTransitive(); } else { - throw new IllegalArgumentException("Rule not supported"); + throw new IllegalArgumentException("Backward policy needs to be one of Backward, BackwardTill or BackwardTransitive."); } } public static io.pravega.schemaregistry.contract.data.BackwardAndForward.ForwardPolicy decode(io.pravega.schemaregistry.contract.generated.rest.model.ForwardPolicy forward) { + Preconditions.checkArgument(forward != null, "forward policy cannot be null"); + Object obj = forward.getForwardPolicy(); if (forward.getForwardPolicy() instanceof Map) { String name = (String) ((Map) forward.getForwardPolicy()).get("name"); @@ -160,7 +165,7 @@ public static io.pravega.schemaregistry.contract.data.BackwardAndForward.Forward } else if (name.equals(FORWARD_TILL)) { obj = MAPPER.convertValue(forward.getForwardPolicy(), ForwardTill.class); } else { - throw new IllegalArgumentException(); + throw new IllegalArgumentException("Forward policy needs to be one of Forward, ForwardTill or ForwardTransitive."); } } @@ -168,63 +173,77 @@ public static io.pravega.schemaregistry.contract.data.BackwardAndForward.Forward return new io.pravega.schemaregistry.contract.data.BackwardAndForward.Forward(); } else if (obj instanceof ForwardTill) { return new io.pravega.schemaregistry.contract.data.BackwardAndForward.ForwardTill( - decode(((io.pravega.schemaregistry.contract.generated.rest.model.ForwardTill) forward.getForwardPolicy()).getVersion())); + decode(((io.pravega.schemaregistry.contract.generated.rest.model.ForwardTill) forward.getForwardPolicy()).getVersionInfo())); } else if (obj instanceof ForwardTransitive) { return new io.pravega.schemaregistry.contract.data.BackwardAndForward.ForwardTransitive(); } else { - throw new IllegalArgumentException("Rule not supported"); + throw new IllegalArgumentException("Forward policy needs to be one of Forward, ForwardTill or ForwardTransitive."); } } public static io.pravega.schemaregistry.contract.data.VersionInfo decode(VersionInfo versionInfo) { - Preconditions.checkArgument(versionInfo != null); - Preconditions.checkArgument(versionInfo.getType() != null); - Preconditions.checkArgument(versionInfo.getVersion() != null); - Preconditions.checkArgument(versionInfo.getId() != null); + Preconditions.checkArgument(versionInfo != null, "Version info cannot be null"); + Preconditions.checkArgument(versionInfo.getType() != null, "type cannot be null"); + Preconditions.checkArgument(versionInfo.getVersion() != null, "version cannot be null"); + Preconditions.checkArgument(versionInfo.getId() != null, "id cannot be null"); return new io.pravega.schemaregistry.contract.data.VersionInfo(versionInfo.getType(), versionInfo.getVersion(), versionInfo.getId()); } public static io.pravega.schemaregistry.contract.data.EncodingInfo decode(EncodingInfo encodingInfo) { - Preconditions.checkArgument(encodingInfo != null); + Preconditions.checkArgument(encodingInfo != null, "EncodingInfo cannot be null"); + Preconditions.checkArgument(encodingInfo.getVersionInfo() != null, "VersionInfo cannot be null"); + Preconditions.checkArgument(encodingInfo.getSchemaInfo() != null, "SchemaInfo cannot be null"); + Preconditions.checkArgument(encodingInfo.getCodecType() != null, "CodecType cannot be null"); return new 
io.pravega.schemaregistry.contract.data.EncodingInfo(decode(encodingInfo.getVersionInfo()), decode(encodingInfo.getSchemaInfo()), encodingInfo.getCodecType()); } public static io.pravega.schemaregistry.contract.data.SchemaWithVersion decode(SchemaWithVersion schemaWithVersion) { - Preconditions.checkArgument(schemaWithVersion != null); + Preconditions.checkArgument(schemaWithVersion != null, "schema with version cannot be null"); + Preconditions.checkArgument(schemaWithVersion.getVersionInfo() != null, "VersionInfo cannot be null"); + Preconditions.checkArgument(schemaWithVersion.getSchemaInfo() != null, "SchemaInfo cannot be null"); return new io.pravega.schemaregistry.contract.data.SchemaWithVersion(decode(schemaWithVersion.getSchemaInfo()), - decode(schemaWithVersion.getVersion())); + decode(schemaWithVersion.getVersionInfo())); } - public static io.pravega.schemaregistry.contract.data.GroupHistoryRecord decode(GroupHistoryRecord schemaEvolution) { - Preconditions.checkArgument(schemaEvolution != null); + public static io.pravega.schemaregistry.contract.data.GroupHistoryRecord decode(GroupHistoryRecord historyRecord) { + Preconditions.checkArgument(historyRecord != null, "history record be null"); + Preconditions.checkArgument(historyRecord.getSchemaInfo() != null, "schemaInfo be null"); + Preconditions.checkArgument(historyRecord.getVersionInfo() != null, "versionInfo be null"); + Preconditions.checkArgument(historyRecord.getTimestamp() != null, "Timestamp be null"); + Preconditions.checkArgument(historyRecord.getCompatibility() != null, "Compatibility be null"); - return new io.pravega.schemaregistry.contract.data.GroupHistoryRecord(decode(schemaEvolution.getSchemaInfo()), - decode(schemaEvolution.getVersion()), decode(schemaEvolution.getCompatibility()), schemaEvolution.getTimestamp(), - schemaEvolution.getSchemaString()); + return new io.pravega.schemaregistry.contract.data.GroupHistoryRecord(decode(historyRecord.getSchemaInfo()), + decode(historyRecord.getVersionInfo()), decode(historyRecord.getCompatibility()), historyRecord.getTimestamp(), + historyRecord.getSchemaString()); } public static io.pravega.schemaregistry.contract.data.EncodingId decode(EncodingId encodingId) { - Preconditions.checkArgument(encodingId != null); + Preconditions.checkArgument(encodingId != null, "EncodingId cannot be null"); + Preconditions.checkArgument(encodingId.getEncodingId() != null, "EncodingId cannot be null"); Preconditions.checkArgument(encodingId.getEncodingId() != null); return new io.pravega.schemaregistry.contract.data.EncodingId(encodingId.getEncodingId()); } public static io.pravega.schemaregistry.contract.data.GroupProperties decode(GroupProperties groupProperties) { - Preconditions.checkArgument(groupProperties != null); - Preconditions.checkArgument(groupProperties.isAllowMultipleTypes() != null); + Preconditions.checkArgument(groupProperties != null, "group properties cannot be null"); + Preconditions.checkArgument(groupProperties.isAllowMultipleTypes() != null, "is allow multiple type cannot be null"); + Preconditions.checkArgument(groupProperties.getSerializationFormat() != null, "serialization format cannot be null"); + Preconditions.checkArgument(groupProperties.getCompatibility() != null, "compatibility cannot be null"); + ImmutableMap properties = groupProperties.getProperties() == null ? 
ImmutableMap.of() : + ImmutableMap.copyOf(groupProperties.getProperties()); return io.pravega.schemaregistry.contract.data.GroupProperties.builder().serializationFormat(decode(groupProperties.getSerializationFormat())) .compatibility(decode(groupProperties.getCompatibility())).allowMultipleTypes(groupProperties.isAllowMultipleTypes()) - .properties(ImmutableMap.copyOf(groupProperties.getProperties())).build(); + .properties(properties).build(); } // endregion // region encode public static GroupHistoryRecord encode(io.pravega.schemaregistry.contract.data.GroupHistoryRecord groupHistoryRecord) { - return new GroupHistoryRecord().schemaInfo(encode(groupHistoryRecord.getSchema())) - .version(encode(groupHistoryRecord.getVersion())) + return new GroupHistoryRecord().schemaInfo(encode(groupHistoryRecord.getSchemaInfo())) + .versionInfo(encode(groupHistoryRecord.getVersionInfo())) .compatibility(encode(groupHistoryRecord.getCompatibility())) .timestamp(groupHistoryRecord.getTimestamp()) .schemaString(groupHistoryRecord.getSchemaString()); @@ -257,9 +276,9 @@ public static BackwardPolicy encode(io.pravega.schemaregistry.contract.data.Back return new BackwardPolicy().backwardPolicy(new BackwardTransitive().name(BackwardTransitive.class.getSimpleName())); } else if (backwardPolicy instanceof io.pravega.schemaregistry.contract.data.BackwardAndForward.BackwardTill) { VersionInfo version = encode(((io.pravega.schemaregistry.contract.data.BackwardAndForward.BackwardTill) backwardPolicy).getVersionInfo()); - return new BackwardPolicy().backwardPolicy(new BackwardTill().name(BackwardTill.class.getSimpleName()).version(version)); + return new BackwardPolicy().backwardPolicy(new BackwardTill().name(BackwardTill.class.getSimpleName()).versionInfo(version)); } else { - throw new IllegalArgumentException(); + throw new IllegalArgumentException("Backward policy needs to be one of Backward BackwardTill or BackwardTransitive"); } } @@ -270,15 +289,15 @@ public static ForwardPolicy encode(io.pravega.schemaregistry.contract.data.Backw return new ForwardPolicy().forwardPolicy(new ForwardTransitive().name(ForwardTransitive.class.getSimpleName())); } else if (forwardPolicy instanceof io.pravega.schemaregistry.contract.data.BackwardAndForward.ForwardTill) { VersionInfo version = encode(((io.pravega.schemaregistry.contract.data.BackwardAndForward.ForwardTill) forwardPolicy).getVersionInfo()); - return new ForwardPolicy().forwardPolicy(new ForwardTill().name(ForwardTill.class.getSimpleName()).version(version)); + return new ForwardPolicy().forwardPolicy(new ForwardTill().name(ForwardTill.class.getSimpleName()).versionInfo(version)); } else { - throw new IllegalArgumentException(); + throw new IllegalArgumentException("Forward policy needs to be one of Forward ForwardTill or ForwardTransitive"); } } public static SchemaWithVersion encode(io.pravega.schemaregistry.contract.data.SchemaWithVersion schemaWithVersion) { return new SchemaWithVersion().schemaInfo(encode(schemaWithVersion.getSchemaInfo())) - .version(encode(schemaWithVersion.getVersionInfo())); + .versionInfo(encode(schemaWithVersion.getVersionInfo())); } public static GroupProperties encode(io.pravega.schemaregistry.contract.data.GroupProperties groupProperties) { @@ -327,6 +346,6 @@ private static > T searchEnum(Class enumeration, String sea return each; } } - throw new IllegalArgumentException(); + throw new IllegalArgumentException(String.format("Value %s not found in enum %s", search, enumeration.getSimpleName())); } } \ No newline at end of file diff --git 
a/contract/src/main/swagger/SchemaRegistry.yaml b/contract/src/main/swagger/SchemaRegistry.yaml index 464f99b1d..dba96ef12 100644 --- a/contract/src/main/swagger/SchemaRegistry.yaml +++ b/contract/src/main/swagger/SchemaRegistry.yaml @@ -694,7 +694,7 @@ definitions: schemaInfo: description: Schema information. $ref: "#/definitions/SchemaInfo" - version: + versionInfo: description: Version information. $ref: "#/definitions/VersionInfo" required: @@ -842,7 +842,7 @@ definitions: properties: name: type: string - version: + versionInfo: description: Whether given schema is valid with respect to existing group schemas against the configured compatibility. $ref: "#/definitions/VersionInfo" required: @@ -854,7 +854,7 @@ definitions: properties: name: type: string - version: + versionInfo: description: Whether given schema is valid with respect to existing group schemas against the configured compatibility. $ref: "#/definitions/VersionInfo" required: @@ -894,7 +894,7 @@ definitions: schemaInfo: description: Schema information object. $ref: "#/definitions/SchemaInfo" - version: + versionInfo: description: Schema version information object. $ref: "#/definitions/VersionInfo" compatibility: diff --git a/contract/src/test/java/io/pravega/schemaregistry/contract/transform/ModelHelperTest.java b/contract/src/test/java/io/pravega/schemaregistry/contract/transform/ModelHelperTest.java index d984e938e..6f672b378 100644 --- a/contract/src/test/java/io/pravega/schemaregistry/contract/transform/ModelHelperTest.java +++ b/contract/src/test/java/io/pravega/schemaregistry/contract/transform/ModelHelperTest.java @@ -47,8 +47,8 @@ public void testDecode() { Compatibility backwardTillForwardTill = new Compatibility() .policy(Compatibility.PolicyEnum.ADVANCED) .advanced(new BackwardAndForward().backwardPolicy(new BackwardPolicy() - .backwardPolicy(new BackwardTill().name(BackwardTill.class.getSimpleName()).version(version))) - .forwardPolicy(new ForwardPolicy().forwardPolicy(new ForwardTill().name(ForwardTill.class.getSimpleName()).version(version))) + .backwardPolicy(new BackwardTill().name(BackwardTill.class.getSimpleName()).versionInfo(version))) + .forwardPolicy(new ForwardPolicy().forwardPolicy(new ForwardTill().name(ForwardTill.class.getSimpleName()).versionInfo(version))) ); String codecType = "custom"; @@ -85,7 +85,7 @@ public void testDecode() { assertEquals(encodingInfo.getCodecType(), "custom"); assertEquals(encodingInfo.getVersionInfo(), versionInfo); assertEquals(encodingInfo.getSchemaInfo(), schemaInfo); - io.pravega.schemaregistry.contract.data.SchemaWithVersion schemaWithVersion = ModelHelper.decode(new SchemaWithVersion().schemaInfo(schema).version(version)); + io.pravega.schemaregistry.contract.data.SchemaWithVersion schemaWithVersion = ModelHelper.decode(new SchemaWithVersion().schemaInfo(schema).versionInfo(version)); assertEquals(schemaWithVersion.getVersionInfo(), versionInfo); assertEquals(schemaWithVersion.getSchemaInfo(), schemaInfo); @@ -140,13 +140,13 @@ public void testEncode() { schemaInfo, versionInfo, compatibility, 100L, "")); assertEquals(schemaEvolution.getSchemaInfo(), schema); assertEquals(schemaEvolution.getCompatibility(), rules); - assertEquals(schemaEvolution.getVersion(), version); + assertEquals(schemaEvolution.getVersionInfo(), version); assertEquals(schemaEvolution.getTimestamp().longValue(), 100L); assertEquals(schemaEvolution.getSchemaString(), ""); SchemaWithVersion schemaWithVersion = ModelHelper.encode(new 
io.pravega.schemaregistry.contract.data.SchemaWithVersion(schemaInfo, versionInfo)); assertEquals(schemaWithVersion.getSchemaInfo(), schema); - assertEquals(schemaWithVersion.getVersion(), version); + assertEquals(schemaWithVersion.getVersionInfo(), version); GroupProperties groupProperties = ModelHelper.encode(prop); assertEquals(groupProperties.getSerializationFormat(), type); From a44c0fb1b61fc930f7f3e61674700d83ac635c5a Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Mon, 22 Jun 2020 03:23:52 -0700 Subject: [PATCH 28/70] required Signed-off-by: Shivesh Ranjan --- .../contract/generated/rest/model/BackwardTill.java | 3 ++- .../contract/generated/rest/model/ForwardTill.java | 3 ++- .../contract/generated/rest/model/GroupHistoryRecord.java | 3 ++- .../contract/generated/rest/model/SchemaWithVersion.java | 3 ++- contract/src/main/swagger/SchemaRegistry.yaml | 8 ++++---- 5 files changed, 12 insertions(+), 8 deletions(-) diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/BackwardTill.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/BackwardTill.java index 44ca296b8..a636e4429 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/BackwardTill.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/BackwardTill.java @@ -63,7 +63,8 @@ public BackwardTill versionInfo(VersionInfo versionInfo) { * @return versionInfo **/ @JsonProperty("versionInfo") - @ApiModelProperty(value = "Whether given schema is valid with respect to existing group schemas against the configured compatibility.") + @ApiModelProperty(required = true, value = "Whether given schema is valid with respect to existing group schemas against the configured compatibility.") + @NotNull public VersionInfo getVersionInfo() { return versionInfo; } diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/ForwardTill.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/ForwardTill.java index 6176540c6..4eba58d47 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/ForwardTill.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/ForwardTill.java @@ -63,7 +63,8 @@ public ForwardTill versionInfo(VersionInfo versionInfo) { * @return versionInfo **/ @JsonProperty("versionInfo") - @ApiModelProperty(value = "Whether given schema is valid with respect to existing group schemas against the configured compatibility.") + @ApiModelProperty(required = true, value = "Whether given schema is valid with respect to existing group schemas against the configured compatibility.") + @NotNull public VersionInfo getVersionInfo() { return versionInfo; } diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/GroupHistoryRecord.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/GroupHistoryRecord.java index 3b3f84e9a..350fa9904 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/GroupHistoryRecord.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/GroupHistoryRecord.java @@ -74,7 +74,8 @@ public GroupHistoryRecord versionInfo(VersionInfo versionInfo) { * @return versionInfo **/ @JsonProperty("versionInfo") - @ApiModelProperty(value = "Schema version information object.") + @ApiModelProperty(required = true, value = "Schema 
version information object.") + @NotNull public VersionInfo getVersionInfo() { return versionInfo; } diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaWithVersion.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaWithVersion.java index 23cc08d6b..6272b5761 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaWithVersion.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SchemaWithVersion.java @@ -64,7 +64,8 @@ public SchemaWithVersion versionInfo(VersionInfo versionInfo) { * @return versionInfo **/ @JsonProperty("versionInfo") - @ApiModelProperty(value = "Version information.") + @ApiModelProperty(required = true, value = "Version information.") + @NotNull public VersionInfo getVersionInfo() { return versionInfo; } diff --git a/contract/src/main/swagger/SchemaRegistry.yaml b/contract/src/main/swagger/SchemaRegistry.yaml index dba96ef12..90b83990e 100644 --- a/contract/src/main/swagger/SchemaRegistry.yaml +++ b/contract/src/main/swagger/SchemaRegistry.yaml @@ -699,7 +699,7 @@ definitions: $ref: "#/definitions/VersionInfo" required: - schemaInfo - - version + - versionInfo SchemaVersionsList: type: object description: List of schemas with their versions. @@ -847,7 +847,7 @@ definitions: $ref: "#/definitions/VersionInfo" required: - name - - version + - versionInfo ForwardTill: type: object description: ForwardPolicy compatibility which tells the service to check for forwardPolicy compatibility with all previous schemas till specific version. @@ -859,7 +859,7 @@ definitions: $ref: "#/definitions/VersionInfo" required: - name - - version + - versionInfo CodecTypesList: type: object description: Response object for listCodecTypes. @@ -909,7 +909,7 @@ definitions: type: string required: - schemaInfo - - version + - versionInfo - compatibility - timestamp GroupHistory: From 48da36028598fab48583b56c6046db827a55ed33 Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Tue, 23 Jun 2020 04:54:25 -0700 Subject: [PATCH 29/70] add new deserializer factory method Signed-off-by: Shivesh Ranjan --- .../serializers/SerializerFactory.java | 48 ++++++++++++++++++- .../pravega/schemaregistry/GroupIdTest.java | 6 +-- 2 files changed, 48 insertions(+), 6 deletions(-) diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java index f66a502ba..24ddfc09b 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java @@ -590,10 +590,11 @@ protected T deserialize(InputStream inputStream, SchemaInfo writerSchema, Schema // region multi format deserializer /** - * A deserializer that can read data where each event could be written with different serialization formats. + * A deserializer that can read data where each event could be written with either of avro, protobuf or json + * serialization formats. * * @param config serializer config - * @return a deserializer that can deserialize protobuf, json or avro events into java objects. + * @return a deserializer that can deserialize events serialized as protobuf, json or avro into java objects. 
*/ public static Serializer multiFormatGenericDeserializer(SerializerConfig config) { String groupId = config.getGroupId(); @@ -619,6 +620,49 @@ public static Serializer multiFormatGenericDeserializer(SerializerConfig encodingCache); } + /** + * A deserializer that can read data where each event could be written with different serialization formats. + * It has built in deserializers for protobuf, avro and json. In addition to it, users can supply + * deserializers for their custom formats. + * + * @param config serializer config + * @param deserializers Map of serialization format to corresponding deserializer. + * @return a deserializer that can deserialize protobuf, json or avro events into java objects. + */ + public static Serializer multiFormatGenericDeserializer(SerializerConfig config, Map> deserializers) { + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? + SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : + config.getRegistryConfigOrClient().getRight(); + autoCreateGroup(schemaRegistryClient, config); + failOnCodecMismatch(schemaRegistryClient, config); + EncodingCache encodingCache = EncodingCache.getEncodingCacheForGroup(groupId, schemaRegistryClient); + + AbstractPravegaDeserializer json = new JsonGenericDeserlizer(config.getGroupId(), schemaRegistryClient, + config.getDecoder(), encodingCache); + AbstractPravegaDeserializer protobuf = new ProtobufGenericDeserlizer(groupId, schemaRegistryClient, null, config.getDecoder(), + encodingCache); + AbstractPravegaDeserializer avro = new AvroGenericDeserlizer(groupId, schemaRegistryClient, null, config.getDecoder(), + encodingCache); + + Map map = new HashMap<>(); + map.put(SerializationFormat.Json, json); + map.put(SerializationFormat.Avro, avro); + map.put(SerializationFormat.Protobuf, protobuf); + + deserializers.forEach((key, value) -> { + map.put(key, new AbstractPravegaDeserializer(groupId, schemaRegistryClient, null, false, config.getDecoder(), encodingCache) { + @Override + protected Object deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) { + return value.deserialize(inputStream, writerSchema, readerSchema); + } + }); + }); + + return new MultipleFormatGenericDeserializer(groupId, schemaRegistryClient, map, config.getDecoder(), + encodingCache); + } + /** * A deserializer that can read data where each event could be written with different serialization formats and * deserializes and converts them to a json string. 
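A rough usage sketch for the new overload above (illustrative only: the SerializerConfig and SchemaRegistryClientConfig builder calls, the CustomDeserializer value type, and MyFormatParser are assumptions for the sketch, not part of this patch):

    // Hypothetical wiring; the builder fields shown here are assumptions.
    SerializerConfig config = SerializerConfig.builder()
            .groupId("mygroup")
            .registryConfig(SchemaRegistryClientConfig.builder()
                    .schemaRegistryUri(URI.create("http://localhost:9092"))
                    .build())
            .build();

    // Map a custom serialization format to its deserializer. The map value type is assumed
    // to expose deserialize(InputStream, SchemaInfo writerSchema, SchemaInfo readerSchema),
    // which is the call the factory overload makes on each supplied entry.
    Map<SerializationFormat, CustomDeserializer<Object>> custom = new HashMap<>();
    custom.put(SerializationFormat.custom("myFormat"),
            (inputStream, writerSchema, readerSchema) -> MyFormatParser.parse(inputStream));

    // Built-in Avro, Protobuf and Json generic deserializers are created by the factory;
    // the supplied entries are layered on top for events written with the custom format.
    Serializer<Object> deserializer = SerializerFactory.multiFormatGenericDeserializer(config, custom);

Events whose encoding header resolves to the custom format would then be routed to the supplied deserializer, while Avro, Protobuf and Json events continue to be handled by the built-in generic deserializers, as in the single-argument variant.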
diff --git a/serializers/src/test/java/io/pravega/schemaregistry/GroupIdTest.java b/serializers/src/test/java/io/pravega/schemaregistry/GroupIdTest.java index ef0c7524c..322fb02f6 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/GroupIdTest.java +++ b/serializers/src/test/java/io/pravega/schemaregistry/GroupIdTest.java @@ -16,14 +16,12 @@ import java.net.URLDecoder; import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; public class GroupIdTest { @Test public void testGroupId() throws UnsupportedEncodingException { String groupId = GroupIdGenerator.getGroupId(GroupIdGenerator.Type.QualifiedStreamName, "scope", "stream"); - - assertTrue(groupId.startsWith("pravega")); - assertEquals(URLDecoder.decode(groupId, Charsets.UTF_8.toString()), "pravega://scope/stream/"); + + assertEquals(URLDecoder.decode(groupId, Charsets.UTF_8.toString()), "scope/stream"); } } From 72919bbdf992167ca1edb56133b24e96ec62a78b Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Tue, 23 Jun 2020 20:17:07 -0700 Subject: [PATCH 30/70] PR comments Signed-off-by: Shivesh Ranjan --- .../client/SchemaRegistryClient.java | 54 +++-------- .../client/SchemaRegistryClientImpl.java | 92 +++++++++---------- .../contract/data/SerializationFormat.java | 31 +++++-- .../rest/model/SerializationFormat.java | 32 +++---- .../contract/transform/ModelHelper.java | 15 +-- contract/src/main/swagger/SchemaRegistry.yaml | 4 +- .../contract/transform/ModelHelperTest.java | 4 +- 7 files changed, 111 insertions(+), 121 deletions(-) diff --git a/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java index 448c86fc1..3c1965c29 100644 --- a/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java +++ b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java @@ -28,7 +28,7 @@ /** * Defines a registry client for interacting with schema registry service. - * The implementation of this interface should provide atomicity and read-after-write-consistency guarantees for all the methods. + * The implementation of this interface should provide read-after-write-consistency guarantees for all the methods. */ @Beta public interface SchemaRegistryClient { @@ -63,9 +63,8 @@ public interface SchemaRegistryClient { * List all groups that the user is authorized on. This returns an iterator where each element is a pair of group * name and group properties. * This iterator can be used to iterate over each element until all elements are exhausted. - * The implementation should guarantee that all groups added before and till the iterator continues to return - * {@link Iterator#hasNext()} = true should be available for iteration. - * + * The implementation should guarantee that all groups added before the iterator {@link Iterator#hasNext()} = false + * will be included. * @return map of names of groups with corresponding group properties for all groups. * @throws UnauthorizedException if the user is unauthorized. */ @@ -105,7 +104,9 @@ boolean updateCompatibility(String groupId, Compatibility compatibility, @Nullab /** * Gets list of latest schemas for each object types registered under the group. Objects are identified by {@link SchemaInfo#type}. - * Schemas are retrieved atomically. So all schemas added before this call will be returned by this call. + * Schema registry provides consistency guarantees. 
So all schemas added before this call will be returned by this call. + * However, the service side implementation is not guaranteed to be atomic. + * So if schemas are being added concurrently, the schemas returned by this api may or may not include those. * * @param groupId Id for the group. * @return Unordered list of different objects within the group. @@ -157,28 +158,7 @@ VersionInfo addSchema(String groupId, SchemaInfo schemaInfo) throws SchemaValida * @throws UnauthorizedException if the user is unauthorized. */ void deleteSchemaVersion(String groupId, VersionInfo versionInfo) throws ResourceNotFoundException, UnauthorizedException; - - /** - * Deletes the schema associated to the given version. Users should be very careful while using this API in production, - * esp if the schema has already been used to write the data. - * An implementation of the delete call is expected to be idempotent. The behaviour of delete schema API invocation - * with the schema registry service is idempotent. - * The service performs a soft delete of the schema. So getSchemaVersion with the version info will still return the schema. - * However, the schema will not participate in any compatibility checks once deleted. - * It will not be included in listing schema versions for the group using APIs like {@link SchemaRegistryClient#getSchemaVersions} - * or {@link SchemaRegistryClient#getGroupHistory} or {@link SchemaRegistryClient#getSchemas} or - * {@link SchemaRegistryClient#getLatestSchemaVersion} - * If add schema is called again using this deleted schema will result in a new version being assigned to it upon registration. - * - * @param groupId Id for the group. - * @param schemaType schemaType that identifies the type of object the schema represents. This should be same as the - * value specified in {@link SchemaInfo#type}. - * @param version Version number which uniquely identifies schema for the schemaType within a group. - * @throws ResourceNotFoundException if group is not found. - * @throws UnauthorizedException if the user is unauthorized. - */ - void deleteSchemaVersion(String groupId, String schemaType, int version) throws ResourceNotFoundException, UnauthorizedException; - + /** * Gets schema corresponding to the version. * @@ -189,19 +169,7 @@ VersionInfo addSchema(String groupId, SchemaInfo schemaInfo) throws SchemaValida * @throws UnauthorizedException if the user is unauthorized. */ SchemaInfo getSchemaForVersion(String groupId, VersionInfo versionInfo) throws ResourceNotFoundException, UnauthorizedException; - - /** - * Gets schema corresponding to the version. - * - * @param groupId Id for the group. - * @param schemaType schemaType as specified in the {@link SchemaInfo#type} while registering the schema. - * @param version Version which uniquely identifies schema of schemaType within a group. - * @return Schema info corresponding to the version info. - * @throws ResourceNotFoundException if group or version is not found. - * @throws UnauthorizedException if the user is unauthorized. - */ - SchemaInfo getSchemaForVersion(String groupId, String schemaType, int version) throws ResourceNotFoundException, UnauthorizedException; - + /** * Gets encoding info against the requested encoding Id. The purpose of encoding info is to uniquely identify the encoding * used on the data at rest. 
The encoding covers two parts - @@ -340,9 +308,11 @@ SchemaWithVersion getLatestSchemaVersion(String groupId, @Nullable String schema * Gets complete schema evolution history of the group with schemas, versions, compatibility policy and * time when the schema was added to the group. * The order in the list matches the order in which schemas were evolved within the group. - * This call is atomic and will get a consistent view at the time when the request is processed on the service. + * This call will get a consistent view at the time when the request is processed on the service. * So all schemas that were added before this call are returned and all schemas that were deleted before this call - * are excluded. + * are excluded. + * The execution of this API is non-atomic and if concurrent requests to add or delete schemas are invoked, it may or may not + * include those schemas in the response. * * @param groupId Id for the group. * @return Ordered list of schemas with versions and compatibility policy for all schemas in the group. diff --git a/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientImpl.java b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientImpl.java index d7a5da869..0a5ec62f4 100644 --- a/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientImpl.java +++ b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientImpl.java @@ -85,7 +85,8 @@ public boolean addGroup(String groupId, GroupProperties groupProperties) { return withRetry(() -> { CreateGroupRequest request = new CreateGroupRequest().groupName(groupId).groupProperties(ModelHelper.encode(groupProperties)); Response response = groupProxy.createGroup(request); - switch (Response.Status.fromStatusCode(response.getStatus())) { + Response.Status status = Response.Status.fromStatusCode(response.getStatus()); + switch (status) { case CREATED: return true; case CONFLICT: @@ -93,7 +94,7 @@ public boolean addGroup(String groupId, GroupProperties groupProperties) { case BAD_REQUEST: throw new BadArgumentException("Group properties invalid."); default: - throw new InternalServerError("Internal Service error. Failed to add the group."); + return handleResponse(status, "Internal Service error. Failed to add the group."); } }); } @@ -106,7 +107,7 @@ public void removeGroup(String groupId) { case NO_CONTENT: return; default: - throw new InternalServerError("Internal Service error. Failed to remove the group."); + handleResponse(Response.Status.fromStatusCode(response.getStatus()), "Internal Service error. Failed to remove the group."); } }); } @@ -134,7 +135,7 @@ private ListGroupsResponse getListGroupsResponse(String continuationToken) { case OK: return response.readEntity(ListGroupsResponse.class); default: - throw new InternalServerError("Internal Service error. Failed to list groups."); + return handleResponse(Response.Status.fromStatusCode(response.getStatus()), "Internal Service error. Failed to list groups."); } }); } @@ -149,7 +150,7 @@ public GroupProperties getGroupProperties(String groupId) { case NOT_FOUND: throw new ResourceNotFoundException("Group not found."); default: - throw new InternalServerError("Internal Service error. Failed to list groups."); + return handleResponse(Response.Status.fromStatusCode(response.getStatus()), "Internal Service error. 
Failed to list groups."); } }); } @@ -172,7 +173,8 @@ public boolean updateCompatibility(String groupId, Compatibility compatibility, case OK: return true; default: - throw new InternalServerError("Internal Service error. Failed to update compatibility."); + return handleResponse(Response.Status.fromStatusCode(response.getStatus()), + "Internal Service error. Failed to update compatibility."); } }); } @@ -192,7 +194,8 @@ private List latestSchemas(String groupId, String type) { case NOT_FOUND: throw new ResourceNotFoundException("Group not found."); default: - throw new InternalServerError("Internal Service error. Failed to get object types."); + return handleResponse(Response.Status.fromStatusCode(response.getStatus()), + "Internal Service error. Failed to get object types."); } }); } @@ -213,7 +216,8 @@ public VersionInfo addSchema(String groupId, SchemaInfo schemaInfo) { case BAD_REQUEST: throw new MalformedSchemaException("Schema is malformed. Verify the schema data and type"); default: - throw new InternalServerError("Internal Service error. Failed to addSchema."); + return handleResponse(Response.Status.fromStatusCode(response.getStatus()), + "Internal Service error. Failed to addSchema."); } }); } @@ -225,19 +229,8 @@ public void deleteSchemaVersion(String groupId, VersionInfo versionInfo) { if (response.getStatus() == Response.Status.NOT_FOUND.getStatusCode()) { throw new ResourceNotFoundException("Group not found."); } else if (response.getStatus() != Response.Status.NO_CONTENT.getStatusCode()) { - throw new InternalServerError("Internal Service error. Failed to get schema."); - } - }); - } - - @Override - public void deleteSchemaVersion(String groupId, String schemaType, int version) { - withRetry(() -> { - Response response = groupProxy.deleteSchemaVersion(groupId, schemaType, version); - if (response.getStatus() == Response.Status.NOT_FOUND.getStatusCode()) { - throw new ResourceNotFoundException("Group not found."); - } else if (response.getStatus() != Response.Status.NO_CONTENT.getStatusCode()) { - throw new InternalServerError("Internal Service error. Failed to get schema."); + handleResponse(Response.Status.fromStatusCode(response.getStatus()), + "Internal Service error. Failed to get schema."); } }); } @@ -252,22 +245,8 @@ public SchemaInfo getSchemaForVersion(String groupId, VersionInfo versionInfo) { case NOT_FOUND: throw new ResourceNotFoundException("Schema not found."); default: - throw new InternalServerError("Internal Service error. Failed to get schema."); - } - }); - } - - @Override - public SchemaInfo getSchemaForVersion(String groupId, String schemaType, int version) { - return withRetry(() -> { - Response response = groupProxy.getSchemaFromVersion(groupId, schemaType, version); - switch (Response.Status.fromStatusCode(response.getStatus())) { - case OK: - return ModelHelper.decode(response.readEntity(io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo.class)); - case NOT_FOUND: - throw new ResourceNotFoundException("Schema not found."); - default: - throw new InternalServerError("Internal Service error. Failed to get schema."); + return handleResponse(Response.Status.fromStatusCode(response.getStatus()), + "Internal Service error. Failed to get schema."); } }); } @@ -282,7 +261,8 @@ public EncodingInfo getEncodingInfo(String groupId, EncodingId encodingId) { case NOT_FOUND: throw new ResourceNotFoundException("Encoding not found."); default: - throw new InternalServerError("Internal Service error. 
Failed to get encoding info."); + return handleResponse(Response.Status.fromStatusCode(response.getStatus()), + "Internal Service error. Failed to get encoding info."); } }); } @@ -302,7 +282,8 @@ public EncodingId getEncodingId(String groupId, VersionInfo versionInfo, String case PRECONDITION_FAILED: throw new CodecTypeNotRegisteredException(String.format("Codec type %s not registered.", codecType)); default: - throw new InternalServerError("Internal Service error. Failed to get encoding info."); + return handleResponse(Response.Status.fromStatusCode(response.getStatus()), + "Internal Service error. Failed to get encoding info."); } }); } @@ -328,7 +309,8 @@ public List getSchemaVersions(String groupId, @Nullable Strin case NOT_FOUND: throw new ResourceNotFoundException("getSchemaVersions failed. Group does not exist."); default: - throw new InternalServerError("Internal Service error. Failed to get schema versions for group."); + return handleResponse(Response.Status.fromStatusCode(response.getStatus()), + "Internal Service error. Failed to get schema versions for group."); } }); } @@ -344,7 +326,8 @@ public List getGroupHistory(String groupId) { case NOT_FOUND: throw new ResourceNotFoundException("getGroupHistory failed. Either Group or Version does not exist."); default: - throw new InternalServerError("Internal Service error. Failed to get schema evolution history for group."); + return handleResponse(Response.Status.fromStatusCode(response.getStatus()), + "Internal Service error. Failed to get schema evolution history for group."); } }); } @@ -361,7 +344,8 @@ public Map getSchemaReferences(SchemaInfo schemaInfo) throw case NOT_FOUND: throw new ResourceNotFoundException("getSchemaReferences failed. Either Group or Version does not exist."); default: - throw new InternalServerError("Internal Service error. Failed to get schema evolution history for group."); + return handleResponse(Response.Status.fromStatusCode(response.getStatus()), + "Internal Service error. Failed to get schema evolution history for group."); } }); } @@ -378,7 +362,8 @@ public VersionInfo getVersionForSchema(String groupId, SchemaInfo schema) { case NOT_FOUND: throw new ResourceNotFoundException("Schema not found."); default: - throw new InternalServerError("Internal Service error. Failed to get schema version."); + return handleResponse(Response.Status.fromStatusCode(response.getStatus()), + "Internal Service error. Failed to get schema version."); } }); } @@ -395,7 +380,8 @@ public boolean validateSchema(String groupId, SchemaInfo schemaInfo) { case NOT_FOUND: throw new ResourceNotFoundException("Group not found."); default: - throw new InternalServerError("Internal Service error."); + return handleResponse(Response.Status.fromStatusCode(response.getStatus()), + "Internal Service error."); } }); } @@ -411,7 +397,8 @@ public boolean canReadUsing(String groupId, SchemaInfo schemaInfo) { case NOT_FOUND: throw new ResourceNotFoundException("Schema not found."); default: - throw new InternalServerError("Internal Service error."); + return handleResponse(Response.Status.fromStatusCode(response.getStatus()), + "Internal Service error."); } }); } @@ -427,7 +414,8 @@ public List getCodecTypes(String groupId) { case NOT_FOUND: throw new ResourceNotFoundException("Group not found."); default: - throw new InternalServerError("Failed to get codecTypes. Internal server error."); + return handleResponse(Response.Status.fromStatusCode(response.getStatus()), + "Failed to get codecTypes. 
Internal server error."); } }); } @@ -443,7 +431,8 @@ public void addCodecType(String groupId, String codecType) { case NOT_FOUND: throw new ResourceNotFoundException("Group not found."); default: - throw new InternalServerError("Failed to add codec type. Internal server error."); + handleResponse(Response.Status.fromStatusCode(response.getStatus()), + "Failed to add codec type. Internal server error."); } }); } @@ -458,4 +447,13 @@ private void withRetry(Runnable runnable) { return null; }); } + + private T handleResponse(Response.Status status, String errorMessage) { + switch (status) { + case FORBIDDEN: + throw new UnauthorizedException("User not authorized."); + default: + throw new InternalServerError(errorMessage); + } + } } diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/data/SerializationFormat.java b/contract/src/main/java/io/pravega/schemaregistry/contract/data/SerializationFormat.java index 2cee9f4fc..1c24104e2 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/data/SerializationFormat.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/data/SerializationFormat.java @@ -9,6 +9,8 @@ */ package io.pravega.schemaregistry.contract.data; +import com.google.common.base.Preconditions; +import com.google.common.base.Strings; import lombok.AccessLevel; import lombok.Getter; import lombok.Setter; @@ -17,8 +19,10 @@ * Different types of serialization formats used for serializing data. * Registry supports Avro, Protobuf and Json serialization formats but any custom type could be used with the registry using custom type. * - * If a serialization format is not present in the enum it can be specified using {@link SerializationFormat#custom} with {@link SerializationFormat#customTypeName}. - * Allowed values of {@link BackwardAndForward} mode with custom type are AllowAny or DenyAll. + * If a serialization format is not present in the enum it can be specified using {@link SerializationFormat#custom} with + * {@link SerializationFormat#fullTypeName}. + * Allowed values of {@link Compatibility} with {@link SerializationFormat#custom} are {@link Compatibility#allowAny} + * or {@link Compatibility#denyAll}. */ public enum SerializationFormat { @@ -30,16 +34,31 @@ public enum SerializationFormat { @Getter @Setter(AccessLevel.PRIVATE) - private String customTypeName; + private String fullTypeName; /** * Method to define a custom serialization format with a custom name. - * @param customTypeName Custom type name. + * @param fullTypeName Custom type name. * @return {@link SerializationFormat#Custom} with supplied custom type name. */ - public static SerializationFormat custom(String customTypeName) { + public static SerializationFormat custom(String fullTypeName) { + Preconditions.checkArgument(!Strings.isNullOrEmpty(fullTypeName)); SerializationFormat type = SerializationFormat.Custom; - type.setCustomTypeName(customTypeName); + type.setFullTypeName(fullTypeName); + return type; + } + + /** + * Method to create a serialization format with a full name. + * + * @param fullTypeName Custom type name. + * @param format Serialization format. + * @return {@link SerializationFormat#Custom} with supplied custom type name. 
+ */ + public static SerializationFormat withName(SerializationFormat format, String fullTypeName) { + Preconditions.checkArgument(format != null); + SerializationFormat type = SerializationFormat.valueOf(format.name()); + type.setFullTypeName(fullTypeName); return type; } } diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SerializationFormat.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SerializationFormat.java index bc980cbd6..5f0bb9df2 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SerializationFormat.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/SerializationFormat.java @@ -22,9 +22,9 @@ import javax.validation.constraints.*; /** - * Serialization format enum that lists different serialization formats supported by the service. To use additional formats, use serializationFormat.Custom and supply customTypeName. + * Serialization format enum that lists different serialization formats supported by the service. To use additional formats, use serializationFormat.Custom and supply fullTypeName. */ -@ApiModel(description = "Serialization format enum that lists different serialization formats supported by the service. To use additional formats, use serializationFormat.Custom and supply customTypeName.") +@ApiModel(description = "Serialization format enum that lists different serialization formats supported by the service. To use additional formats, use serializationFormat.Custom and supply fullTypeName.") public class SerializationFormat { /** @@ -67,8 +67,8 @@ public static SerializationFormatEnum fromValue(String text) { @JsonProperty("serializationFormat") private SerializationFormatEnum serializationFormat = null; - @JsonProperty("customTypeName") - private String customTypeName = null; + @JsonProperty("fullTypeName") + private String fullTypeName = null; public SerializationFormat serializationFormat(SerializationFormatEnum serializationFormat) { this.serializationFormat = serializationFormat; @@ -90,23 +90,23 @@ public void setSerializationFormat(SerializationFormatEnum serializationFormat) this.serializationFormat = serializationFormat; } - public SerializationFormat customTypeName(String customTypeName) { - this.customTypeName = customTypeName; + public SerializationFormat fullTypeName(String fullTypeName) { + this.fullTypeName = fullTypeName; return this; } /** - * Get customTypeName - * @return customTypeName + * Get fullTypeName + * @return fullTypeName **/ - @JsonProperty("customTypeName") + @JsonProperty("fullTypeName") @ApiModelProperty(value = "") - public String getCustomTypeName() { - return customTypeName; + public String getFullTypeName() { + return fullTypeName; } - public void setCustomTypeName(String customTypeName) { - this.customTypeName = customTypeName; + public void setFullTypeName(String fullTypeName) { + this.fullTypeName = fullTypeName; } @@ -120,12 +120,12 @@ public boolean equals(java.lang.Object o) { } SerializationFormat serializationFormat = (SerializationFormat) o; return Objects.equals(this.serializationFormat, serializationFormat.serializationFormat) && - Objects.equals(this.customTypeName, serializationFormat.customTypeName); + Objects.equals(this.fullTypeName, serializationFormat.fullTypeName); } @Override public int hashCode() { - return Objects.hash(serializationFormat, customTypeName); + return Objects.hash(serializationFormat, fullTypeName); } @@ -135,7 +135,7 @@ public String 
toString() { sb.append("class SerializationFormat {\n"); sb.append(" serializationFormat: ").append(toIndentedString(serializationFormat)).append("\n"); - sb.append(" customTypeName: ").append(toIndentedString(customTypeName)).append("\n"); + sb.append(" fullTypeName: ").append(toIndentedString(fullTypeName)).append("\n"); sb.append("}"); return sb.toString(); } diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/transform/ModelHelper.java b/contract/src/main/java/io/pravega/schemaregistry/contract/transform/ModelHelper.java index b7f40de4d..beb08164b 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/transform/ModelHelper.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/transform/ModelHelper.java @@ -63,10 +63,12 @@ public static io.pravega.schemaregistry.contract.data.SerializationFormat decode Preconditions.checkArgument(serializationFormat != null, "serialization format cannot be null"); switch (serializationFormat.getSerializationFormat()) { case CUSTOM: - Preconditions.checkArgument(serializationFormat.getCustomTypeName() != null, "Custom name not supplied"); - return io.pravega.schemaregistry.contract.data.SerializationFormat.custom(serializationFormat.getCustomTypeName()); + Preconditions.checkArgument(serializationFormat.getFullTypeName() != null, "Custom name not supplied"); + return io.pravega.schemaregistry.contract.data.SerializationFormat.custom(serializationFormat.getFullTypeName()); default: - return searchEnum(io.pravega.schemaregistry.contract.data.SerializationFormat.class, serializationFormat.getSerializationFormat().name()); + return io.pravega.schemaregistry.contract.data.SerializationFormat.withName( + searchEnum(io.pravega.schemaregistry.contract.data.SerializationFormat.class, serializationFormat.getSerializationFormat().name()), + serializationFormat.getFullTypeName()); } } @@ -319,12 +321,13 @@ public static SchemaInfo encode(io.pravega.schemaregistry.contract.data.SchemaIn public static SerializationFormat encode(io.pravega.schemaregistry.contract.data.SerializationFormat serializationFormat) { if (serializationFormat.equals(io.pravega.schemaregistry.contract.data.SerializationFormat.Custom)) { - Preconditions.checkArgument(serializationFormat.getCustomTypeName() != null); + Preconditions.checkArgument(serializationFormat.getFullTypeName() != null); SerializationFormat serializationFormatModel = new SerializationFormat().serializationFormat(SerializationFormat.SerializationFormatEnum.CUSTOM); - return serializationFormatModel.customTypeName(serializationFormat.getCustomTypeName()); + return serializationFormatModel.fullTypeName(serializationFormat.getFullTypeName()); } else { return new SerializationFormat().serializationFormat( - searchEnum(SerializationFormat.SerializationFormatEnum.class, serializationFormat.name())); + searchEnum(SerializationFormat.SerializationFormatEnum.class, serializationFormat.name())) + .fullTypeName(serializationFormat.getFullTypeName()); } } diff --git a/contract/src/main/swagger/SchemaRegistry.yaml b/contract/src/main/swagger/SchemaRegistry.yaml index 90b83990e..65448ea59 100644 --- a/contract/src/main/swagger/SchemaRegistry.yaml +++ b/contract/src/main/swagger/SchemaRegistry.yaml @@ -629,7 +629,7 @@ definitions: - compatibility SerializationFormat: type: object - description: Serialization format enum that lists different serialization formats supported by the service. To use additional formats, use serializationFormat.Custom and supply customTypeName. 
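With the rename from customTypeName to fullTypeName, the decode path above can carry a full type name for the built-in formats as well, via the new withName factory. A short usage sketch follows, assuming the data-contract classes from this patch; the com.example names are purely illustrative.

    import io.pravega.schemaregistry.contract.data.SerializationFormat;

    class FormatNaming {
        static void demo() {
            // Custom formats must supply a non-empty name (enforced by the new Preconditions check).
            SerializationFormat custom = SerializationFormat.custom("com.example.MyWireFormat");

            // Built-in formats can optionally carry the serialized object's full type name.
            SerializationFormat protobufWithName =
                    SerializationFormat.withName(SerializationFormat.Protobuf, "com.example.generated.Message1");

            System.out.println(custom.getFullTypeName());           // com.example.MyWireFormat
            System.out.println(protobufWithName.getFullTypeName()); // com.example.generated.Message1
        }
    }

One design point worth keeping in mind: as written, both custom and withName set fullTypeName on the shared enum constant, so a later call with a different name overwrites the earlier one process-wide.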
+ description: Serialization format enum that lists different serialization formats supported by the service. To use additional formats, use serializationFormat.Custom and supply fullTypeName. properties: serializationFormat: type: string @@ -639,7 +639,7 @@ definitions: - Json - Any - Custom - customTypeName: + fullTypeName: type: string required: - serializationFormat diff --git a/contract/src/test/java/io/pravega/schemaregistry/contract/transform/ModelHelperTest.java b/contract/src/test/java/io/pravega/schemaregistry/contract/transform/ModelHelperTest.java index 6f672b378..0ca9f4803 100644 --- a/contract/src/test/java/io/pravega/schemaregistry/contract/transform/ModelHelperTest.java +++ b/contract/src/test/java/io/pravega/schemaregistry/contract/transform/ModelHelperTest.java @@ -36,7 +36,7 @@ public class ModelHelperTest { @Test public void testDecode() { - SerializationFormat type = new SerializationFormat().serializationFormat(SerializationFormat.SerializationFormatEnum.CUSTOM).customTypeName("a"); + SerializationFormat type = new SerializationFormat().serializationFormat(SerializationFormat.SerializationFormatEnum.CUSTOM).fullTypeName("a"); Compatibility backward = new Compatibility() .policy(Compatibility.PolicyEnum.ADVANCED) .advanced(new BackwardAndForward().backwardPolicy(new BackwardPolicy() @@ -55,7 +55,7 @@ public void testDecode() { // decodes io.pravega.schemaregistry.contract.data.SerializationFormat serializationFormat = ModelHelper.decode(type); assertEquals(serializationFormat, io.pravega.schemaregistry.contract.data.SerializationFormat.Custom); - assertEquals(serializationFormat.getCustomTypeName(), "a"); + assertEquals(serializationFormat.getFullTypeName(), "a"); io.pravega.schemaregistry.contract.data.SchemaInfo schemaInfo = ModelHelper.decode(schema); assertEquals(schemaInfo.getType(), "a"); From 7c9ba8e58beb4dc1581a5ef1a13dffa44352e7ae Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Wed, 24 Jun 2020 19:22:45 -0700 Subject: [PATCH 31/70] avro reflect deserializer Signed-off-by: Shivesh Ranjan --- .../schemaregistry/schemas/AvroSchema.java | 20 +++++---- .../schemaregistry/schemas/JSONSchema.java | 5 ++- .../serializers/AvroDeserlizer.java | 16 ++++--- .../serializers/JsonStringDeserializer.java | 43 +++++++++++++++++++ .../serializers/SerializerFactory.java | 33 ++++++++++---- .../serializers/SerializerTest.java | 6 +++ 6 files changed, 100 insertions(+), 23 deletions(-) create mode 100644 serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonStringDeserializer.java diff --git a/serializers/src/main/java/io/pravega/schemaregistry/schemas/AvroSchema.java b/serializers/src/main/java/io/pravega/schemaregistry/schemas/AvroSchema.java index a1c4b6c24..530929f42 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/schemas/AvroSchema.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/schemas/AvroSchema.java @@ -32,17 +32,21 @@ public class AvroSchema implements SchemaContainer { @Getter private final Schema schema; private final SchemaInfo schemaInfo; - - private AvroSchema(Schema schema) { + @Getter + private final Class tClass; + + private AvroSchema(Schema schema, Class tClass) { this.schema = schema; this.schemaInfo = new SchemaInfo(schema.getName(), SerializationFormat.Avro, getSchemaBytes(), ImmutableMap.of()); + this.tClass = tClass; } - private AvroSchema(SchemaInfo schemaInfo) { + private AvroSchema(SchemaInfo schemaInfo, Class tClass) { String schemaString = new String(schemaInfo.getSchemaData().array(), Charsets.UTF_8); 
this.schema = new Schema.Parser().parse(schemaString); this.schemaInfo = schemaInfo; + this.tClass = tClass; } /** @@ -61,7 +65,7 @@ public static AvroSchema of(Class tClass) { } else { schema = ReflectData.get().getSchema(tClass); } - return new AvroSchema<>(schema); + return new AvroSchema<>(schema, tClass); } /** @@ -71,7 +75,7 @@ public static AvroSchema of(Class tClass) { * @return Returns an AvroSchema with {@link GenericRecord} type. */ public static AvroSchema of(Schema schema) { - return new AvroSchema<>(schema); + return new AvroSchema<>(schema, null); } /** @@ -83,10 +87,10 @@ public static AvroSchema of(Schema schema) { * @param Type of class whose schema is to be used. * @return Returns an AvroSchema with {@link SpecificRecordBase} type. */ - public static AvroSchema ofBaseType(Class tClass) { + public static AvroSchema ofBaseType(Class tClass) { Preconditions.checkArgument(SpecificRecordBase.class.isAssignableFrom(tClass)); - return new AvroSchema<>(SpecificData.get().getSchema(tClass)); + return new AvroSchema<>(SpecificData.get().getSchema(tClass), SpecificRecordBase.class); } /** @@ -96,7 +100,7 @@ public static AvroSchema ofBaseType(Class from(SchemaInfo schemaInfo) { - return new AvroSchema<>(schemaInfo); + return new AvroSchema<>(schemaInfo, null); } private ByteBuffer getSchemaBytes() { diff --git a/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java b/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java index 10d7f9fc5..f07dc0679 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java @@ -29,6 +29,7 @@ * @param Type of element. */ public class JSONSchema implements SchemaContainer { + @Getter private final String schemaString; @Getter private final Class tClass; @@ -78,7 +79,7 @@ public static JSONSchema of(Class tClass) { JsonSchema schema = schemaGen.generateSchema(tClass); String schemaString = objectMapper.writeValueAsString(schema); - return new JSONSchema<>(schema, null, schemaString, tClass); + return new JSONSchema<>(schema, tClass.getSimpleName(), schemaString, tClass); } /** @@ -102,7 +103,7 @@ public static JSONSchema ofBaseType(Class tDerivedClass, Cla JsonSchema schema = schemaGen.generateSchema(tDerivedClass); String schemaString = objectMapper.writeValueAsString(schema); - return new JSONSchema<>(schema, null, schemaString, tClass, tDerivedClass); + return new JSONSchema<>(schema, tDerivedClass.getSimpleName(), schemaString, tClass, tDerivedClass); } /** diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroDeserlizer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroDeserlizer.java index f27c712c8..8c986e1fb 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroDeserlizer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroDeserlizer.java @@ -20,14 +20,15 @@ import io.pravega.schemaregistry.schemas.AvroSchema; import lombok.SneakyThrows; import org.apache.avro.Schema; -import org.apache.avro.generic.IndexedRecord; import org.apache.avro.io.BinaryDecoder; import org.apache.avro.io.DecoderFactory; +import org.apache.avro.reflect.ReflectDatumReader; import org.apache.avro.specific.SpecificDatumReader; +import org.apache.avro.specific.SpecificRecordBase; import java.io.InputStream; -class AvroDeserlizer extends AbstractPravegaDeserializer { +class AvroDeserlizer extends 
AbstractPravegaDeserializer { private final AvroSchema avroSchema; private final LoadingCache knownSchemas; @@ -52,9 +53,14 @@ protected T deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, Sc Preconditions.checkNotNull(writerSchemaInfo); Schema writerSchema = knownSchemas.get(writerSchemaInfo.getSchemaData().array()); Schema readerSchema = avroSchema.getSchema(); - - SpecificDatumReader datumReader = new SpecificDatumReader<>(writerSchema, readerSchema); BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(inputStream, null); - return datumReader.read(null, decoder); + + if (SpecificRecordBase.class.isAssignableFrom(avroSchema.getTClass())) { + SpecificDatumReader datumReader = new SpecificDatumReader<>(writerSchema, readerSchema); + return datumReader.read(null, decoder); + } else { + ReflectDatumReader datumReader = new ReflectDatumReader<>(writerSchema, readerSchema); + return datumReader.read(null, decoder); + } } } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonStringDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonStringDeserializer.java new file mode 100644 index 000000000..845eeb851 --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonStringDeserializer.java @@ -0,0 +1,43 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import com.fasterxml.jackson.annotation.JsonAutoDetect; +import com.fasterxml.jackson.annotation.PropertyAccessor; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import io.pravega.schemaregistry.cache.EncodingCache; +import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.contract.data.SchemaInfo; +import lombok.SneakyThrows; + +import java.io.IOException; +import java.io.InputStream; +import java.util.Map; + +class JsonStringDeserializer extends AbstractPravegaDeserializer { + private final ObjectMapper objectMapper; + + JsonStringDeserializer(String groupId, SchemaRegistryClient client, + SerializerConfig.Decoder decoder, EncodingCache encodingCache) { + super(groupId, client, null, false, decoder, encodingCache); + this.objectMapper = new ObjectMapper(); + objectMapper.setVisibility(PropertyAccessor.ALL, JsonAutoDetect.Visibility.ANY); + objectMapper.setVisibility(PropertyAccessor.FIELD, JsonAutoDetect.Visibility.ANY); + objectMapper.setVisibility(PropertyAccessor.CREATOR, JsonAutoDetect.Visibility.ANY); + } + + @SneakyThrows({JsonProcessingException.class, IOException.class}) + @Override + protected String deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) { + Map obj = objectMapper.readValue(inputStream, Map.class); + return objectMapper.writeValueAsString(obj); + } +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java index 24ddfc09b..5ec6083f7 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java 
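The deserializer above now picks a reader based on the bound class: SpecificDatumReader for generated SpecificRecordBase types, ReflectDatumReader for plain POJOs. The standalone sketch below shows the underlying Avro reflect round trip that path relies on; it uses only the Avro library, outside the registry wiring, and the TestClass POJO mirrors the one added to SerializerTest later in this series.

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import org.apache.avro.Schema;
    import org.apache.avro.io.BinaryDecoder;
    import org.apache.avro.io.BinaryEncoder;
    import org.apache.avro.io.DecoderFactory;
    import org.apache.avro.io.EncoderFactory;
    import org.apache.avro.reflect.ReflectData;
    import org.apache.avro.reflect.ReflectDatumReader;
    import org.apache.avro.reflect.ReflectDatumWriter;

    public class ReflectRoundTrip {
        public static class TestClass {
            private String test;
            public TestClass() { }                       // Avro reflect instantiation uses the no-arg constructor
            public TestClass(String test) { this.test = test; }
            public String getTest() { return test; }
        }

        public static void main(String[] args) throws Exception {
            // Schema derived from the POJO via reflection, as AvroSchema.of(TestClass.class) does.
            Schema schema = ReflectData.get().getSchema(TestClass.class);

            ByteArrayOutputStream out = new ByteArrayOutputStream();
            BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
            new ReflectDatumWriter<TestClass>(schema).write(new TestClass("name"), encoder);
            encoder.flush();

            // Reader and writer schema are identical here; the registry path would
            // fetch the writer schema from the event's encoding info instead.
            BinaryDecoder decoder = DecoderFactory.get()
                    .binaryDecoder(new ByteArrayInputStream(out.toByteArray()), null);
            TestClass back = new ReflectDatumReader<TestClass>(schema, schema).read(null, decoder);
            System.out.println(back.getTest());          // prints: name
        }
    }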
@@ -27,8 +27,6 @@ import io.pravega.schemaregistry.schemas.SchemaContainer; import lombok.extern.slf4j.Slf4j; import org.apache.avro.generic.GenericRecord; -import org.apache.avro.generic.IndexedRecord; -import org.apache.avro.specific.SpecificRecordBase; import javax.annotation.Nullable; import java.io.InputStream; @@ -82,8 +80,7 @@ public static Serializer avroSerializer(SerializerConfig config, AvroSche * @param Type of event. The typed event should be an avro generated class. For generic type use {@link #avroGenericDeserializer} * @return A deserializer Implementation that can be used in {@link io.pravega.client.stream.EventStreamReader}. */ - public static Serializer avroDeserializer(SerializerConfig config, - AvroSchema schemaData) { + public static Serializer avroDeserializer(SerializerConfig config, AvroSchema schemaData) { Preconditions.checkNotNull(config); Preconditions.checkNotNull(schemaData); SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? @@ -132,8 +129,7 @@ public static Serializer avroGenericDeserializer(SerializerConfig * @param Base Type of schemas. * @return a Serializer which can serialize events of different types for which schemas are supplied. */ - public static Serializer avroMultiTypeSerializer(SerializerConfig config, - Map, AvroSchema> schemas) { + public static Serializer avroMultiTypeSerializer(SerializerConfig config, Map, AvroSchema> schemas) { Preconditions.checkNotNull(config); Preconditions.checkNotNull(schemas); @@ -159,7 +155,7 @@ public static Serializer avroMultiTypeSerializer(Se * @param Base type of schemas. * @return a Deserializer which can deserialize events of different types in the stream into typed objects. */ - public static Serializer avroMultiTypeDeserializer( + public static Serializer avroMultiTypeDeserializer( SerializerConfig config, Map, AvroSchema> schemas) { Preconditions.checkNotNull(config); Preconditions.checkNotNull(schemas); @@ -190,7 +186,7 @@ public static Serializer avroMultiTypeDeserial * @return a Deserializer which can deserialize events of different types in the stream into typed objects or a generic * object */ - public static Serializer> avroTypedOrGenericDeserializer( + public static Serializer> avroTypedOrGenericDeserializer( SerializerConfig config, Map, AvroSchema> schemas) { Preconditions.checkNotNull(config); Preconditions.checkNotNull(schemas); @@ -446,6 +442,27 @@ public static Serializer jsonGenericDeserializer(SerializerCo encodingCache); } + /** + * Creates a generic json deserializer which deserializes bytes into a json string. + * + * Note: the returned serializer only implements {@link Serializer#deserialize(ByteBuffer)}. + * It does not implement {@link Serializer#serialize(Object)}. + * + * @param config Serializer Config used for instantiating a new serializer. + * @return A deserializer Implementation that can be used in {@link io.pravega.client.stream.EventStreamReader}. + */ + public static Serializer jsonStringDeserializer(SerializerConfig config) { + SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? 
+ SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : + config.getRegistryConfigOrClient().getRight(); + + String groupId = config.getGroupId(); + + EncodingCache encodingCache = EncodingCache.getEncodingCacheForGroup(groupId, schemaRegistryClient); + + return new JsonStringDeserializer(groupId, schemaRegistryClient, config.getDecoder(), encodingCache); + } + /** * A multiplexed Json serializer that takes a map of schemas and validates them individually. * diff --git a/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java b/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java index 79b8c6c0e..a4d420c9e 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java +++ b/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java @@ -9,6 +9,7 @@ */ package io.pravega.schemaregistry.serializers; +import com.google.common.base.Strings; import com.google.common.collect.ImmutableMap; import com.google.protobuf.DescriptorProtos; import com.google.protobuf.DynamicMessage; @@ -220,6 +221,11 @@ public void testJsonSerializers() { assertEquals(generic.getJsonSchema(), schema1.getSchema()); assertEquals(generic.getObject().size(), 4); + serialized = serializer.serialize(user1); + Serializer stringDeserializer = SerializerFactory.jsonStringDeserializer(config); + String str = stringDeserializer.deserialize(serialized); + assertFalse(Strings.isNullOrEmpty(str)); + // multi type DerivedUser2 user2 = new DerivedUser2("user", new Address("street", "city"), 2, "user2"); From 214ab54a69db1f0628e67f0809e4fa114448460d Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Wed, 24 Jun 2020 20:03:36 -0700 Subject: [PATCH 32/70] add test for avro reflect deserializer Signed-off-by: Shivesh Ranjan --- .../serializers/SerializerTest.java | 39 +++++++++++++++++++ 1 file changed, 39 insertions(+) diff --git a/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java b/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java index a4d420c9e..93ee7699e 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java +++ b/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java @@ -35,6 +35,9 @@ import io.pravega.schemaregistry.testobjs.generated.Test1; import io.pravega.schemaregistry.testobjs.generated.Test2; import io.pravega.test.common.AssertExtensions; +import lombok.Data; +import lombok.NoArgsConstructor; +import lombok.SneakyThrows; import org.apache.avro.generic.GenericRecord; import org.apache.avro.specific.SpecificRecordBase; import org.junit.Test; @@ -121,6 +124,32 @@ public void testAvroSerializers() { assertTrue(fallback.isRight()); } + @Test + @SneakyThrows + public void testAvroSerializersReflect() { + TestClass test1 = new TestClass("name"); + AvroSchema schema1 = AvroSchema.of(TestClass.class); + + SchemaRegistryClient client = mock(SchemaRegistryClient.class); + + SerializerConfig config = SerializerConfig.builder().registryClient(client).groupId("groupId").build(); + + VersionInfo versionInfo1 = new VersionInfo("name", 0, 0); + doAnswer(x -> GroupProperties.builder().serializationFormat(SerializationFormat.Any).build()) + .when(client).getGroupProperties(anyString()); + doAnswer(x -> versionInfo1).when(client).getVersionForSchema(anyString(), eq(schema1.getSchemaInfo())); + doAnswer(x -> new 
EncodingId(0)).when(client).getEncodingId(anyString(), eq(versionInfo1), any()); + doAnswer(x -> new EncodingInfo(versionInfo1, schema1.getSchemaInfo(), CodecFactory.NONE)).when(client).getEncodingInfo(anyString(), eq(new EncodingId(0))); + doAnswer(x -> true).when(client).canReadUsing(anyString(), any()); + + Serializer serializer = SerializerFactory.avroSerializer(config, schema1); + ByteBuffer serialized = serializer.serialize(test1); + + Serializer deserializer = SerializerFactory.avroDeserializer(config, schema1); + TestClass deserialized = deserializer.deserialize(serialized); + assertEquals(deserialized, test1); + } + @Test public void testProtobufSerializers() throws IOException { SchemaRegistryClient client = mock(SchemaRegistryClient.class); @@ -391,4 +420,14 @@ public void testNoEncodingJson() throws IOException { assertNotNull(generic.getObject()); assertNull(generic.getJsonSchema()); } + + @Data + @NoArgsConstructor + public static class TestClass { + private String test; + + public TestClass(String test) { + this.test = test; + } + } } From 2241865a45ae57d236f295b6ad107e824ae2c6ce Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Wed, 24 Jun 2020 23:57:16 -0700 Subject: [PATCH 33/70] javadoc Signed-off-by: Shivesh Ranjan --- .../schemaregistry/client/SchemaRegistryClient.java | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java index 3c1965c29..1e2ef9f44 100644 --- a/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java +++ b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java @@ -62,14 +62,19 @@ public interface SchemaRegistryClient { /** * List all groups that the user is authorized on. This returns an iterator where each element is a pair of group * name and group properties. - * This iterator can be used to iterate over each element until all elements are exhausted. - * The implementation should guarantee that all groups added before the iterator {@link Iterator#hasNext()} = false - * will be included. + * The list group is a non atomic call. The implementation is not necessarily consistent as it uses paginated + * iteration using Continuation Token. This could mean that as the list is being iterated over, the state on the server + * may be updated (some groups added or removed). For example, if a group that has been iterated over is deleted + * and recereated, the iterator may deliver a group with identical name twice. Similarly, If a group that has not yet been + * iterated over is deleted, the client may or may not see the group as it is iterating over the response depending on + * whether the client had received the deleted group from service before it was deleted or not. + * This iterator can be used to iterate over each element until all elements are exhausted and gives a weak guarantee + * that all groups added before the iterator {@link Iterator#hasNext()} = false can be iterated over. * @return map of names of groups with corresponding group properties for all groups. * @throws UnauthorizedException if the user is unauthorized. */ Iterator> listGroups() throws UnauthorizedException; - + /** * Get group properties for the group identified by the group id. 
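The reworded listGroups javadoc above describes a paginated, weakly consistent iteration. In practice that means treating the iterator as a best-effort snapshot rather than an exact listing. A sketch of that usage follows, assuming the iterator element type is Map.Entry of group name to GroupProperties, which is what the javadoc's name/properties pair suggests.

    import java.util.Iterator;
    import java.util.Map;
    import io.pravega.schemaregistry.client.SchemaRegistryClient;
    import io.pravega.schemaregistry.contract.data.GroupProperties;

    class ListGroupsSketch {
        // Groups created or deleted while iterating may be missed or seen twice,
        // so callers should tolerate duplicates and absences in the output.
        static void printGroups(SchemaRegistryClient client) {
            Iterator<Map.Entry<String, GroupProperties>> groups = client.listGroups();
            while (groups.hasNext()) {
                Map.Entry<String, GroupProperties> entry = groups.next();
                System.out.println(entry.getKey() + " -> " + entry.getValue());
            }
        }
    }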
* From 87c9ebcbdfe1f0a7a7a59b6e080d64f47b540833 Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Thu, 25 Jun 2020 01:37:30 -0700 Subject: [PATCH 34/70] make cache package private Signed-off-by: Shivesh Ranjan --- .../schemaregistry/schemas/AvroSchema.java | 18 +++++++++- .../schemaregistry/schemas/JSONSchema.java | 19 +++++++--- .../schemas/ProtobufSchema.java | 10 +++--- .../AbstractPravegaDeserializer.java | 1 - .../serializers/AvroDeserlizer.java | 1 - .../serializers/AvroGenericDeserlizer.java | 1 - .../{cache => serializers}/EncodingCache.java | 21 ++--------- .../serializers/JsonDeserlizer.java | 1 - .../serializers/JsonGenericDeserlizer.java | 1 - .../serializers/JsonStringDeserializer.java | 1 - .../MultipleFormatGenericDeserializer.java | 1 - .../MultipleFormatJsonStringDeserializer.java | 1 - .../MultiplexedAndGenericDeserializer.java | 1 - .../serializers/MultiplexedDeserializer.java | 1 - .../serializers/ProtobufDeserlizer.java | 1 - .../ProtobufGenericDeserlizer.java | 1 - .../serializers/SerializerFactory.java | 35 +++++++++---------- .../schemaregistry/schemas/TestSchemas.java | 8 ++--- .../{cache => serializers}/CacheTest.java | 7 ++-- .../serializers/SerializerTest.java | 8 ++--- 20 files changed, 69 insertions(+), 69 deletions(-) rename serializers/src/main/java/io/pravega/schemaregistry/{cache => serializers}/EncodingCache.java (66%) rename serializers/src/test/java/io/pravega/schemaregistry/{cache => serializers}/CacheTest.java (87%) diff --git a/serializers/src/main/java/io/pravega/schemaregistry/schemas/AvroSchema.java b/serializers/src/main/java/io/pravega/schemaregistry/schemas/AvroSchema.java index 530929f42..73544bec0 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/schemas/AvroSchema.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/schemas/AvroSchema.java @@ -78,6 +78,22 @@ public static AvroSchema of(Schema schema) { return new AvroSchema<>(schema, null); } + /** + * It is same as {@link #of(Class)} except that it generates an AvroSchema typed as T. + * + * This is useful for supplying a map of Avro schemas for multiplexed serializers and deserializers. + * + * @param tDerived Class whose schema should be used. + * @param tBase Base class for the typed AvroSchema object. + * @param Type of base class. + * @return Returns an AvroSchema with T type. + */ + public static AvroSchema ofBaseType(Class tDerived, Class tBase) { + Preconditions.checkArgument(tBase.isAssignableFrom(tDerived)); + + return new AvroSchema<>(ReflectData.get().getSchema(tDerived), tBase); + } + /** * It is same as {@link #of(Class)} except that it generates an AvroSchema typed as {@link SpecificRecordBase}. * @@ -87,7 +103,7 @@ public static AvroSchema of(Schema schema) { * @param Type of class whose schema is to be used. * @return Returns an AvroSchema with {@link SpecificRecordBase} type. 
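The new AvroSchema.ofBaseType above types a reflect-derived schema against a chosen base class, which is what lets a multiplexed serializer accept a map keyed by concrete classes but valued as AvroSchema of the base type. The sketch below illustrates that shape; UserEvent, UserAdded and UserRemoved are illustrative POJOs, and the generic bounds follow what the javadoc above implies.

    import java.util.HashMap;
    import java.util.Map;
    import io.pravega.schemaregistry.schemas.AvroSchema;

    class MultiplexedAvroSchemas {
        // Illustrative event hierarchy (not part of the patch).
        static class UserEvent { }
        static class UserAdded extends UserEvent { private String name; }
        static class UserRemoved extends UserEvent { private String name; }

        // Every value is typed as AvroSchema<UserEvent>, so one multiplexed
        // serializer can handle both concrete event classes.
        static Map<Class<? extends UserEvent>, AvroSchema<UserEvent>> schemas() {
            Map<Class<? extends UserEvent>, AvroSchema<UserEvent>> map = new HashMap<>();
            map.put(UserAdded.class, AvroSchema.ofBaseType(UserAdded.class, UserEvent.class));
            map.put(UserRemoved.class, AvroSchema.ofBaseType(UserRemoved.class, UserEvent.class));
            return map;
        }
    }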
*/ - public static AvroSchema ofBaseType(Class tClass) { + public static AvroSchema ofSpecificRecord(Class tClass) { Preconditions.checkArgument(SpecificRecordBase.class.isAssignableFrom(tClass)); return new AvroSchema<>(SpecificData.get().getSchema(tClass), SpecificRecordBase.class); diff --git a/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java b/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java index f07dc0679..3015d7242 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java @@ -20,6 +20,7 @@ import io.pravega.schemaregistry.contract.data.SerializationFormat; import lombok.Getter; import lombok.SneakyThrows; +import org.apache.avro.specific.SpecificRecordBase; import java.nio.ByteBuffer; @@ -96,14 +97,24 @@ public static JSONSchema of(String type, String schemaString) { return new JSONSchema<>(schema, type, schemaString, Object.class); } + /** + * It is same as {@link #of(Class)} except that it generates an JSONSchema typed as supplied base type T. + * + * This is useful for supplying a map of POJO schemas for multiplexed serializers and deserializers. + * + * @param tBase Base class whose type is used in the JSON schema object. + * @param tDerived Class whose schema should be used. + * @param Type of base class. + * @return Returns an AvroSchema with {@link SpecificRecordBase} type. + */ @SneakyThrows({JsonMappingException.class, JsonProcessingException.class}) - public static JSONSchema ofBaseType(Class tDerivedClass, Class tClass) { + public static JSONSchema ofBaseType(Class tDerived, Class tBase) { ObjectMapper objectMapper = new ObjectMapper(); JsonSchemaGenerator schemaGen = new JsonSchemaGenerator(objectMapper); - JsonSchema schema = schemaGen.generateSchema(tDerivedClass); + JsonSchema schema = schemaGen.generateSchema(tDerived); String schemaString = objectMapper.writeValueAsString(schema); - - return new JSONSchema<>(schema, tDerivedClass.getSimpleName(), schemaString, tClass, tDerivedClass); + + return new JSONSchema<>(schema, tDerived.getSimpleName(), schemaString, tBase, tDerived); } /** diff --git a/serializers/src/main/java/io/pravega/schemaregistry/schemas/ProtobufSchema.java b/serializers/src/main/java/io/pravega/schemaregistry/schemas/ProtobufSchema.java index 8d327b59d..2f5fedd0c 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/schemas/ProtobufSchema.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/schemas/ProtobufSchema.java @@ -72,7 +72,8 @@ public SchemaInfo getSchemaInfo() { */ @SneakyThrows @SuppressWarnings("unchecked") - public static ProtobufSchema of(Class tClass, DescriptorProtos.FileDescriptorSet fileDescriptorSet) { + public static ProtobufSchema of(Class tClass, + DescriptorProtos.FileDescriptorSet fileDescriptorSet) { T defaultInstance = (T) tClass.getMethod("getDefaultInstance").invoke(null); Parser tParser = (Parser) defaultInstance.getParserForType(); return new ProtobufSchema<>(defaultInstance.getDescriptorForType().getName(), tParser, fileDescriptorSet); @@ -100,15 +101,16 @@ public static ProtobufSchema of(String name, DescriptorProtos.Fi * typed {@link GeneratedMessageV3}. * It is useful in multiplexed deserializer to pass all objects to deserialize into as base {@link GeneratedMessageV3} objects. * - * @param tDerivedClass Class for code generated protobuf message. + * @param tClass Class for code generated protobuf message. 
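JSONSchema.ofBaseType gets the same treatment for JSON POJOs: the schema is generated from the derived class while the object is typed against the base class, and the schema's type name now defaults to the derived class's simple name. A brief sketch under those assumptions, reusing the illustrative event classes from the Avro example above:

    import io.pravega.schemaregistry.schemas.JSONSchema;

    class MultiplexedJsonSchemas {
        static class UserEvent { }
        static class UserAdded extends UserEvent { public String name; }

        static void demo() {
            // Schema generated from UserAdded, object typed as UserEvent.
            JSONSchema<UserEvent> schema = JSONSchema.ofBaseType(UserAdded.class, UserEvent.class);
            System.out.println(schema.getSchemaString());
        }
    }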
* @param fileDescriptorSet file descriptor set representing a protobuf schema. * @param Type of protobuf message * @return {@link ProtobufSchema} with generic type {@link GeneratedMessageV3} that captures protobuf schema and parser of type T. */ @SneakyThrows @SuppressWarnings("unchecked") - public static ProtobufSchema ofBaseType(Class tDerivedClass, DescriptorProtos.FileDescriptorSet fileDescriptorSet) { - T defaultInstance = (T) tDerivedClass.getMethod("getDefaultInstance").invoke(null); + public static ProtobufSchema ofGeneratedMessageV3( + Class tClass, DescriptorProtos.FileDescriptorSet fileDescriptorSet) { + T defaultInstance = (T) tClass.getMethod("getDefaultInstance").invoke(null); Parser tParser = (Parser) defaultInstance.getParserForType(); return new ProtobufSchema<>(defaultInstance.getDescriptorForType().getName(), tParser, fileDescriptorSet); diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractPravegaDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractPravegaDeserializer.java index d25b59ac7..1d467ccc5 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractPravegaDeserializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractPravegaDeserializer.java @@ -10,7 +10,6 @@ package io.pravega.schemaregistry.serializers; import io.pravega.client.stream.Serializer; -import io.pravega.schemaregistry.cache.EncodingCache; import io.pravega.schemaregistry.client.SchemaRegistryClient; import io.pravega.schemaregistry.contract.data.EncodingId; import io.pravega.schemaregistry.contract.data.EncodingInfo; diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroDeserlizer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroDeserlizer.java index 8c986e1fb..a169b2ce8 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroDeserlizer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroDeserlizer.java @@ -14,7 +14,6 @@ import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; -import io.pravega.schemaregistry.cache.EncodingCache; import io.pravega.schemaregistry.client.SchemaRegistryClient; import io.pravega.schemaregistry.contract.data.SchemaInfo; import io.pravega.schemaregistry.schemas.AvroSchema; diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroGenericDeserlizer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroGenericDeserlizer.java index 6b6164ede..409fa72c7 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroGenericDeserlizer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroGenericDeserlizer.java @@ -13,7 +13,6 @@ import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; -import io.pravega.schemaregistry.cache.EncodingCache; import io.pravega.schemaregistry.client.SchemaRegistryClient; import io.pravega.schemaregistry.contract.data.SchemaInfo; import io.pravega.schemaregistry.schemas.AvroSchema; diff --git a/serializers/src/main/java/io/pravega/schemaregistry/cache/EncodingCache.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/EncodingCache.java similarity index 66% rename from serializers/src/main/java/io/pravega/schemaregistry/cache/EncodingCache.java rename to 
serializers/src/main/java/io/pravega/schemaregistry/serializers/EncodingCache.java index b1a336a5f..f8bd3b1d7 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/cache/EncodingCache.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/EncodingCache.java @@ -7,7 +7,7 @@ * * http://www.apache.org/licenses/LICENSE-2.0 */ -package io.pravega.schemaregistry.cache; +package io.pravega.schemaregistry.serializers; import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; @@ -17,21 +17,16 @@ import io.pravega.schemaregistry.contract.data.EncodingInfo; import lombok.Data; import lombok.SneakyThrows; -import lombok.Synchronized; -import java.util.HashMap; -import java.util.Map; import java.util.concurrent.ExecutionException; /** * Local cache for storing schemas that are retrieved from the registry service. */ public class EncodingCache { - private static final Map GROUP_CACHE_MAP = new HashMap<>(); - private final LoadingCache encodingCache; - private EncodingCache(String groupId, SchemaRegistryClient schemaRegistryClient) { + EncodingCache(String groupId, SchemaRegistryClient schemaRegistryClient) { encodingCache = CacheBuilder.newBuilder().build(new CacheLoader() { @Override public EncodingInfo load(EncodingId key) { @@ -44,18 +39,6 @@ public EncodingInfo load(EncodingId key) { public EncodingInfo getGroupEncodingInfo(EncodingId encodingId) { return encodingCache.get(encodingId); } - - @Synchronized - public static EncodingCache getEncodingCacheForGroup(String groupId, SchemaRegistryClient schemaRegistryClient) { - Key key = new Key(schemaRegistryClient, groupId); - if (GROUP_CACHE_MAP.containsKey(key)) { - return GROUP_CACHE_MAP.get(key); - } else { - EncodingCache value = new EncodingCache(groupId, schemaRegistryClient); - GROUP_CACHE_MAP.put(key, value); - return value; - } - } @Data private static class Key { diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonDeserlizer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonDeserlizer.java index 899f2cbb3..b6c174c88 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonDeserlizer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonDeserlizer.java @@ -13,7 +13,6 @@ import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Preconditions; -import io.pravega.schemaregistry.cache.EncodingCache; import io.pravega.schemaregistry.client.SchemaRegistryClient; import io.pravega.schemaregistry.contract.data.SchemaInfo; import io.pravega.schemaregistry.schemas.JSONSchema; diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericDeserlizer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericDeserlizer.java index be0240c03..dac65c02b 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericDeserlizer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericDeserlizer.java @@ -17,7 +17,6 @@ import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; -import io.pravega.schemaregistry.cache.EncodingCache; import io.pravega.schemaregistry.client.SchemaRegistryClient; import io.pravega.schemaregistry.contract.data.SchemaInfo; import io.pravega.schemaregistry.schemas.JSONSchema; diff --git 
a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonStringDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonStringDeserializer.java index 845eeb851..91c35bd26 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonStringDeserializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonStringDeserializer.java @@ -13,7 +13,6 @@ import com.fasterxml.jackson.annotation.PropertyAccessor; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; -import io.pravega.schemaregistry.cache.EncodingCache; import io.pravega.schemaregistry.client.SchemaRegistryClient; import io.pravega.schemaregistry.contract.data.SchemaInfo; import lombok.SneakyThrows; diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatGenericDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatGenericDeserializer.java index cb71cf78e..bd28f00e2 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatGenericDeserializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatGenericDeserializer.java @@ -10,7 +10,6 @@ package io.pravega.schemaregistry.serializers; import com.google.common.base.Preconditions; -import io.pravega.schemaregistry.cache.EncodingCache; import io.pravega.schemaregistry.client.SchemaRegistryClient; import io.pravega.schemaregistry.contract.data.SchemaInfo; import io.pravega.schemaregistry.contract.data.SerializationFormat; diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatJsonStringDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatJsonStringDeserializer.java index 10a816293..01583c871 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatJsonStringDeserializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatJsonStringDeserializer.java @@ -13,7 +13,6 @@ import com.google.common.base.Preconditions; import com.google.protobuf.DynamicMessage; import com.google.protobuf.util.JsonFormat; -import io.pravega.schemaregistry.cache.EncodingCache; import io.pravega.schemaregistry.client.SchemaRegistryClient; import io.pravega.schemaregistry.contract.data.SchemaInfo; import io.pravega.schemaregistry.contract.data.SerializationFormat; diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedAndGenericDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedAndGenericDeserializer.java index 28c3bdf67..08c9bc8dc 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedAndGenericDeserializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedAndGenericDeserializer.java @@ -10,7 +10,6 @@ package io.pravega.schemaregistry.serializers; import com.google.common.base.Preconditions; -import io.pravega.schemaregistry.cache.EncodingCache; import io.pravega.schemaregistry.client.SchemaRegistryClient; import io.pravega.schemaregistry.common.Either; import io.pravega.schemaregistry.contract.data.SchemaInfo; diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedDeserializer.java index 6a1ddb599..c0c2ba023 
100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedDeserializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedDeserializer.java @@ -10,7 +10,6 @@ package io.pravega.schemaregistry.serializers; import com.google.common.base.Preconditions; -import io.pravega.schemaregistry.cache.EncodingCache; import io.pravega.schemaregistry.client.SchemaRegistryClient; import io.pravega.schemaregistry.contract.data.SchemaInfo; import org.apache.commons.lang3.SerializationException; diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufDeserlizer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufDeserlizer.java index a2bd0c9ac..5085e871f 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufDeserlizer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufDeserlizer.java @@ -11,7 +11,6 @@ import com.google.common.base.Preconditions; import com.google.protobuf.GeneratedMessageV3; -import io.pravega.schemaregistry.cache.EncodingCache; import io.pravega.schemaregistry.client.SchemaRegistryClient; import io.pravega.schemaregistry.contract.data.SchemaInfo; import io.pravega.schemaregistry.schemas.ProtobufSchema; diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufGenericDeserlizer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufGenericDeserlizer.java index 8740dafb7..f2cf06f76 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufGenericDeserlizer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufGenericDeserlizer.java @@ -16,7 +16,6 @@ import com.google.protobuf.DescriptorProtos; import com.google.protobuf.Descriptors; import com.google.protobuf.DynamicMessage; -import io.pravega.schemaregistry.cache.EncodingCache; import io.pravega.schemaregistry.client.SchemaRegistryClient; import io.pravega.schemaregistry.common.NameUtil; import io.pravega.schemaregistry.contract.data.SchemaInfo; diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java index 5ec6083f7..a8dac6c6f 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java @@ -14,7 +14,6 @@ import com.google.protobuf.GeneratedMessageV3; import com.google.protobuf.Message; import io.pravega.client.stream.Serializer; -import io.pravega.schemaregistry.cache.EncodingCache; import io.pravega.schemaregistry.client.SchemaRegistryClientFactory; import io.pravega.schemaregistry.client.SchemaRegistryClient; import io.pravega.schemaregistry.common.Either; @@ -91,7 +90,7 @@ public static Serializer avroDeserializer(SerializerConfig config, AvroSc autoCreateGroup(schemaRegistryClient, config); failOnCodecMismatch(schemaRegistryClient, config); - EncodingCache encodingCache = EncodingCache.getEncodingCacheForGroup(groupId, schemaRegistryClient); + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); return new AvroDeserlizer<>(groupId, schemaRegistryClient, schemaData, config.getDecoder(), encodingCache); } @@ -116,7 +115,7 @@ public static Serializer avroGenericDeserializer(SerializerConfig config.getRegistryConfigOrClient().getRight(); 
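With the static getEncodingCacheForGroup registry removed, each factory method above now builds its own EncodingCache from the group id and client. The cache itself is a Guava LoadingCache keyed by encoding id; a reduced sketch of that shape follows, with String standing in for EncodingInfo and the registry lookup stubbed out.

    import com.google.common.cache.CacheBuilder;
    import com.google.common.cache.CacheLoader;
    import com.google.common.cache.LoadingCache;

    class EncodingCacheSketch {
        private final LoadingCache<Integer, String> encodingCache;

        EncodingCacheSketch(String groupId) {
            this.encodingCache = CacheBuilder.newBuilder().build(new CacheLoader<Integer, String>() {
                @Override
                public String load(Integer encodingId) {
                    // The real cache calls schemaRegistryClient.getEncodingInfo(groupId, encodingId) here.
                    return "encoding-info-for-" + groupId + "/" + encodingId;
                }
            });
        }

        String getGroupEncodingInfo(int encodingId) {
            // getUnchecked is safe because the loader throws no checked exceptions.
            return encodingCache.getUnchecked(encodingId);
        }
    }

Scoping the cache per serializer instance trades a little duplicate caching across serializers for not holding client references in a process-wide static map.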
autoCreateGroup(schemaRegistryClient, config); failOnCodecMismatch(schemaRegistryClient, config); - EncodingCache encodingCache = EncodingCache.getEncodingCacheForGroup(groupId, schemaRegistryClient); + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); return new AvroGenericDeserlizer(groupId, schemaRegistryClient, schemaData, config.getDecoder(), encodingCache); } @@ -167,7 +166,7 @@ public static Serializer avroMultiTypeDeserializer( autoCreateGroup(schemaRegistryClient, config); failOnCodecMismatch(schemaRegistryClient, config); - EncodingCache encodingCache = EncodingCache.getEncodingCacheForGroup(groupId, schemaRegistryClient); + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); Map> deserializerMap = schemas .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), @@ -198,7 +197,7 @@ public static Serializer> avroTypedOrGenericDeseria autoCreateGroup(schemaRegistryClient, config); failOnCodecMismatch(schemaRegistryClient, config); - EncodingCache encodingCache = EncodingCache.getEncodingCacheForGroup(groupId, schemaRegistryClient); + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); Map> deserializerMap = schemas .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), @@ -258,7 +257,7 @@ public static Serializer protobufDeserializer( autoCreateGroup(schemaRegistryClient, config); failOnCodecMismatch(schemaRegistryClient, config); - EncodingCache encodingCache = EncodingCache.getEncodingCacheForGroup(groupId, schemaRegistryClient); + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); // schema can be null in which case deserialization will happen into dynamic message return new ProtobufDeserlizer<>(groupId, schemaRegistryClient, schemaData, config.getDecoder(), encodingCache); @@ -284,7 +283,7 @@ public static Serializer protobufGenericDeserializer(SerializerC autoCreateGroup(schemaRegistryClient, config); failOnCodecMismatch(schemaRegistryClient, config); - EncodingCache encodingCache = EncodingCache.getEncodingCacheForGroup(groupId, schemaRegistryClient); + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); return new ProtobufGenericDeserlizer(groupId, schemaRegistryClient, schema, config.getDecoder(), encodingCache); } @@ -330,7 +329,7 @@ public static Serializer protobufMultiTypeDese autoCreateGroup(schemaRegistryClient, config); failOnCodecMismatch(schemaRegistryClient, config); - EncodingCache encodingCache = EncodingCache.getEncodingCacheForGroup(groupId, schemaRegistryClient); + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); Map> deserializerMap = schemas .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), @@ -356,7 +355,7 @@ public static Serializer> deserializerMap = schemas .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), @@ -414,7 +413,7 @@ public static Serializer jsonDeserializer(SerializerConfig config, JSONSc autoCreateGroup(schemaRegistryClient, config); failOnCodecMismatch(schemaRegistryClient, config); - EncodingCache encodingCache = EncodingCache.getEncodingCacheForGroup(groupId, schemaRegistryClient); + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); // schema can be null in which case deserialization will happen into dynamic message return new JsonDeserlizer<>(groupId, schemaRegistryClient, schemaData, config.getDecoder(), encodingCache); @@ -436,7 +435,7 @@ 
public static Serializer jsonGenericDeserializer(SerializerCo String groupId = config.getGroupId(); - EncodingCache encodingCache = EncodingCache.getEncodingCacheForGroup(groupId, schemaRegistryClient); + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); return new JsonGenericDeserlizer(groupId, schemaRegistryClient, config.getDecoder(), encodingCache); @@ -458,7 +457,7 @@ public static Serializer jsonStringDeserializer(SerializerConfig config) String groupId = config.getGroupId(); - EncodingCache encodingCache = EncodingCache.getEncodingCacheForGroup(groupId, schemaRegistryClient); + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); return new JsonStringDeserializer(groupId, schemaRegistryClient, config.getDecoder(), encodingCache); } @@ -502,7 +501,7 @@ public static Serializer jsonMultiTypeDeserializer( SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : config.getRegistryConfigOrClient().getRight(); - EncodingCache encodingCache = EncodingCache.getEncodingCacheForGroup(groupId, schemaRegistryClient); + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); Map> deserializerMap = schemas .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), @@ -530,7 +529,7 @@ public static Serializer> jsonTypedOrGenericDes autoCreateGroup(schemaRegistryClient, config); failOnCodecMismatch(schemaRegistryClient, config); - EncodingCache encodingCache = EncodingCache.getEncodingCacheForGroup(groupId, schemaRegistryClient); + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); Map> deserializerMap = schemas .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), @@ -592,7 +591,7 @@ public static Serializer customDeserializer(SerializerConfig config, @Nul autoCreateGroup(schemaRegistryClient, config); failOnCodecMismatch(schemaRegistryClient, config); - EncodingCache encodingCache = EncodingCache.getEncodingCacheForGroup(groupId, schemaRegistryClient); + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); return new AbstractPravegaDeserializer(groupId, schemaRegistryClient, schema, false, config.getDecoder(), encodingCache) { @@ -620,7 +619,7 @@ public static Serializer multiFormatGenericDeserializer(SerializerConfig config.getRegistryConfigOrClient().getRight(); autoCreateGroup(schemaRegistryClient, config); failOnCodecMismatch(schemaRegistryClient, config); - EncodingCache encodingCache = EncodingCache.getEncodingCacheForGroup(groupId, schemaRegistryClient); + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); AbstractPravegaDeserializer json = new JsonGenericDeserlizer(config.getGroupId(), schemaRegistryClient, config.getDecoder(), encodingCache); @@ -653,7 +652,7 @@ public static Serializer multiFormatGenericDeserializer(SerializerConfig config.getRegistryConfigOrClient().getRight(); autoCreateGroup(schemaRegistryClient, config); failOnCodecMismatch(schemaRegistryClient, config); - EncodingCache encodingCache = EncodingCache.getEncodingCacheForGroup(groupId, schemaRegistryClient); + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); AbstractPravegaDeserializer json = new JsonGenericDeserlizer(config.getGroupId(), schemaRegistryClient, config.getDecoder(), encodingCache); @@ -694,7 +693,7 @@ public static Serializer deserializeAsJsonString(SerializerConfig config config.getRegistryConfigOrClient().getRight(); 
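A minimal sketch of the cache wiring that the hunks above switch to: each factory method now constructs its own EncodingCache instead of fetching a shared instance via EncodingCache.getEncodingCacheForGroup. The mocked client and group id are illustrative, and the sketch is assumed to live in the io.pravega.schemaregistry.serializers package (the CacheTest move later in this patch suggests the cache class is package-private).

package io.pravega.schemaregistry.serializers;

import io.pravega.schemaregistry.client.SchemaRegistryClient;

import static org.mockito.Mockito.mock;

class EncodingCacheWiringSketch {
    void sketch() {
        SchemaRegistryClient client = mock(SchemaRegistryClient.class); // stand-in registry client
        String groupId = "my-group";                                    // hypothetical group id
        // before this patch: EncodingCache.getEncodingCacheForGroup(groupId, client)
        EncodingCache cache = new EncodingCache(groupId, client);       // one cache per serializer instance
    }
}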
autoCreateGroup(schemaRegistryClient, config); failOnCodecMismatch(schemaRegistryClient, config); - EncodingCache encodingCache = EncodingCache.getEncodingCacheForGroup(groupId, schemaRegistryClient); + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); AbstractPravegaDeserializer json = new JsonGenericDeserlizer(config.getGroupId(), schemaRegistryClient, config.getDecoder(), encodingCache); diff --git a/serializers/src/test/java/io/pravega/schemaregistry/schemas/TestSchemas.java b/serializers/src/test/java/io/pravega/schemaregistry/schemas/TestSchemas.java index ea6b3ef95..b51cdca3b 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/schemas/TestSchemas.java +++ b/serializers/src/test/java/io/pravega/schemaregistry/schemas/TestSchemas.java @@ -47,11 +47,11 @@ public void testAvroSchema() { assertNotNull(schema3.getSchema()); assertEquals(schema3.getSchemaInfo().getSerializationFormat(), SerializationFormat.Avro); - AvroSchema schemabase1 = AvroSchema.ofBaseType(Test1.class); + AvroSchema schemabase1 = AvroSchema.ofSpecificRecord(Test1.class); assertNotNull(schemabase1.getSchema()); assertEquals(schemabase1.getSchemaInfo().getSerializationFormat(), SerializationFormat.Avro); - AvroSchema schemabase2 = AvroSchema.ofBaseType(Test2.class); + AvroSchema schemabase2 = AvroSchema.ofSpecificRecord(Test2.class); assertNotNull(schemabase2.getSchema()); assertEquals(schemabase2.getSchemaInfo().getSerializationFormat(), SerializationFormat.Avro); } @@ -72,12 +72,12 @@ public void testProtobufSchema() throws IOException { assertNotNull(schema2.getDescriptorProto()); assertEquals(schema2.getSchemaInfo().getSerializationFormat(), SerializationFormat.Protobuf); - ProtobufSchema baseSchema1 = ProtobufSchema.ofBaseType(ProtobufTest.Message1.class, descriptorSet); + ProtobufSchema baseSchema1 = ProtobufSchema.ofGeneratedMessageV3(ProtobufTest.Message1.class, descriptorSet); assertNotNull(baseSchema1.getParser()); assertNotNull(baseSchema1.getDescriptorProto()); assertEquals(baseSchema1.getSchemaInfo().getSerializationFormat(), SerializationFormat.Protobuf); - ProtobufSchema baseSchema2 = ProtobufSchema.ofBaseType(ProtobufTest.Message2.class, descriptorSet); + ProtobufSchema baseSchema2 = ProtobufSchema.ofGeneratedMessageV3(ProtobufTest.Message2.class, descriptorSet); assertNotNull(baseSchema2.getParser()); assertNotNull(baseSchema2.getDescriptorProto()); assertEquals(baseSchema2.getSchemaInfo().getSerializationFormat(), SerializationFormat.Protobuf); diff --git a/serializers/src/test/java/io/pravega/schemaregistry/cache/CacheTest.java b/serializers/src/test/java/io/pravega/schemaregistry/serializers/CacheTest.java similarity index 87% rename from serializers/src/test/java/io/pravega/schemaregistry/cache/CacheTest.java rename to serializers/src/test/java/io/pravega/schemaregistry/serializers/CacheTest.java index bdb84cf30..d0eea92f3 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/cache/CacheTest.java +++ b/serializers/src/test/java/io/pravega/schemaregistry/serializers/CacheTest.java @@ -7,7 +7,7 @@ * * http://www.apache.org/licenses/LICENSE-2.0 */ -package io.pravega.schemaregistry.cache; +package io.pravega.schemaregistry.serializers; import com.google.common.collect.ImmutableMap; import io.pravega.schemaregistry.client.SchemaRegistryClient; @@ -33,9 +33,10 @@ public void testCache() { String groupId = "groupId"; EncodingId encodingId = new EncodingId(0); EncodingInfo encodingInfo = new EncodingInfo(new VersionInfo("name", 0, 0), - new 
SchemaInfo("name", SerializationFormat.Avro, ByteBuffer.wrap(new byte[0]), ImmutableMap.of()), CodecFactory.snappy().getCodecType()); + new SchemaInfo("name", SerializationFormat.Avro, ByteBuffer.wrap(new byte[0]), ImmutableMap.of()), + CodecFactory.snappy().getCodecType()); doAnswer(x -> encodingInfo).when(client).getEncodingInfo(eq(groupId), eq(encodingId)); - EncodingCache cache = EncodingCache.getEncodingCacheForGroup(groupId, client); + EncodingCache cache = new EncodingCache(groupId, client); assertEquals(encodingInfo, cache.getGroupEncodingInfo(encodingId)); } } diff --git a/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java b/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java index 93ee7699e..0fba45144 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java +++ b/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java @@ -94,8 +94,8 @@ public void testAvroSerializers() { // multi type Test2 test2 = new Test2("name", 1, "2"); - AvroSchema schema1Base = AvroSchema.ofBaseType(Test1.class); - AvroSchema schema2Base = AvroSchema.ofBaseType(Test2.class); + AvroSchema schema1Base = AvroSchema.ofSpecificRecord(Test1.class); + AvroSchema schema2Base = AvroSchema.ofSpecificRecord(Test2.class); Map, AvroSchema> map = new HashMap<>(); map.put(Test1.class, schema1Base); map.put(Test2.class, schema2Base); @@ -188,8 +188,8 @@ public void testProtobufSerializers() throws IOException { // multi type ProtobufTest.Message3 message2 = ProtobufTest.Message3.newBuilder().setName("name").setField1(1).setField2(2).build(); - ProtobufSchema schema1Base = ProtobufSchema.ofBaseType(ProtobufTest.Message2.class, descriptorSet); - ProtobufSchema schema2Base = ProtobufSchema.ofBaseType(ProtobufTest.Message3.class, descriptorSet); + ProtobufSchema schema1Base = ProtobufSchema.ofGeneratedMessageV3(ProtobufTest.Message2.class, descriptorSet); + ProtobufSchema schema2Base = ProtobufSchema.ofGeneratedMessageV3(ProtobufTest.Message3.class, descriptorSet); Map, ProtobufSchema> map = new HashMap<>(); map.put(ProtobufTest.Message2.class, schema1Base); map.put(ProtobufTest.Message3.class, schema2Base); From f8fd8adf2c82cd7e9236c7238e47a648dfdfb57b Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Thu, 25 Jun 2020 02:14:49 -0700 Subject: [PATCH 35/70] javadocs Signed-off-by: Shivesh Ranjan --- .../schemaregistry/schemas/JSONSchema.java | 13 ++- .../serializers/JsonDeserlizer.java | 2 +- .../serializers/SerializerConfig.java | 99 +++++++++++++++---- 3 files changed, 87 insertions(+), 27 deletions(-) diff --git a/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java b/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java index 3015d7242..aef0cf8f5 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java @@ -32,10 +32,9 @@ public class JSONSchema implements SchemaContainer { @Getter private final String schemaString; + private final Class base; @Getter - private final Class tClass; - @Getter - private final Class tDerivedClass; + private final Class tClass; @Getter private final JsonSchema schema; @@ -46,22 +45,22 @@ private JSONSchema(JsonSchema schema, String name, String schemaString, Class this(schema, name, schemaString, tClass, tClass); } - private JSONSchema(JsonSchema schema, String name, String schemaString, Class tClass, Class 
tDerivedClass) { + private JSONSchema(JsonSchema schema, String name, String schemaString, Class base, Class derived) { String type = name != null ? name : schema.getId(); // Add empty name if the name is not supplied and cannot be extracted from the json schema id. type = type != null ? type : ""; this.schemaString = schemaString; this.schemaInfo = new SchemaInfo(type, SerializationFormat.Json, getSchemaBytes(), ImmutableMap.of()); - this.tClass = tClass; - this.tDerivedClass = tDerivedClass; + this.base = base; + this.tClass = derived; this.schema = schema; } private JSONSchema(SchemaInfo schemaInfo, JsonSchema schema, String schemaString, Class tClass) { this.schemaString = schemaString; this.schemaInfo = schemaInfo; + this.base = tClass; this.tClass = tClass; - this.tDerivedClass = tClass; this.schema = schema; } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonDeserlizer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonDeserlizer.java index b6c174c88..f8d56982c 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonDeserlizer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonDeserlizer.java @@ -42,6 +42,6 @@ class JsonDeserlizer extends AbstractPravegaDeserializer { @SneakyThrows({JsonProcessingException.class, IOException.class}) @Override protected T deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) { - return objectMapper.readValue(inputStream, jsonSchema.getTDerivedClass()); + return objectMapper.readValue(inputStream, jsonSchema.getTClass()); } } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java index 7f526aeef..b5364e283 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java @@ -10,6 +10,7 @@ package io.pravega.schemaregistry.serializers; import com.google.common.base.Preconditions; +import com.google.common.base.Strings; import io.pravega.schemaregistry.client.SchemaRegistryClient; import io.pravega.schemaregistry.client.SchemaRegistryClientConfig; import io.pravega.schemaregistry.codec.Codec; @@ -41,55 +42,70 @@ public class SerializerConfig { private final static Codec NOOP = CodecFactory.none(); private final static Codec GZIP = CodecFactory.gzip(); private final static Codec SNAPPY = CodecFactory.snappy(); - - + /** - * Name of the group. + * Name of the group. */ private final String groupId; /** * Either the registry client or the {@link SchemaRegistryClientConfig} that can be used for creating a new registry client. - * Exactly one of the two option has to be supplied. + * Exactly one of the two option has to be supplied. */ private final Either registryConfigOrClient; /** - * Flag to tell the serializer if the schema should be automatically registered before using it in {@link io.pravega.client.stream.EventStreamWriter}. + * Flag to tell the serializer if the schema should be automatically registered before using it in {@link io.pravega.client.stream.EventStreamWriter}. * It is recommended to register keep this flag as false in production systems and manage schema evolution explicitly and - * in lockstep with upgrade of existing pravega client applications. + * in lockstep with upgrade of existing pravega client applications. 
*/ private final boolean registerSchema; /** - * Flag to tell the serializer if the codec should be automatically registered before using the serializer in - * {@link io.pravega.client.stream.EventStreamWriter}. + * Flag to tell the serializer if the codec should be automatically registered before using the serializer in + * {@link io.pravega.client.stream.EventStreamWriter}. * It is recommended to register keep this flag as false in production systems and manage codecTypes used by writers explicitly - * so that readers are aware of encodings used. + * so that readers are aware of encodings used. */ private final boolean registerCodec; /** - * Codec to use for encoding events after serializing them. + * Codec to use for encoding events after serializing them. */ private final Codec codec; /** * Function that should be applied on serialized data read from stream. This is invoked after reading the codecType - * from {@link EncodingInfo} and using the codec type read from it. - * It should return the decoded data back to the deserializer. + * from {@link EncodingInfo} and using the codec type read from it. + * It should return the decoded data back to the deserializer. */ private final Decoder decoder; /** - * Tells the deserializer that if supplied decoder codecTypes do not match group codecTypes then fail and exit upfront. + * Tells the deserializer that if supplied decoder codecTypes do not match group codecTypes then fail and exit upfront. */ private final boolean failOnCodecMismatch; /** - * Flag to tell the serializer if the group should be created automatically. - * It is recommended to register keep this flag as false in production systems and create groups and add schemas + * Flag to tell the serializer if the group should be created automatically. + * It is recommended to register keep this flag as false in production systems and create groups and add schemas */ private final boolean createGroup; /** - * Group properties to use for creating the group if createGroup is set to true. + * Group properties to use for creating the group if createGroup is set to true. */ private final GroupProperties groupProperties; + private SerializerConfig(String groupId, Either registryConfigOrClient, + boolean registerSchema, boolean registerCodec, Codec codec, Decoder decoder, boolean failOnCodecMismatch, + boolean createGroup, GroupProperties groupProperties) { + Preconditions.checkArgument(!Strings.isNullOrEmpty(groupId), "Group id needs to be supplied"); + Preconditions.checkArgument(registryConfigOrClient != null, "Either registry client or config needs to be supplied"); + this.groupId = groupId; + this.registryConfigOrClient = registryConfigOrClient; + this.registerSchema = registerSchema; + this.registerCodec = registerCodec; + this.codec = codec; + this.decoder = decoder; + this.failOnCodecMismatch = failOnCodecMismatch; + this.createGroup = createGroup; + this.groupProperties = groupProperties; + } + public static final class SerializerConfigBuilder { private Codec codec = NOOP; @@ -103,31 +119,76 @@ public static final class SerializerConfigBuilder { private GroupProperties groupProperties = GroupProperties.builder().build(); - public SerializerConfigBuilder decoder(String codecType, Function decoder) { + /** + * Add codec type to corresponding decoder function which will be used to decode data encoded using encoding type codecType. + * + * @param codecType codec type used for encoding. + * @param decoder decoder function to use for decoding the data. + * @return Builder. 
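A minimal usage sketch of the builder methods documented in this hunk (addDecoder, createGroup, registryClient). The group id, the "mycodec" codec type, and the identity decoder are illustrative; the decoder function is assumed to map a ByteBuffer to a ByteBuffer since the generics are elided in this rendering of the patch, and the mocked client follows the pattern used in SerializerTest.

import io.pravega.schemaregistry.client.SchemaRegistryClient;
import io.pravega.schemaregistry.contract.data.SerializationFormat;
import io.pravega.schemaregistry.serializers.SerializerConfig;

import static org.mockito.Mockito.mock;

class SerializerConfigSketch {
    SerializerConfig sketch() {
        SchemaRegistryClient client = mock(SchemaRegistryClient.class);    // stand-in registry client
        return SerializerConfig.builder()
                               .groupId("my-group")                        // hypothetical group id
                               .registryClient(client)                     // exactly one of client or config is required
                               .createGroup(SerializationFormat.Avro)      // idempotent; defaults to Full Transitive compatibility
                               .addDecoder("mycodec", buf -> buf)          // decode events written with codec type "mycodec"
                               .build();
    }
}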
+ */ + public SerializerConfigBuilder addDecoder(String codecType, Function decoder) { this.decoder = new Decoder(codecType, decoder); return this; } + /** + * Automatically create group with provided group properties values, defaulting compatibility to Full Transitive + * and allowMultipleTypes to true. + * Group creation is idempotent. + * + * @param serializationFormat {@link GroupProperties#serializationFormat}. + * @return Builder + */ public SerializerConfigBuilder createGroup(SerializationFormat serializationFormat) { return createGroup(serializationFormat, true); } + /** + * Automatically create group with provided group properties values, defaulting compatibility to Full Transitive. + * Group creation is idempotent. + * + * @param serializationFormat {@link GroupProperties#serializationFormat}. + * @param allowMultipleTypes {@link GroupProperties#allowMultipleTypes} + * @return Builder + */ public SerializerConfigBuilder createGroup(SerializationFormat serializationFormat, boolean allowMultipleTypes) { return createGroup(serializationFormat, Compatibility.fullTransitive(), allowMultipleTypes); } - public SerializerConfigBuilder createGroup(SerializationFormat serializationFormat, Compatibility rules, boolean allowMultipleTypes) { + /** + * Automatically create group with provided group properties. Group creation is idempotent. + * + * @param serializationFormat {@link GroupProperties#serializationFormat}. + * @param policy {@link GroupProperties#compatibility} + * @param allowMultipleTypes {@link GroupProperties#allowMultipleTypes} + * @return Builder + */ + public SerializerConfigBuilder createGroup(SerializationFormat serializationFormat, Compatibility policy, boolean allowMultipleTypes) { this.createGroup = true; - this.groupProperties = new GroupProperties(serializationFormat, rules, allowMultipleTypes); + this.groupProperties = new GroupProperties(serializationFormat, policy, allowMultipleTypes); return this; } + /** + * Schema Registry client. Either this or config should be supplied. Whichever is supplied later overrides + * the other. + * + * @param client Schema Registry client + * @return Builder + */ public SerializerConfigBuilder registryClient(SchemaRegistryClient client) { Preconditions.checkArgument(client != null); this.registryConfigOrClient = Either.right(client); return this; } + /** + * Schema Registry client config. Either this or client should be supplied. Whichever is supplied later overrides + * the other. + * + * @param config Schema Registry client configuration. + * @return Builder + */ public SerializerConfigBuilder registryConfig(SchemaRegistryClientConfig config) { Preconditions.checkArgument(config != null); this.registryConfigOrClient = Either.left(config); From c98ee7b0616564d892e97824d9bfb617c16ba951 Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Thu, 25 Jun 2020 02:16:38 -0700 Subject: [PATCH 36/70] remove the hashutil from this branch Signed-off-by: Shivesh Ranjan --- .../schemaregistry/common/HashUtil.java | 27 ------------------- 1 file changed, 27 deletions(-) delete mode 100644 common/src/main/java/io/pravega/schemaregistry/common/HashUtil.java diff --git a/common/src/main/java/io/pravega/schemaregistry/common/HashUtil.java b/common/src/main/java/io/pravega/schemaregistry/common/HashUtil.java deleted file mode 100644 index e03b29981..000000000 --- a/common/src/main/java/io/pravega/schemaregistry/common/HashUtil.java +++ /dev/null @@ -1,27 +0,0 @@ -/** - * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - */ -package io.pravega.schemaregistry.common; - -import com.google.common.hash.HashFunction; -import com.google.common.hash.Hashing; - -public class HashUtil { - private static final HashFunction HASH = Hashing.murmur3_128(); - - /** - * Computes a 64 bit hash of supplied bytes using 128 bit murmur3 hash function and taking its first 8 bytes. - * - * @param bytes bytes to compute hash of. - * @return a 64 bit hash of the given bytes. - */ - public static long getFingerprint(byte[] bytes) { - return HASH.hashBytes(bytes).asLong(); - } -} From c3789ca8d86ed2fb768f978d15b3caf993480cdd Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Thu, 25 Jun 2020 02:39:44 -0700 Subject: [PATCH 37/70] minor imp Signed-off-by: Shivesh Ranjan --- .../MultiplexedAndGenericDeserializer.java | 13 +++++++++---- .../serializers/MultiplexedDeserializer.java | 12 ++++++------ 2 files changed, 15 insertions(+), 10 deletions(-) diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedAndGenericDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedAndGenericDeserializer.java index 08c9bc8dc..e75dff59f 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedAndGenericDeserializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedAndGenericDeserializer.java @@ -17,6 +17,8 @@ import java.io.InputStream; import java.util.Map; +import static io.pravega.schemaregistry.common.NameUtil.extractName; + class MultiplexedAndGenericDeserializer extends AbstractPravegaDeserializer> { private final Map> deserializers; private final AbstractPravegaDeserializer genericDeserializer; @@ -34,10 +36,13 @@ class MultiplexedAndGenericDeserializer extends AbstractPravegaDeserialize @Override protected Either deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) { Preconditions.checkNotNull(writerSchema); - if (deserializers.containsKey(writerSchema.getType())) { - return Either.left(deserializers.get(writerSchema.getType()).deserialize(inputStream, writerSchema, readerSchema)); - } else { + AbstractPravegaDeserializer deserializer = deserializers.containsKey(writerSchema.getType()) ? 
+ deserializers.get(writerSchema.getType()) : + deserializers.get(extractName(writerSchema.getType())); + if (deserializer == null) { return Either.right(genericDeserializer.deserialize(inputStream, writerSchema, readerSchema)); - } + } else { + return Either.left(deserializers.get(writerSchema.getType()).deserialize(inputStream, writerSchema, readerSchema)); + } } } \ No newline at end of file diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedDeserializer.java index c0c2ba023..7447d9674 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedDeserializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedDeserializer.java @@ -33,12 +33,12 @@ class MultiplexedDeserializer extends AbstractPravegaDeserializer { @Override protected T deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) { Preconditions.checkNotNull(writerSchema); - AbstractPravegaDeserializer deserializer = deserializers - .entrySet() - .stream() - .filter(x -> x.getKey().equals(writerSchema.getType()) || extractName(x.getKey()).equals(writerSchema.getType())) - .findAny().orElseThrow(() -> new SerializationException("deserializer not supplied for type " + writerSchema.getType())) - .getValue(); + AbstractPravegaDeserializer deserializer = deserializers.containsKey(writerSchema.getType()) ? + deserializers.get(writerSchema.getType()) : + deserializers.get(extractName(writerSchema.getType())); + if (deserializer == null) { + throw new SerializationException("deserializer not supplied for type " + writerSchema.getType()); + } return deserializer.deserialize(inputStream, writerSchema, readerSchema); } } \ No newline at end of file From 32933ba636319402c948edbe1e36d3134ca14ea9 Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Sun, 28 Jun 2020 07:10:47 -0700 Subject: [PATCH 38/70] use full name Signed-off-by: Shivesh Ranjan --- .../schemaregistry/schemas/AvroSchema.java | 12 ++++++------ .../schemaregistry/schemas/JSONSchema.java | 4 ++-- .../schemaregistry/schemas/ProtobufSchema.java | 4 ++-- .../MultiplexedAndGenericDeserializer.java | 6 +----- .../serializers/MultiplexedDeserializer.java | 6 +----- .../serializers/ProtobufGenericDeserlizer.java | 7 +++++-- .../schemaregistry/schemas/TestSchemas.java | 3 +-- .../serializers/SerializerTest.java | 17 ++++++++++++++++- .../testobjs/SchemaDefinitions.java | 3 +++ 9 files changed, 37 insertions(+), 25 deletions(-) diff --git a/serializers/src/main/java/io/pravega/schemaregistry/schemas/AvroSchema.java b/serializers/src/main/java/io/pravega/schemaregistry/schemas/AvroSchema.java index 73544bec0..b35cad133 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/schemas/AvroSchema.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/schemas/AvroSchema.java @@ -37,16 +37,16 @@ public class AvroSchema implements SchemaContainer { private AvroSchema(Schema schema, Class tClass) { this.schema = schema; - this.schemaInfo = new SchemaInfo(schema.getName(), + this.schemaInfo = new SchemaInfo(schema.getFullName(), SerializationFormat.Avro, getSchemaBytes(), ImmutableMap.of()); this.tClass = tClass; } - private AvroSchema(SchemaInfo schemaInfo, Class tClass) { + private AvroSchema(SchemaInfo schemaInfo) { String schemaString = new String(schemaInfo.getSchemaData().array(), Charsets.UTF_8); this.schema = new Schema.Parser().parse(schemaString); 
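A short sketch of the naming and typing changes to AvroSchema in this patch: the schema-based of(Schema) overload is now typed as Object, and the registered type becomes the Avro full (namespace-qualified) name. The enum schema and names below are illustrative only.

import io.pravega.schemaregistry.schemas.AvroSchema;
import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;

class AvroFullNameSketch {
    void sketch() {
        Schema suit = SchemaBuilder.enumeration("Suit").namespace("io.example")   // illustrative enum schema
                                   .symbols("SPADE", "HEART", "CLUB", "DIAMOND");
        AvroSchema<Object> any = AvroSchema.of(suit);    // non-record schemas are now expressed as Object
        String type = any.getSchemaInfo().getType();     // "io.example.Suit" rather than just "Suit"
    }
}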
this.schemaInfo = schemaInfo; - this.tClass = tClass; + this.tClass = null; } /** @@ -74,8 +74,8 @@ public static AvroSchema of(Class tClass) { * @param schema Schema to use. * @return Returns an AvroSchema with {@link GenericRecord} type. */ - public static AvroSchema of(Schema schema) { - return new AvroSchema<>(schema, null); + public static AvroSchema of(Schema schema) { + return new AvroSchema<>(schema, Object.class); } /** @@ -116,7 +116,7 @@ public static AvroSchema ofSp * @return Returns an AvroSchema with {@link GenericRecord} type. */ public static AvroSchema from(SchemaInfo schemaInfo) { - return new AvroSchema<>(schemaInfo, null); + return new AvroSchema<>(schemaInfo); } private ByteBuffer getSchemaBytes() { diff --git a/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java b/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java index aef0cf8f5..f8925ac19 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java @@ -79,7 +79,7 @@ public static JSONSchema of(Class tClass) { JsonSchema schema = schemaGen.generateSchema(tClass); String schemaString = objectMapper.writeValueAsString(schema); - return new JSONSchema<>(schema, tClass.getSimpleName(), schemaString, tClass); + return new JSONSchema<>(schema, tClass.getName(), schemaString, tClass); } /** @@ -113,7 +113,7 @@ public static JSONSchema ofBaseType(Class tDerived, Class JsonSchema schema = schemaGen.generateSchema(tDerived); String schemaString = objectMapper.writeValueAsString(schema); - return new JSONSchema<>(schema, tDerived.getSimpleName(), schemaString, tBase, tDerived); + return new JSONSchema<>(schema, tDerived.getName(), schemaString, tBase, tDerived); } /** diff --git a/serializers/src/main/java/io/pravega/schemaregistry/schemas/ProtobufSchema.java b/serializers/src/main/java/io/pravega/schemaregistry/schemas/ProtobufSchema.java index 2f5fedd0c..9df3938fe 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/schemas/ProtobufSchema.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/schemas/ProtobufSchema.java @@ -76,7 +76,7 @@ public static ProtobufSchema of(Class tClas DescriptorProtos.FileDescriptorSet fileDescriptorSet) { T defaultInstance = (T) tClass.getMethod("getDefaultInstance").invoke(null); Parser tParser = (Parser) defaultInstance.getParserForType(); - return new ProtobufSchema<>(defaultInstance.getDescriptorForType().getName(), tParser, fileDescriptorSet); + return new ProtobufSchema<>(defaultInstance.getDescriptorForType().getFullName(), tParser, fileDescriptorSet); } /** @@ -113,7 +113,7 @@ public static ProtobufSchema ofGeneratedMessag T defaultInstance = (T) tClass.getMethod("getDefaultInstance").invoke(null); Parser tParser = (Parser) defaultInstance.getParserForType(); - return new ProtobufSchema<>(defaultInstance.getDescriptorForType().getName(), tParser, fileDescriptorSet); + return new ProtobufSchema<>(defaultInstance.getDescriptorForType().getFullName(), tParser, fileDescriptorSet); } /** diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedAndGenericDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedAndGenericDeserializer.java index e75dff59f..d4a621196 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedAndGenericDeserializer.java +++ 
b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedAndGenericDeserializer.java @@ -17,8 +17,6 @@ import java.io.InputStream; import java.util.Map; -import static io.pravega.schemaregistry.common.NameUtil.extractName; - class MultiplexedAndGenericDeserializer extends AbstractPravegaDeserializer> { private final Map> deserializers; private final AbstractPravegaDeserializer genericDeserializer; @@ -36,9 +34,7 @@ class MultiplexedAndGenericDeserializer extends AbstractPravegaDeserialize @Override protected Either deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) { Preconditions.checkNotNull(writerSchema); - AbstractPravegaDeserializer deserializer = deserializers.containsKey(writerSchema.getType()) ? - deserializers.get(writerSchema.getType()) : - deserializers.get(extractName(writerSchema.getType())); + AbstractPravegaDeserializer deserializer = deserializers.get(writerSchema.getType()); if (deserializer == null) { return Either.right(genericDeserializer.deserialize(inputStream, writerSchema, readerSchema)); } else { diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedDeserializer.java index 7447d9674..73ceac2f5 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedDeserializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedDeserializer.java @@ -17,8 +17,6 @@ import java.io.InputStream; import java.util.Map; -import static io.pravega.schemaregistry.common.NameUtil.extractName; - class MultiplexedDeserializer extends AbstractPravegaDeserializer { private final Map> deserializers; @@ -33,9 +31,7 @@ class MultiplexedDeserializer extends AbstractPravegaDeserializer { @Override protected T deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) { Preconditions.checkNotNull(writerSchema); - AbstractPravegaDeserializer deserializer = deserializers.containsKey(writerSchema.getType()) ? - deserializers.get(writerSchema.getType()) : - deserializers.get(extractName(writerSchema.getType())); + AbstractPravegaDeserializer deserializer = deserializers.get(writerSchema.getType()); if (deserializer == null) { throw new SerializationException("deserializer not supplied for type " + writerSchema.getType()); } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufGenericDeserlizer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufGenericDeserlizer.java index f2cf06f76..1736749c8 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufGenericDeserlizer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufGenericDeserlizer.java @@ -44,8 +44,11 @@ public Descriptors.Descriptor load(SchemaInfo schemaToUse) throws Exception { String name = tokens[0]; String pckg = tokens[1]; DescriptorProtos.FileDescriptorProto mainDescriptor = descriptorSet.getFileList().stream() - .filter(x -> x.getPackage().startsWith(pckg) && - x.getMessageTypeList().stream().anyMatch(y -> y.getName().equals(name))) + .filter(x -> { + String descriptorPackage = x.getPackage() == null ? 
"" : x.getPackage(); + return pckg.equals(descriptorPackage) && + x.getMessageTypeList().stream().anyMatch(y -> y.getName().equals(name)); + }) .findAny().orElseThrow(IllegalArgumentException::new); Descriptors.FileDescriptor[] dependencyArray = new Descriptors.FileDescriptor[count]; diff --git a/serializers/src/test/java/io/pravega/schemaregistry/schemas/TestSchemas.java b/serializers/src/test/java/io/pravega/schemaregistry/schemas/TestSchemas.java index b51cdca3b..0d397971c 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/schemas/TestSchemas.java +++ b/serializers/src/test/java/io/pravega/schemaregistry/schemas/TestSchemas.java @@ -20,7 +20,6 @@ import io.pravega.schemaregistry.testobjs.generated.ProtobufTest; import io.pravega.schemaregistry.testobjs.generated.Test1; import io.pravega.schemaregistry.testobjs.generated.Test2; -import org.apache.avro.generic.GenericRecord; import org.apache.avro.specific.SpecificRecordBase; import org.junit.Test; @@ -35,7 +34,7 @@ public class TestSchemas { @Test public void testAvroSchema() { - AvroSchema schema = AvroSchema.of(SchemaDefinitions.SCHEMA1); + AvroSchema schema = AvroSchema.of(SchemaDefinitions.SCHEMA1); assertNotNull(schema.getSchema()); assertEquals(schema.getSchemaInfo().getSerializationFormat(), SerializationFormat.Avro); diff --git a/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java b/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java index 0fba45144..8aa92e1e3 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java +++ b/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java @@ -31,6 +31,7 @@ import io.pravega.schemaregistry.testobjs.Address; import io.pravega.schemaregistry.testobjs.DerivedUser1; import io.pravega.schemaregistry.testobjs.DerivedUser2; +import io.pravega.schemaregistry.testobjs.SchemaDefinitions; import io.pravega.schemaregistry.testobjs.generated.ProtobufTest; import io.pravega.schemaregistry.testobjs.generated.Test1; import io.pravega.schemaregistry.testobjs.generated.Test2; @@ -38,6 +39,7 @@ import lombok.Data; import lombok.NoArgsConstructor; import lombok.SneakyThrows; +import org.apache.avro.generic.GenericData; import org.apache.avro.generic.GenericRecord; import org.apache.avro.specific.SpecificRecordBase; import org.junit.Test; @@ -64,7 +66,6 @@ public void testAvroSerializers() { SerializerConfig config = SerializerConfig.builder().registryClient(client).groupId("groupId").build(); AvroSchema schema1 = AvroSchema.of(Test1.class); AvroSchema schema2 = AvroSchema.of(Test2.class); - VersionInfo versionInfo1 = new VersionInfo("name", 0, 0); VersionInfo versionInfo2 = new VersionInfo("name", 1, 1); doAnswer(x -> GroupProperties.builder().serializationFormat(SerializationFormat.Any).build()) @@ -77,6 +78,20 @@ public void testAvroSerializers() { doAnswer(x -> new EncodingInfo(versionInfo2, schema2.getSchemaInfo(), CodecFactory.NONE)).when(client).getEncodingInfo(anyString(), eq(new EncodingId(1))); doAnswer(x -> true).when(client).canReadUsing(anyString(), any()); + AvroSchema of = AvroSchema.of(SchemaDefinitions.ENUM); + VersionInfo versionInfo3 = new VersionInfo(of.getSchema().getFullName(), 0, 2); + doAnswer(x -> versionInfo3).when(client).getVersionForSchema(anyString(), eq(of.getSchemaInfo())); + doAnswer(x -> new EncodingId(2)).when(client).getEncodingId(anyString(), eq(versionInfo3), any()); + doAnswer(x -> new EncodingInfo(versionInfo3, of.getSchemaInfo(), 
CodecFactory.NONE)).when(client).getEncodingInfo(anyString(), eq(new EncodingId(2))); + + Serializer serializerStr = SerializerFactory.avroSerializer(config, of); + GenericData.EnumSymbol enumSymbol = new GenericData.EnumSymbol(of.getSchema(), "a"); + ByteBuffer serialized1 = serializerStr.serialize(enumSymbol); + + Serializer deserializer1 = SerializerFactory.avroDeserializer(config, of); + Object deserializedEnum = deserializer1.deserialize(serialized1); + assertEquals(deserializedEnum, enumSymbol); + Serializer serializer = SerializerFactory.avroSerializer(config, schema1); Test1 test1 = new Test1("name", 1); ByteBuffer serialized = serializer.serialize(test1); diff --git a/serializers/src/test/java/io/pravega/schemaregistry/testobjs/SchemaDefinitions.java b/serializers/src/test/java/io/pravega/schemaregistry/testobjs/SchemaDefinitions.java index fd5830720..f7708373a 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/testobjs/SchemaDefinitions.java +++ b/serializers/src/test/java/io/pravega/schemaregistry/testobjs/SchemaDefinitions.java @@ -13,6 +13,9 @@ import org.apache.avro.SchemaBuilder; public class SchemaDefinitions { + public static final Schema ENUM = SchemaBuilder + .enumeration("a").symbols("a", "b", "c"); + public static final Schema SCHEMA1 = SchemaBuilder .record("MyTest") .fields() From d8b06758578a94ddfda54bd3ad5ae38cdb53345c Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Sun, 28 Jun 2020 07:18:39 -0700 Subject: [PATCH 39/70] ofRecord in avro schema Signed-off-by: Shivesh Ranjan --- .../pravega/schemaregistry/schemas/AvroSchema.java | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/serializers/src/main/java/io/pravega/schemaregistry/schemas/AvroSchema.java b/serializers/src/main/java/io/pravega/schemaregistry/schemas/AvroSchema.java index b35cad133..d77b9eb2e 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/schemas/AvroSchema.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/schemas/AvroSchema.java @@ -69,7 +69,8 @@ public static AvroSchema of(Class tClass) { } /** - * Method to create a typed AvroSchema of type {@link GenericRecord} from the given schema. + * Method to create a typed AvroSchema of type {@link Object} from the given schema. + * This schema can be used to express any non record schema. * * @param schema Schema to use. * @return Returns an AvroSchema with {@link GenericRecord} type. @@ -78,6 +79,17 @@ public static AvroSchema of(Schema schema) { return new AvroSchema<>(schema, Object.class); } + /** + * Method to create a typed AvroSchema of type {@link GenericRecord} from the given schema. + * + * @param schema Schema to use. + * @return Returns an AvroSchema with {@link GenericRecord} type. + */ + public static AvroSchema ofRecord(Schema schema) { + Preconditions.checkArgument(schema.getType().equals(Schema.Type.RECORD)); + return new AvroSchema<>(schema, GenericRecord.class); + } + /** * It is same as {@link #of(Class)} except that it generates an AvroSchema typed as T. 
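A one-method sketch of the ofRecord variant added just above, assuming an illustrative record schema. Unlike of(Schema), it is typed as GenericRecord and rejects non-record schemas via the precondition.

import io.pravega.schemaregistry.schemas.AvroSchema;
import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;
import org.apache.avro.generic.GenericRecord;

class OfRecordSketch {
    AvroSchema<GenericRecord> sketch() {
        Schema record = SchemaBuilder.record("MyRecord").namespace("io.example")   // illustrative record schema
                                     .fields().requiredString("field1").endRecord();
        return AvroSchema.ofRecord(record);   // record schemas only; typed as GenericRecord
    }
}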
* From 322d3c0b1219bce2b28c6bd262fa51ee7ef15f30 Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Mon, 29 Jun 2020 01:33:22 -0700 Subject: [PATCH 40/70] exact type to message match for protobuf generic deserializer Signed-off-by: Shivesh Ranjan --- .../serializers/ProtobufGenericDeserlizer.java | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufGenericDeserlizer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufGenericDeserlizer.java index 1736749c8..4725df382 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufGenericDeserlizer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufGenericDeserlizer.java @@ -10,6 +10,7 @@ package io.pravega.schemaregistry.serializers; import com.google.common.base.Preconditions; +import com.google.common.base.Strings; import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; @@ -43,11 +44,16 @@ public Descriptors.Descriptor load(SchemaInfo schemaToUse) throws Exception { String[] tokens = NameUtil.extractNameAndQualifier(schemaToUse.getType()); String name = tokens[0]; String pckg = tokens[1]; - DescriptorProtos.FileDescriptorProto mainDescriptor = descriptorSet.getFileList().stream() + DescriptorProtos.FileDescriptorProto mainDescriptor = descriptorSet + .getFileList().stream() .filter(x -> { - String descriptorPackage = x.getPackage() == null ? "" : x.getPackage(); - return pckg.equals(descriptorPackage) && - x.getMessageTypeList().stream().anyMatch(y -> y.getName().equals(name)); + boolean match; + if (x.getPackage() == null) { + match = Strings.isNullOrEmpty(pckg); + } else { + match = x.getPackage().equals(pckg); + } + return match && x.getMessageTypeList().stream().anyMatch(y -> y.getName().equals(name)); }) .findAny().orElseThrow(IllegalArgumentException::new); From a586f4f474a124439a49926771244aa08fae7212 Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Tue, 30 Jun 2020 01:53:19 -0700 Subject: [PATCH 41/70] generalize the multiformat deserialization Signed-off-by: Shivesh Ranjan --- .../serializers/JsonGenericDeserlizer.java | 6 +- ...ericObject.java => MapWithJsonSchema.java} | 2 +- .../MultipleFormatJsonStringDeserializer.java | 57 ---------- ... 
MultipleFormatTransformDeserializer.java} | 21 ++-- .../serializers/SerializerFactory.java | 106 ++++++++---------- .../serializers/SerializerTest.java | 14 +-- 6 files changed, 69 insertions(+), 137 deletions(-) rename serializers/src/main/java/io/pravega/schemaregistry/serializers/{JSonGenericObject.java => MapWithJsonSchema.java} (94%) delete mode 100644 serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatJsonStringDeserializer.java rename serializers/src/main/java/io/pravega/schemaregistry/serializers/{MultipleFormatGenericDeserializer.java => MultipleFormatTransformDeserializer.java} (50%) diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericDeserlizer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericDeserlizer.java index dac65c02b..cc0a342e3 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericDeserlizer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericDeserlizer.java @@ -27,7 +27,7 @@ import java.util.Map; import java.util.concurrent.ExecutionException; -class JsonGenericDeserlizer extends AbstractPravegaDeserializer { +class JsonGenericDeserlizer extends AbstractPravegaDeserializer { private final ObjectMapper objectMapper; private final LoadingCache knownSchemas; @@ -48,9 +48,9 @@ public JsonSchema load(SchemaInfo schemaInfo) throws Exception { @SneakyThrows({JsonProcessingException.class, ExecutionException.class, IOException.class}) @Override - protected JSonGenericObject deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) { + protected MapWithJsonSchema deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) { Map obj = objectMapper.readValue(inputStream, Map.class); JsonSchema schema = writerSchemaInfo == null ? null : knownSchemas.get(writerSchemaInfo); - return new JSonGenericObject(obj, schema); + return new MapWithJsonSchema(obj, schema); } } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JSonGenericObject.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MapWithJsonSchema.java similarity index 94% rename from serializers/src/main/java/io/pravega/schemaregistry/serializers/JSonGenericObject.java rename to serializers/src/main/java/io/pravega/schemaregistry/serializers/MapWithJsonSchema.java index dd12752eb..bc9278ca3 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JSonGenericObject.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MapWithJsonSchema.java @@ -15,7 +15,7 @@ import java.util.Map; @Data -public class JSonGenericObject { +public class MapWithJsonSchema { private final Map object; private final JsonSchema jsonSchema; } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatJsonStringDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatJsonStringDeserializer.java deleted file mode 100644 index 01583c871..000000000 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatJsonStringDeserializer.java +++ /dev/null @@ -1,57 +0,0 @@ -/** - * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - */ -package io.pravega.schemaregistry.serializers; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.base.Preconditions; -import com.google.protobuf.DynamicMessage; -import com.google.protobuf.util.JsonFormat; -import io.pravega.schemaregistry.client.SchemaRegistryClient; -import io.pravega.schemaregistry.contract.data.SchemaInfo; -import io.pravega.schemaregistry.contract.data.SerializationFormat; -import lombok.SneakyThrows; -import org.apache.avro.generic.GenericRecord; - -import java.io.InputStream; -import java.util.Map; - -class MultipleFormatJsonStringDeserializer extends AbstractPravegaDeserializer { - private final Map genericDeserializers; - private final ObjectMapper objectMapper = new ObjectMapper(); - - MultipleFormatJsonStringDeserializer(String groupId, SchemaRegistryClient client, - Map genericDeserializers, - SerializerConfig.Decoder decoder, - EncodingCache encodingCache) { - super(groupId, client, null, false, decoder, encodingCache); - this.genericDeserializers = genericDeserializers; - } - - @Override - protected String deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) { - Preconditions.checkNotNull(writerSchema); - return toJsonString(genericDeserializers.get(writerSchema.getSerializationFormat()).deserialize(inputStream, writerSchema, readerSchema)); - } - - @SneakyThrows - private String toJsonString(Object deserialize) { - if (deserialize instanceof GenericRecord) { - return deserialize.toString(); - } else if (deserialize instanceof DynamicMessage) { - JsonFormat.Printer printer = JsonFormat.printer().preservingProtoFieldNames().usingTypeRegistry(JsonFormat.TypeRegistry.newBuilder().build()); - return printer.print((DynamicMessage) deserialize); - } else if (deserialize instanceof JSonGenericObject) { - Map myobject = ((JSonGenericObject) deserialize).getObject(); - return objectMapper.writeValueAsString(myobject); - } else { - return deserialize.toString(); - } - } -} \ No newline at end of file diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatGenericDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatTransformDeserializer.java similarity index 50% rename from serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatGenericDeserializer.java rename to serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatTransformDeserializer.java index bd28f00e2..0098cb7d6 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatGenericDeserializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatTransformDeserializer.java @@ -1,10 +1,10 @@ /** * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. - * + * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 */ package io.pravega.schemaregistry.serializers; @@ -16,21 +16,24 @@ import java.io.InputStream; import java.util.Map; +import java.util.function.BiFunction; -class MultipleFormatGenericDeserializer extends AbstractPravegaDeserializer { +class MultipleFormatTransformDeserializer extends AbstractPravegaDeserializer { private final Map genericDeserializers; + private final BiFunction transform; - MultipleFormatGenericDeserializer(String groupId, SchemaRegistryClient client, - Map genericDeserializers, - SerializerConfig.Decoder decoder, - EncodingCache encodingCache) { + MultipleFormatTransformDeserializer(String groupId, SchemaRegistryClient client, + Map genericDeserializers, + SerializerConfig.Decoder decoder, + EncodingCache encodingCache, BiFunction transform) { super(groupId, client, null, false, decoder, encodingCache); this.genericDeserializers = genericDeserializers; + this.transform = transform; } @Override - protected Object deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) { + protected T deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) { Preconditions.checkNotNull(writerSchema); - return genericDeserializers.get(writerSchema.getSerializationFormat()).deserialize(inputStream, writerSchema, readerSchema); + return transform.apply(writerSchema.getSerializationFormat(), genericDeserializers.get(writerSchema.getSerializationFormat()).deserialize(inputStream, writerSchema, readerSchema)); } } \ No newline at end of file diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java index a8dac6c6f..e604d4353 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java @@ -9,13 +9,15 @@ */ package io.pravega.schemaregistry.serializers; +import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Preconditions; import com.google.protobuf.DynamicMessage; import com.google.protobuf.GeneratedMessageV3; import com.google.protobuf.Message; +import com.google.protobuf.util.JsonFormat; import io.pravega.client.stream.Serializer; -import io.pravega.schemaregistry.client.SchemaRegistryClientFactory; import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.client.SchemaRegistryClientFactory; import io.pravega.schemaregistry.common.Either; import io.pravega.schemaregistry.contract.data.EncodingInfo; import io.pravega.schemaregistry.contract.data.SchemaInfo; @@ -24,6 +26,7 @@ import io.pravega.schemaregistry.schemas.JSONSchema; import io.pravega.schemaregistry.schemas.ProtobufSchema; import io.pravega.schemaregistry.schemas.SchemaContainer; +import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; import org.apache.avro.generic.GenericRecord; @@ -31,9 +34,11 @@ import java.io.InputStream; import java.io.OutputStream; import java.nio.ByteBuffer; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.function.BiFunction; import java.util.stream.Collectors; @Slf4j @@ -428,7 +433,7 @@ public static Serializer jsonDeserializer(SerializerConfig config, JSONSc * @param config Serializer Config used for instantiating a new serializer. 
* @return A deserializer Implementation that can be used in {@link io.pravega.client.stream.EventStreamReader}. */ - public static Serializer jsonGenericDeserializer(SerializerConfig config) { + public static Serializer jsonGenericDeserializer(SerializerConfig config) { SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : config.getRegistryConfigOrClient().getRight(); @@ -520,7 +525,7 @@ public static Serializer jsonMultiTypeDeserializer( * @param Base type of schemas. * @return a Deserializer which can deserialize events of different types in the stream into typed objects. */ - public static Serializer> jsonTypedOrGenericDeserializer( + public static Serializer> jsonTypedOrGenericDeserializer( SerializerConfig config, Map, JSONSchema> schemas) { String groupId = config.getGroupId(); SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? @@ -604,7 +609,6 @@ protected T deserialize(InputStream inputStream, SchemaInfo writerSchema, Schema // endregion // region multi format deserializer - /** * A deserializer that can read data where each event could be written with either of avro, protobuf or json * serialization formats. @@ -613,39 +617,38 @@ protected T deserialize(InputStream inputStream, SchemaInfo writerSchema, Schema * @return a deserializer that can deserialize events serialized as protobuf, json or avro into java objects. */ public static Serializer multiFormatGenericDeserializer(SerializerConfig config) { - String groupId = config.getGroupId(); - SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? - SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : - config.getRegistryConfigOrClient().getRight(); - autoCreateGroup(schemaRegistryClient, config); - failOnCodecMismatch(schemaRegistryClient, config); - EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - - AbstractPravegaDeserializer json = new JsonGenericDeserlizer(config.getGroupId(), schemaRegistryClient, - config.getDecoder(), encodingCache); - AbstractPravegaDeserializer protobuf = new ProtobufGenericDeserlizer(groupId, schemaRegistryClient, null, config.getDecoder(), - encodingCache); - AbstractPravegaDeserializer avro = new AvroGenericDeserlizer(groupId, schemaRegistryClient, null, config.getDecoder(), - encodingCache); + return deserializeAsT(config, Collections.emptyMap(), (x, y) -> y); + } - Map map = new HashMap<>(); - map.put(SerializationFormat.Json, json); - map.put(SerializationFormat.Avro, avro); - map.put(SerializationFormat.Protobuf, protobuf); - return new MultipleFormatGenericDeserializer(groupId, schemaRegistryClient, map, config.getDecoder(), - encodingCache); + /** + * A deserializer that can read data where each event could be written with different serialization formats and + * deserializes and converts them to a json string. + * + * @param config serializer config + * @return a deserializer that can deserialize protobuf, json or avro events into java objects. 
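A minimal sketch of how the generalized multi-format API introduced here could be used, assuming the transform is a BiFunction over the SerializationFormat and the deserialized Object (the generics are elided in this rendering of the patch). The tagging transform and the empty custom-deserializer map are illustrative.

import io.pravega.client.stream.Serializer;
import io.pravega.schemaregistry.serializers.SerializerConfig;
import io.pravega.schemaregistry.serializers.SerializerFactory;

import java.util.Collections;

class MultiFormatSketch {
    void sketch(SerializerConfig config) {
        // Render any avro/protobuf/json event as a JSON string:
        Serializer<String> asJson = SerializerFactory.deserializeAsJsonString(config);
        // Or supply a per-format transform; here each value is simply tagged with its format:
        Serializer<String> tagged = SerializerFactory.deserializeAsT(config, Collections.emptyMap(),
                (format, value) -> format + ": " + value);
    }
}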
+ */ + public static Serializer deserializeAsJsonString(SerializerConfig config) { + JsonFormat.Printer protoBufPrinter = JsonFormat.printer().preservingProtoFieldNames().usingTypeRegistry(JsonFormat.TypeRegistry.newBuilder().build()); + ObjectMapper objectMapper = new ObjectMapper(); + return deserializeAsT(config, Collections.emptyMap(), (x, y) -> toJsonString(x, y, protoBufPrinter, objectMapper)); } /** * A deserializer that can read data where each event could be written with different serialization formats. * It has built in deserializers for protobuf, avro and json. In addition to it, users can supply - * deserializers for their custom formats. + * deserializers {@link PravegaDeserializer} for their custom formats which this deserializer will use. + * This also takes a transform function which is applied on the deserialized object and should transform the object + * into the type T. * * @param config serializer config - * @param deserializers Map of serialization format to corresponding deserializer. + * @param deserializers Map of serialization format to corresponding deserializer. + * @param transform a transform function that transforms the deserialized object based on the serialization format + * into an object of type T. + * @param Type of object to get back from deserializer. * @return a deserializer that can deserialize protobuf, json or avro events into java objects. */ - public static Serializer multiFormatGenericDeserializer(SerializerConfig config, Map> deserializers) { + public static Serializer deserializeAsT(SerializerConfig config, Map> deserializers, BiFunction transform) { String groupId = config.getGroupId(); SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : @@ -675,39 +678,8 @@ protected Object deserialize(InputStream inputStream, SchemaInfo writerSchema, S }); }); - return new MultipleFormatGenericDeserializer(groupId, schemaRegistryClient, map, config.getDecoder(), - encodingCache); - } - - /** - * A deserializer that can read data where each event could be written with different serialization formats and - * deserializes and converts them to a json string. - * - * @param config serializer config - * @return a deserializer that can deserialize protobuf, json or avro events into java objects. - */ - public static Serializer deserializeAsJsonString(SerializerConfig config) { - String groupId = config.getGroupId(); - SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? 
- SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : - config.getRegistryConfigOrClient().getRight(); - autoCreateGroup(schemaRegistryClient, config); - failOnCodecMismatch(schemaRegistryClient, config); - EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - - AbstractPravegaDeserializer json = new JsonGenericDeserlizer(config.getGroupId(), schemaRegistryClient, - config.getDecoder(), encodingCache); - AbstractPravegaDeserializer protobuf = new ProtobufGenericDeserlizer(groupId, schemaRegistryClient, null, config.getDecoder(), - encodingCache); - AbstractPravegaDeserializer avro = new AvroGenericDeserlizer(groupId, schemaRegistryClient, null, config.getDecoder(), - encodingCache); - - Map map = new HashMap<>(); - map.put(SerializationFormat.Json, json); - map.put(SerializationFormat.Avro, avro); - map.put(SerializationFormat.Protobuf, protobuf); - return new MultipleFormatJsonStringDeserializer(groupId, schemaRegistryClient, map, config.getDecoder(), - encodingCache); + return new MultipleFormatTransformDeserializer<>(groupId, schemaRegistryClient, map, config.getDecoder(), + encodingCache, transform); } // endregion @@ -732,4 +704,18 @@ private static void failOnCodecMismatch(SchemaRegistryClient client, SerializerC } } } + + @SneakyThrows + private static String toJsonString(SerializationFormat format, Object deserialize, JsonFormat.Printer printer, ObjectMapper objectMapper) { + switch (format) { + case Avro: + return deserialize.toString(); + case Protobuf: + return printer.print((DynamicMessage) deserialize); + case Json: + return objectMapper.writeValueAsString(((MapWithJsonSchema) deserialize).getObject()); + default: + throw new IllegalArgumentException("only avro protobuf and json can be converted to json string"); + } + } } diff --git a/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java b/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java index 8aa92e1e3..d019fcd4b 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java +++ b/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java @@ -260,8 +260,8 @@ public void testJsonSerializers() { assertEquals(deserialized, user1); serialized = serializer.serialize(user1); - Serializer genericDeserializer = SerializerFactory.jsonGenericDeserializer(config); - JSonGenericObject generic = genericDeserializer.deserialize(serialized); + Serializer genericDeserializer = SerializerFactory.jsonGenericDeserializer(config); + MapWithJsonSchema generic = genericDeserializer.deserialize(serialized); assertEquals(generic.getJsonSchema(), schema1.getSchema()); assertEquals(generic.getObject().size(), 4); @@ -290,9 +290,9 @@ public void testJsonSerializers() { Map, JSONSchema> map2 = new HashMap<>(); map2.put(DerivedUser1.class, schema1Base); - Serializer> fallbackDeserializer = SerializerFactory.jsonTypedOrGenericDeserializer(config, map2); + Serializer> fallbackDeserializer = SerializerFactory.jsonTypedOrGenericDeserializer(config, map2); serialized = multiSerializer.serialize(user1); - Either fallback = fallbackDeserializer.deserialize(serialized); + Either fallback = fallbackDeserializer.deserialize(serialized); assertTrue(fallback.isLeft()); assertEquals(fallback.getLeft(), user1); @@ -349,7 +349,7 @@ public void testMultiformatDeserializers() throws IOException { deserialized = deserializer.deserialize(serializedProto); assertTrue(deserialized 
instanceof DynamicMessage); deserialized = deserializer.deserialize(serializedJson); - assertTrue(deserialized instanceof JSonGenericObject); + assertTrue(deserialized instanceof MapWithJsonSchema); Serializer jsonStringDeserializer = SerializerFactory.deserializeAsJsonString(config); serializedAvro.position(0); @@ -429,9 +429,9 @@ public void testNoEncodingJson() throws IOException { serialized = serializer.serialize(user1); - Serializer genericDeserializer = SerializerFactory.jsonGenericDeserializer(config); + Serializer genericDeserializer = SerializerFactory.jsonGenericDeserializer(config); - JSonGenericObject generic = genericDeserializer.deserialize(serialized); + MapWithJsonSchema generic = genericDeserializer.deserialize(serialized); assertNotNull(generic.getObject()); assertNull(generic.getJsonSchema()); } From cdc0c0c84eb753689d0737dd1c1fa6b705e4f31f Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Tue, 30 Jun 2020 03:21:31 -0700 Subject: [PATCH 42/70] revert rename of json generic object Signed-off-by: Shivesh Ranjan --- .../serializers/JsonGenericDeserlizer.java | 6 +++--- ...pWithJsonSchema.java => JsonGenericObject.java} | 2 +- .../serializers/SerializerFactory.java | 8 ++++---- .../schemaregistry/serializers/SerializerTest.java | 14 +++++++------- 4 files changed, 15 insertions(+), 15 deletions(-) rename serializers/src/main/java/io/pravega/schemaregistry/serializers/{MapWithJsonSchema.java => JsonGenericObject.java} (94%) diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericDeserlizer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericDeserlizer.java index cc0a342e3..bc882ae3d 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericDeserlizer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericDeserlizer.java @@ -27,7 +27,7 @@ import java.util.Map; import java.util.concurrent.ExecutionException; -class JsonGenericDeserlizer extends AbstractPravegaDeserializer { +class JsonGenericDeserlizer extends AbstractPravegaDeserializer { private final ObjectMapper objectMapper; private final LoadingCache knownSchemas; @@ -48,9 +48,9 @@ public JsonSchema load(SchemaInfo schemaInfo) throws Exception { @SneakyThrows({JsonProcessingException.class, ExecutionException.class, IOException.class}) @Override - protected MapWithJsonSchema deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) { + protected JsonGenericObject deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) { Map obj = objectMapper.readValue(inputStream, Map.class); JsonSchema schema = writerSchemaInfo == null ? 
null : knownSchemas.get(writerSchemaInfo); - return new MapWithJsonSchema(obj, schema); + return new JsonGenericObject(obj, schema); } } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MapWithJsonSchema.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericObject.java similarity index 94% rename from serializers/src/main/java/io/pravega/schemaregistry/serializers/MapWithJsonSchema.java rename to serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericObject.java index bc9278ca3..c58c583dc 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MapWithJsonSchema.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericObject.java @@ -15,7 +15,7 @@ import java.util.Map; @Data -public class MapWithJsonSchema { +public class JsonGenericObject { private final Map object; private final JsonSchema jsonSchema; } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java index e604d4353..c056c1b9a 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java @@ -279,7 +279,7 @@ public static Serializer protobufDeserializer( * @param schema Schema data that encapsulates an ProtobufSchema. * @return A deserializer Implementation that can be used in {@link io.pravega.client.stream.EventStreamReader}. */ - public static Serializer protobufGenericDeserializer(SerializerConfig config, ProtobufSchema schema) { + public static Serializer protobufGenericDeserializer(SerializerConfig config, @Nullable ProtobufSchema schema) { SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : config.getRegistryConfigOrClient().getRight(); @@ -433,7 +433,7 @@ public static Serializer jsonDeserializer(SerializerConfig config, JSONSc * @param config Serializer Config used for instantiating a new serializer. * @return A deserializer Implementation that can be used in {@link io.pravega.client.stream.EventStreamReader}. */ - public static Serializer jsonGenericDeserializer(SerializerConfig config) { + public static Serializer jsonGenericDeserializer(SerializerConfig config) { SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : config.getRegistryConfigOrClient().getRight(); @@ -525,7 +525,7 @@ public static Serializer jsonMultiTypeDeserializer( * @param Base type of schemas. * @return a Deserializer which can deserialize events of different types in the stream into typed objects. */ - public static Serializer> jsonTypedOrGenericDeserializer( + public static Serializer> jsonTypedOrGenericDeserializer( SerializerConfig config, Map, JSONSchema> schemas) { String groupId = config.getGroupId(); SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? 
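// Illustrative sketch (not part of the patch): using the deserializeAsJsonString(...) path, whose
// per-format conversion (avro -> toString(), protobuf -> JsonFormat printer, json -> ObjectMapper)
// is the toJsonString(...) helper shown in this file. The SerializerConfig and the serialized
// ByteBuffer are assumed to come from elsewhere, e.g. a writer built by the same factory.
import java.nio.ByteBuffer;
import io.pravega.client.stream.Serializer;
import io.pravega.schemaregistry.serializers.SerializerConfig;
import io.pravega.schemaregistry.serializers.SerializerFactory;

public class JsonStringDeserializerSketch {
    // Returns the event rendered as a JSON string regardless of its serialization format.
    static String asJson(SerializerConfig config, ByteBuffer event) {
        Serializer<String> deserializer = SerializerFactory.deserializeAsJsonString(config);
        return deserializer.deserialize(event);
    }
}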
@@ -713,7 +713,7 @@ private static String toJsonString(SerializationFormat format, Object deserializ case Protobuf: return printer.print((DynamicMessage) deserialize); case Json: - return objectMapper.writeValueAsString(((MapWithJsonSchema) deserialize).getObject()); + return objectMapper.writeValueAsString(((JsonGenericObject) deserialize).getObject()); default: throw new IllegalArgumentException("only avro protobuf and json can be converted to json string"); } diff --git a/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java b/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java index d019fcd4b..e434e2f6b 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java +++ b/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java @@ -260,8 +260,8 @@ public void testJsonSerializers() { assertEquals(deserialized, user1); serialized = serializer.serialize(user1); - Serializer genericDeserializer = SerializerFactory.jsonGenericDeserializer(config); - MapWithJsonSchema generic = genericDeserializer.deserialize(serialized); + Serializer genericDeserializer = SerializerFactory.jsonGenericDeserializer(config); + JsonGenericObject generic = genericDeserializer.deserialize(serialized); assertEquals(generic.getJsonSchema(), schema1.getSchema()); assertEquals(generic.getObject().size(), 4); @@ -290,9 +290,9 @@ public void testJsonSerializers() { Map, JSONSchema> map2 = new HashMap<>(); map2.put(DerivedUser1.class, schema1Base); - Serializer> fallbackDeserializer = SerializerFactory.jsonTypedOrGenericDeserializer(config, map2); + Serializer> fallbackDeserializer = SerializerFactory.jsonTypedOrGenericDeserializer(config, map2); serialized = multiSerializer.serialize(user1); - Either fallback = fallbackDeserializer.deserialize(serialized); + Either fallback = fallbackDeserializer.deserialize(serialized); assertTrue(fallback.isLeft()); assertEquals(fallback.getLeft(), user1); @@ -349,7 +349,7 @@ public void testMultiformatDeserializers() throws IOException { deserialized = deserializer.deserialize(serializedProto); assertTrue(deserialized instanceof DynamicMessage); deserialized = deserializer.deserialize(serializedJson); - assertTrue(deserialized instanceof MapWithJsonSchema); + assertTrue(deserialized instanceof JsonGenericObject); Serializer jsonStringDeserializer = SerializerFactory.deserializeAsJsonString(config); serializedAvro.position(0); @@ -429,9 +429,9 @@ public void testNoEncodingJson() throws IOException { serialized = serializer.serialize(user1); - Serializer genericDeserializer = SerializerFactory.jsonGenericDeserializer(config); + Serializer genericDeserializer = SerializerFactory.jsonGenericDeserializer(config); - MapWithJsonSchema generic = genericDeserializer.deserialize(serialized); + JsonGenericObject generic = genericDeserializer.deserialize(serialized); assertNotNull(generic.getObject()); assertNull(generic.getJsonSchema()); } From 8693357242ac26abb013aecc43f9cdbae62ca16f Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Thu, 2 Jul 2020 00:57:47 -0700 Subject: [PATCH 43/70] refactoring serializer factory into multiple classes for readability Signed-off-by: Shivesh Ranjan --- .../schemaregistry/schemas/AvroSchema.java | 2 +- .../serializers/AvroGenericDeserlizer.java | 9 +- .../serializers/AvroSerializerFactory.java | 131 +++++ .../serializers/CustomSerializerFactory.java | 66 +++ .../serializers/JsonGenericDeserlizer.java | 22 +- 
.../serializers/JsonGenericObject.java | 21 - .../serializers/JsonSerializerFactory.java | 131 +++++ .../MultiFormatSerializerFactory.java | 199 ++++++++ .../MultiFormatWithSchemaDeserializer.java | 43 ++ ...r.java => MultipleFormatDeserializer.java} | 13 +- .../serializers/MultipleFormatSerializer.java | 41 ++ .../ProtobufSerializerFactory.java | 125 +++++ .../serializers/SerializerConfig.java | 4 +- .../serializers/SerializerFactory.java | 466 ++++-------------- .../serializers/SerializerFactoryHelper.java | 40 ++ .../serializers/WithSchema.java | 180 +++++++ .../serializers/SerializerTest.java | 35 +- .../testobjs/SchemaDefinitions.java | 2 +- 18 files changed, 1085 insertions(+), 445 deletions(-) create mode 100644 serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroSerializerFactory.java create mode 100644 serializers/src/main/java/io/pravega/schemaregistry/serializers/CustomSerializerFactory.java delete mode 100644 serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericObject.java create mode 100644 serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializerFactory.java create mode 100644 serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatSerializerFactory.java create mode 100644 serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatWithSchemaDeserializer.java rename serializers/src/main/java/io/pravega/schemaregistry/serializers/{MultipleFormatTransformDeserializer.java => MultipleFormatDeserializer.java} (67%) create mode 100644 serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatSerializer.java create mode 100644 serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufSerializerFactory.java create mode 100644 serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactoryHelper.java create mode 100644 serializers/src/main/java/io/pravega/schemaregistry/serializers/WithSchema.java diff --git a/serializers/src/main/java/io/pravega/schemaregistry/schemas/AvroSchema.java b/serializers/src/main/java/io/pravega/schemaregistry/schemas/AvroSchema.java index d77b9eb2e..caaea741e 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/schemas/AvroSchema.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/schemas/AvroSchema.java @@ -127,7 +127,7 @@ public static AvroSchema ofSp * @param schemaInfo Schema info object that has schema data in binary form. * @return Returns an AvroSchema with {@link GenericRecord} type. 
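// Illustrative note (not part of the patch): with the AvroGenericDeserlizer change above, the
// generic Avro path reads through GenericDatumReader<Object> and hands back Object rather than
// GenericRecord. A caller that knows the writer schema was a record can still down-cast; non-record
// top-level schemas (primitives, arrays, maps) come back as the corresponding Avro runtime types.
// Hypothetical consumer of such a result:
import org.apache.avro.generic.GenericRecord;

public class AvroGenericResultSketch {
    // Extracts a named field when the deserialized object is a record, otherwise returns it as-is.
    static Object field(Object deserialized, String fieldName) {
        if (deserialized instanceof GenericRecord) {
            return ((GenericRecord) deserialized).get(fieldName);
        }
        return deserialized;
    }
}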
*/ - public static AvroSchema from(SchemaInfo schemaInfo) { + public static AvroSchema from(SchemaInfo schemaInfo) { return new AvroSchema<>(schemaInfo); } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroGenericDeserlizer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroGenericDeserlizer.java index 409fa72c7..9ab438238 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroGenericDeserlizer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroGenericDeserlizer.java @@ -19,17 +19,16 @@ import lombok.SneakyThrows; import org.apache.avro.Schema; import org.apache.avro.generic.GenericDatumReader; -import org.apache.avro.generic.GenericRecord; import org.apache.avro.io.BinaryDecoder; import org.apache.avro.io.DecoderFactory; import javax.annotation.Nullable; import java.io.InputStream; -class AvroGenericDeserlizer extends AbstractPravegaDeserializer { +class AvroGenericDeserlizer extends AbstractPravegaDeserializer { private final LoadingCache knownSchemas; - AvroGenericDeserlizer(String groupId, SchemaRegistryClient client, @Nullable AvroSchema schema, + AvroGenericDeserlizer(String groupId, SchemaRegistryClient client, @Nullable AvroSchema schema, SerializerConfig.Decoder decoder, EncodingCache encodingCache) { super(groupId, client, schema, false, decoder, encodingCache); this.knownSchemas = CacheBuilder.newBuilder().build(new CacheLoader() { @@ -42,12 +41,12 @@ public Schema load(SchemaInfo schemaInfo) throws Exception { @SneakyThrows @Override - protected GenericRecord deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) { + protected Object deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) { Preconditions.checkNotNull(writerSchemaInfo); Schema writerSchema = knownSchemas.get(writerSchemaInfo); Schema readerSchema = knownSchemas.get(readerSchemaInfo); - GenericDatumReader genericDatumReader = new GenericDatumReader<>(writerSchema, readerSchema); + GenericDatumReader genericDatumReader = new GenericDatumReader<>(writerSchema, readerSchema); BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(inputStream, null); return genericDatumReader.read(null, decoder); diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroSerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroSerializerFactory.java new file mode 100644 index 000000000..75798ca0e --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroSerializerFactory.java @@ -0,0 +1,131 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import com.google.common.base.Preconditions; +import io.pravega.client.stream.Serializer; +import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.client.SchemaRegistryClientFactory; +import io.pravega.schemaregistry.common.Either; +import io.pravega.schemaregistry.schemas.AvroSchema; +import lombok.extern.slf4j.Slf4j; + +import javax.annotation.Nullable; +import java.util.Map; +import java.util.stream.Collectors; + +import static io.pravega.schemaregistry.serializers.SerializerFactoryHelper.*; + +/** + * Internal Factory class for Avro serializers and deserializers. + */ +@Slf4j +class AvroSerializerFactory { + static Serializer serializer(SerializerConfig config, AvroSchema schemaData) { + Preconditions.checkNotNull(config); + Preconditions.checkNotNull(schemaData); + SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? + SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : + config.getRegistryConfigOrClient().getRight(); + autoCreateGroup(schemaRegistryClient, config); + registerCodec(schemaRegistryClient, config); + String groupId = config.getGroupId(); + return new AvroSerializer<>(groupId, schemaRegistryClient, schemaData, config.getCodec(), config.isRegisterSchema()); + } + + static Serializer deserializer(SerializerConfig config, AvroSchema schemaData) { + Preconditions.checkNotNull(config); + Preconditions.checkNotNull(schemaData); + SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? + SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : + config.getRegistryConfigOrClient().getRight(); + + String groupId = config.getGroupId(); + autoCreateGroup(schemaRegistryClient, config); + failOnCodecMismatch(schemaRegistryClient, config); + + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); + + return new AvroDeserlizer<>(groupId, schemaRegistryClient, schemaData, config.getDecoder(), encodingCache); + } + + static Serializer genericDeserializer(SerializerConfig config, @Nullable AvroSchema schemaData) { + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? + SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : + config.getRegistryConfigOrClient().getRight(); + autoCreateGroup(schemaRegistryClient, config); + failOnCodecMismatch(schemaRegistryClient, config); + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); + + return new AvroGenericDeserlizer(groupId, schemaRegistryClient, schemaData, config.getDecoder(), encodingCache); + } + + static Serializer multiTypeSerializer(SerializerConfig config, Map, AvroSchema> schemas) { + Preconditions.checkNotNull(config); + Preconditions.checkNotNull(schemas); + + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? 
+ SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : + config.getRegistryConfigOrClient().getRight(); + autoCreateGroup(schemaRegistryClient, config); + registerCodec(schemaRegistryClient, config); + Map, AbstractPravegaSerializer> serializerMap = schemas + .entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, + x -> new AvroSerializer<>(groupId, schemaRegistryClient, x.getValue(), config.getCodec(), + config.isRegisterSchema()))); + return new MultiplexedSerializer<>(serializerMap); + } + + static Serializer multiTypeDeserializer( + SerializerConfig config, Map, AvroSchema> schemas) { + Preconditions.checkNotNull(config); + Preconditions.checkNotNull(schemas); + + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? + SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : + config.getRegistryConfigOrClient().getRight(); + autoCreateGroup(schemaRegistryClient, config); + failOnCodecMismatch(schemaRegistryClient, config); + + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); + + Map> deserializerMap = schemas + .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), + x -> new AvroDeserlizer<>(groupId, schemaRegistryClient, x, config.getDecoder(), encodingCache))); + return new MultiplexedDeserializer<>(groupId, schemaRegistryClient, deserializerMap, config.getDecoder(), + encodingCache); + } + + static Serializer> typedOrGenericDeserializer( + SerializerConfig config, Map, AvroSchema> schemas) { + Preconditions.checkNotNull(config); + Preconditions.checkNotNull(schemas); + + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? + SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : + config.getRegistryConfigOrClient().getRight(); + autoCreateGroup(schemaRegistryClient, config); + failOnCodecMismatch(schemaRegistryClient, config); + + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); + + Map> deserializerMap = schemas + .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), + x -> new AvroDeserlizer<>(groupId, schemaRegistryClient, x, config.getDecoder(), encodingCache))); + AbstractPravegaDeserializer genericDeserializer = new AvroGenericDeserlizer(groupId, schemaRegistryClient, + null, config.getDecoder(), encodingCache); + return new MultiplexedAndGenericDeserializer<>(groupId, schemaRegistryClient, deserializerMap, genericDeserializer, + config.getDecoder(), encodingCache); + } +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/CustomSerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/CustomSerializerFactory.java new file mode 100644 index 000000000..37072677a --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/CustomSerializerFactory.java @@ -0,0 +1,66 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
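// Illustrative sketch (not part of the patch): the typedOrGenericDeserializer(...) factory methods
// above return Either<T, Object> per event: left when the writer schema matched one of the supplied
// classes, right (a generic representation) when it did not. Hypothetical consumer, assuming the
// isLeft()/getLeft()/getRight() accessors exercised by the tests in this series:
import io.pravega.schemaregistry.common.Either;

public class EitherResultSketch {
    static <T> String describe(Either<T, Object> result) {
        return result.isLeft()
                ? "typed: " + result.getLeft()       // deserialized into a known class
                : "generic: " + result.getRight();   // unknown writer schema, generic representation
    }
}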
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import io.pravega.client.stream.Serializer; +import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.client.SchemaRegistryClientFactory; +import io.pravega.schemaregistry.contract.data.SchemaInfo; +import io.pravega.schemaregistry.schemas.SchemaContainer; +import lombok.extern.slf4j.Slf4j; + +import javax.annotation.Nullable; +import java.io.InputStream; +import java.io.OutputStream; + +import static io.pravega.schemaregistry.serializers.SerializerFactoryHelper.*; + +/** + * Internal Factory class for Custom serializers and deserializers. + */ +@Slf4j +class CustomSerializerFactory { + static Serializer serializer(SerializerConfig config, SchemaContainer schema, PravegaSerializer serializer) { + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? + SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : + config.getRegistryConfigOrClient().getRight(); + autoCreateGroup(schemaRegistryClient, config); + registerCodec(schemaRegistryClient, config); + return new AbstractPravegaSerializer(groupId, schemaRegistryClient, + schema, config.getCodec(), config.isRegisterSchema()) { + @Override + protected void serialize(T var, SchemaInfo schema, OutputStream outputStream) { + serializer.serialize(var, schema, outputStream); + } + }; + } + + static Serializer deserializer(SerializerConfig config, @Nullable SchemaContainer schema, + PravegaDeserializer deserializer) { + + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? 
+ SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : + config.getRegistryConfigOrClient().getRight(); + autoCreateGroup(schemaRegistryClient, config); + failOnCodecMismatch(schemaRegistryClient, config); + + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); + + return new AbstractPravegaDeserializer(groupId, schemaRegistryClient, schema, false, + config.getDecoder(), encodingCache) { + @Override + protected T deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) { + return deserializer.deserialize(inputStream, writerSchema, readerSchema); + } + }; + } +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericDeserlizer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericDeserlizer.java index bc882ae3d..fbbbbdb54 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericDeserlizer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericDeserlizer.java @@ -13,23 +13,16 @@ import com.fasterxml.jackson.annotation.PropertyAccessor; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.module.jsonSchema.JsonSchema; -import com.google.common.cache.CacheBuilder; -import com.google.common.cache.CacheLoader; -import com.google.common.cache.LoadingCache; import io.pravega.schemaregistry.client.SchemaRegistryClient; import io.pravega.schemaregistry.contract.data.SchemaInfo; -import io.pravega.schemaregistry.schemas.JSONSchema; import lombok.SneakyThrows; import java.io.IOException; import java.io.InputStream; import java.util.Map; -import java.util.concurrent.ExecutionException; -class JsonGenericDeserlizer extends AbstractPravegaDeserializer { +class JsonGenericDeserlizer extends AbstractPravegaDeserializer> { private final ObjectMapper objectMapper; - private final LoadingCache knownSchemas; JsonGenericDeserlizer(String groupId, SchemaRegistryClient client, SerializerConfig.Decoder decoder, EncodingCache encodingCache) { @@ -38,19 +31,12 @@ class JsonGenericDeserlizer extends AbstractPravegaDeserializer() { - @Override - public JsonSchema load(SchemaInfo schemaInfo) throws Exception { - return JSONSchema.from(schemaInfo).getSchema(); - } - }); } - @SneakyThrows({JsonProcessingException.class, ExecutionException.class, IOException.class}) + @SneakyThrows({JsonProcessingException.class, IOException.class}) @Override - protected JsonGenericObject deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) { + protected WithSchema deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) { Map obj = objectMapper.readValue(inputStream, Map.class); - JsonSchema schema = writerSchemaInfo == null ? null : knownSchemas.get(writerSchemaInfo); - return new JsonGenericObject(obj, schema); + return new WithSchema<>(writerSchemaInfo, obj, (x, y) -> (Map) y); } } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericObject.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericObject.java deleted file mode 100644 index c58c583dc..000000000 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericObject.java +++ /dev/null @@ -1,21 +0,0 @@ -/** - * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - */ -package io.pravega.schemaregistry.serializers; - -import com.fasterxml.jackson.module.jsonSchema.JsonSchema; -import lombok.Data; - -import java.util.Map; - -@Data -public class JsonGenericObject { - private final Map object; - private final JsonSchema jsonSchema; -} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializerFactory.java new file mode 100644 index 000000000..d3b464564 --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializerFactory.java @@ -0,0 +1,131 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import io.pravega.client.stream.Serializer; +import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.client.SchemaRegistryClientFactory; +import io.pravega.schemaregistry.common.Either; +import io.pravega.schemaregistry.schemas.JSONSchema; +import lombok.extern.slf4j.Slf4j; + +import java.util.Map; +import java.util.stream.Collectors; + +import static io.pravega.schemaregistry.serializers.SerializerFactoryHelper.*; + +/** + * Internal Factory class for json serializers and deserializers. + */ +@Slf4j +class JsonSerializerFactory { + static Serializer serializer(SerializerConfig config, JSONSchema schemaData) { + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? + SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : + config.getRegistryConfigOrClient().getRight(); + autoCreateGroup(schemaRegistryClient, config); + registerCodec(schemaRegistryClient, config); + return new JsonSerializer<>(groupId, schemaRegistryClient, schemaData, config.getCodec(), + config.isRegisterSchema()); + } + + static Serializer deserializer(SerializerConfig config, JSONSchema schemaData) { + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? + SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : + config.getRegistryConfigOrClient().getRight(); + autoCreateGroup(schemaRegistryClient, config); + failOnCodecMismatch(schemaRegistryClient, config); + + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); + + // schema can be null in which case deserialization will happen into dynamic message + return new JsonDeserlizer<>(groupId, schemaRegistryClient, schemaData, config.getDecoder(), encodingCache); + } + + static Serializer> genericDeserializer(SerializerConfig config) { + SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? 
+ SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : + config.getRegistryConfigOrClient().getRight(); + + String groupId = config.getGroupId(); + + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); + + return new JsonGenericDeserlizer(groupId, schemaRegistryClient, config.getDecoder(), + encodingCache); + } + + static Serializer jsonStringDeserializer(SerializerConfig config) { + SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? + SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : + config.getRegistryConfigOrClient().getRight(); + + String groupId = config.getGroupId(); + + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); + + return new JsonStringDeserializer(groupId, schemaRegistryClient, config.getDecoder(), encodingCache); + } + + static Serializer multiTypeSerializer( + SerializerConfig config, Map, JSONSchema> schemas) { + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? + SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : + config.getRegistryConfigOrClient().getRight(); + autoCreateGroup(schemaRegistryClient, config); + registerCodec(schemaRegistryClient, config); + Map, AbstractPravegaSerializer> serializerMap = schemas + .entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, + x -> new JsonSerializer<>(groupId, schemaRegistryClient, x.getValue(), config.getCodec(), + config.isRegisterSchema()))); + return new MultiplexedSerializer<>(serializerMap); + } + + static Serializer multiTypeDeserializer( + SerializerConfig config, Map, JSONSchema> schemas) { + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? + SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : + config.getRegistryConfigOrClient().getRight(); + + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); + + Map> deserializerMap = schemas + .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), + x -> new JsonDeserlizer<>(groupId, schemaRegistryClient, x, config.getDecoder(), + encodingCache))); + return new MultiplexedDeserializer<>(groupId, schemaRegistryClient, + deserializerMap, config.getDecoder(), encodingCache); + } + + static Serializer>> typedOrGenericDeserializer( + SerializerConfig config, Map, JSONSchema> schemas) { + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? 
+                SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) :
+                config.getRegistryConfigOrClient().getRight();
+        autoCreateGroup(schemaRegistryClient, config);
+        failOnCodecMismatch(schemaRegistryClient, config);
+
+        EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient);
+
+        Map> deserializerMap = schemas
+                .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(),
+                        x -> new JsonDeserlizer<>(groupId, schemaRegistryClient, x, config.getDecoder(), encodingCache)));
+        JsonGenericDeserlizer genericDeserializer = new JsonGenericDeserlizer(groupId, schemaRegistryClient, config.getDecoder(),
+                encodingCache);
+
+        return new MultiplexedAndGenericDeserializer<>(groupId, schemaRegistryClient,
+                deserializerMap, genericDeserializer, config.getDecoder(), encodingCache);
+    }
+}
diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatSerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatSerializerFactory.java
new file mode 100644
index 000000000..ce3429d89
--- /dev/null
+++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatSerializerFactory.java
@@ -0,0 +1,199 @@
+/**
+ * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ */
+package io.pravega.schemaregistry.serializers;
+
+import com.google.common.base.Preconditions;
+import com.google.protobuf.DynamicMessage;
+import io.pravega.client.stream.Serializer;
+import io.pravega.schemaregistry.client.SchemaRegistryClient;
+import io.pravega.schemaregistry.client.SchemaRegistryClientFactory;
+import io.pravega.schemaregistry.contract.data.SchemaInfo;
+import io.pravega.schemaregistry.contract.data.SerializationFormat;
+import io.pravega.schemaregistry.schemas.AvroSchema;
+import io.pravega.schemaregistry.schemas.JSONSchema;
+import io.pravega.schemaregistry.schemas.ProtobufSchema;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.avro.generic.GenericRecord;
+
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.function.BiFunction;
+import java.util.function.Function;
+
+import static io.pravega.schemaregistry.serializers.SerializerFactoryHelper.*;
+import static io.pravega.schemaregistry.serializers.WithSchema.NO_TRANSFORM;
+
+/**
+ * Internal Factory class for multi-format serializers and deserializers.
+ * These serializers can be used to work with streams when either you don't know the format beforehand or the stream allows
+ * for multiple formats.
+ */
+@Slf4j
+public class MultiFormatSerializerFactory {
+    // region multi format
+    static Serializer> serializer(SerializerConfig config) {
+        return serializerInternal(config, Collections.emptyMap());
+    }
+
+    static Serializer> deserializerWithSchema(SerializerConfig config) {
+        return deserializerInternal(config, Collections.emptyMap(), NO_TRANSFORM);
+    }
+
+    /**
+     * A deserializer that can read data where each event could be written with different serialization formats.
+     * Formats supported are protobuf, avro and json.
+     * An event serialized with avro is deserialized into {@link GenericRecord}.
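// Illustrative sketch (not part of the patch): deserializerWithSchema(...) above yields
// WithSchema<Object>, i.e. the deserialized object together with the writer's SchemaInfo, so
// callers can branch on the serialization format. It assumes WithSchema exposes getObject() and
// getSchemaContainer(), as used by MultipleFormatSerializer elsewhere in this patch.
import io.pravega.schemaregistry.contract.data.SerializationFormat;
import io.pravega.schemaregistry.serializers.WithSchema;

public class WithSchemaConsumerSketch {
    static String describe(WithSchema<Object> value) {
        SerializationFormat format = value.getSchemaContainer().getSchemaInfo().getSerializationFormat();
        return format + " event: " + value.getObject();
    }
}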
+ * An event serialized with protobuf is deserialized into {@link DynamicMessage}. + * An event serialized with json is deserialized into {@link java.util.LinkedHashMap}. + * + * This also takes a transform function which is applied on the deserialized object and should transform the object + * into the type T. + * + * @param config serializer config + * @param transform a transform function that transforms the deserialized object based on the serialization format + * into an object of type T. + * @param Type of object to get back from deserializer. + * @return a deserializer that can deserialize protobuf, json or avro events into java objects. + */ + static Serializer deserializeAsT(SerializerConfig config, + BiFunction transform) { + return deserializeAsTInternal(config, Collections.emptyMap(), transform); + } + // endregion + + private static Serializer> serializerInternal(SerializerConfig config, + Map> customSerializers) { + Preconditions.checkNotNull(config); + SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? + SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : + config.getRegistryConfigOrClient().getRight(); + autoCreateGroup(schemaRegistryClient, config); + registerCodec(schemaRegistryClient, config); + String groupId = config.getGroupId(); + + // if serializer is not already present, create a new serializer. + Function> serializerFunction = + x -> getPravegaSerializer(config, customSerializers, schemaRegistryClient, groupId, x); + return new MultipleFormatSerializer(serializerFunction); + } + + private static Serializer deserializeAsTInternal(SerializerConfig config, + Map> deserializers, + BiFunction transform) { + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? 
+ SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : + config.getRegistryConfigOrClient().getRight(); + autoCreateGroup(schemaRegistryClient, config); + failOnCodecMismatch(schemaRegistryClient, config); + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); + + AbstractPravegaDeserializer json = new JsonGenericDeserlizer(config.getGroupId(), schemaRegistryClient, + config.getDecoder(), encodingCache); + AbstractPravegaDeserializer protobuf = new ProtobufGenericDeserlizer(groupId, schemaRegistryClient, null, config.getDecoder(), + encodingCache); + AbstractPravegaDeserializer avro = new AvroGenericDeserlizer(groupId, schemaRegistryClient, null, config.getDecoder(), + encodingCache); + + Map map = new HashMap<>(); + map.put(SerializationFormat.Json, json); + map.put(SerializationFormat.Avro, avro); + map.put(SerializationFormat.Protobuf, protobuf); + + deserializers.forEach((key, value) -> { + map.put(key, new AbstractPravegaDeserializer(groupId, schemaRegistryClient, null, false, config.getDecoder(), encodingCache) { + @Override + protected Object deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) { + return value.deserialize(inputStream, writerSchema, readerSchema); + } + }); + }); + + return new MultipleFormatDeserializer<>(groupId, schemaRegistryClient, map, config.getDecoder(), + encodingCache, transform); + } + + private static Serializer> deserializerInternal(SerializerConfig config, Map> deserializers, BiFunction transform) { + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? + SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : + config.getRegistryConfigOrClient().getRight(); + autoCreateGroup(schemaRegistryClient, config); + failOnCodecMismatch(schemaRegistryClient, config); + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); + + AbstractPravegaDeserializer json = new JsonGenericDeserlizer(config.getGroupId(), schemaRegistryClient, + config.getDecoder(), encodingCache); + AbstractPravegaDeserializer protobuf = new ProtobufGenericDeserlizer(groupId, schemaRegistryClient, null, config.getDecoder(), + encodingCache); + AbstractPravegaDeserializer avro = new AvroGenericDeserlizer(groupId, schemaRegistryClient, null, config.getDecoder(), + encodingCache); + + Map map = new HashMap<>(); + map.put(SerializationFormat.Json, json); + map.put(SerializationFormat.Avro, avro); + map.put(SerializationFormat.Protobuf, protobuf); + + deserializers.forEach((key, value) -> { + map.put(key, new AbstractPravegaDeserializer(groupId, schemaRegistryClient, null, false, config.getDecoder(), encodingCache) { + @Override + protected Object deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) { + return value.deserialize(inputStream, writerSchema, readerSchema); + } + }); + }); + + return new MultiFormatWithSchemaDeserializer<>(groupId, schemaRegistryClient, map, config.getDecoder(), + encodingCache, transform); + } + + @SuppressWarnings("unchecked") + private static AbstractPravegaSerializer getPravegaSerializer( + SerializerConfig config, Map> customSerializers, + SchemaRegistryClient schemaRegistryClient, String groupId, SchemaInfo schemaInfo) { + switch (schemaInfo.getSerializationFormat()) { + case Avro: + return new AvroSerializer<>(groupId, schemaRegistryClient, + AvroSchema.from(schemaInfo), 
config.getCodec(), config.isRegisterSchema()); + case Protobuf: + ProtobufSerializer m = new ProtobufSerializer<>(groupId, schemaRegistryClient, + ProtobufSchema.from(schemaInfo), config.getCodec(), config.isRegisterSchema()); + return (AbstractPravegaSerializer) m; + case Json: + return new JsonSerializer<>(groupId, schemaRegistryClient, JSONSchema.from(schemaInfo), + config.getCodec(), config.isRegisterSchema()); + case Custom: + return getCustomSerializer(config, customSerializers, schemaRegistryClient, groupId, schemaInfo); + default: + throw new IllegalArgumentException("Serializer not provided"); + } + } + + private static AbstractPravegaSerializer getCustomSerializer( + SerializerConfig config, Map> customSerializers, + SchemaRegistryClient schemaRegistryClient, String groupId, SchemaInfo schemaInfo) { + if (customSerializers.containsKey(schemaInfo.getSerializationFormat())) { + PravegaSerializer serializer = customSerializers.get(schemaInfo.getSerializationFormat()); + return new AbstractPravegaSerializer(groupId, schemaRegistryClient, + () -> schemaInfo, config.getCodec(), config.isRegisterSchema()) { + @Override + protected void serialize(Object var, SchemaInfo schema, OutputStream outputStream) { + serializer.serialize(var, schema, outputStream); + } + }; + } else { + throw new IllegalArgumentException("Serializer for the format not supplied"); + } + } +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatWithSchemaDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatWithSchemaDeserializer.java new file mode 100644 index 000000000..5da2b415e --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatWithSchemaDeserializer.java @@ -0,0 +1,43 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import com.google.common.base.Preconditions; +import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.contract.data.SchemaInfo; +import io.pravega.schemaregistry.contract.data.SerializationFormat; + +import java.io.InputStream; +import java.util.Map; +import java.util.function.BiFunction; + +class MultiFormatWithSchemaDeserializer extends AbstractPravegaDeserializer> { + private final Map genericDeserializers; + private final BiFunction transform; + + MultiFormatWithSchemaDeserializer(String groupId, SchemaRegistryClient client, + Map genericDeserializers, + SerializerConfig.Decoder decoder, + EncodingCache encodingCache, BiFunction transform) { + super(groupId, client, null, false, decoder, encodingCache); + this.genericDeserializers = genericDeserializers; + this.transform = transform; + } + + @Override + protected WithSchema deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) { + Preconditions.checkNotNull(writerSchema); + Object obj = genericDeserializers.get(writerSchema.getSerializationFormat()).deserialize(inputStream, writerSchema, readerSchema); + if (obj instanceof WithSchema) { + obj = ((WithSchema) obj).getObject(); + } + return new WithSchema<>(writerSchema, obj, transform); + } +} \ No newline at end of file diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatTransformDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatDeserializer.java similarity index 67% rename from serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatTransformDeserializer.java rename to serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatDeserializer.java index 0098cb7d6..3e63a291f 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatTransformDeserializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatDeserializer.java @@ -18,14 +18,14 @@ import java.util.Map; import java.util.function.BiFunction; -class MultipleFormatTransformDeserializer extends AbstractPravegaDeserializer { +class MultipleFormatDeserializer extends AbstractPravegaDeserializer { private final Map genericDeserializers; private final BiFunction transform; - MultipleFormatTransformDeserializer(String groupId, SchemaRegistryClient client, - Map genericDeserializers, - SerializerConfig.Decoder decoder, - EncodingCache encodingCache, BiFunction transform) { + MultipleFormatDeserializer(String groupId, SchemaRegistryClient client, + Map genericDeserializers, + SerializerConfig.Decoder decoder, + EncodingCache encodingCache, BiFunction transform) { super(groupId, client, null, false, decoder, encodingCache); this.genericDeserializers = genericDeserializers; this.transform = transform; @@ -34,6 +34,7 @@ class MultipleFormatTransformDeserializer extends AbstractPravegaDeserializer @Override protected T deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) { Preconditions.checkNotNull(writerSchema); - return transform.apply(writerSchema.getSerializationFormat(), genericDeserializers.get(writerSchema.getSerializationFormat()).deserialize(inputStream, writerSchema, readerSchema)); + return transform.apply(writerSchema.getSerializationFormat(), + 
genericDeserializers.get(writerSchema.getSerializationFormat()).deserialize(inputStream, writerSchema, readerSchema)); } } \ No newline at end of file diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatSerializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatSerializer.java new file mode 100644 index 000000000..df249f66c --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatSerializer.java @@ -0,0 +1,41 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import io.pravega.client.stream.Serializer; +import io.pravega.schemaregistry.contract.data.SchemaInfo; +import org.apache.commons.lang3.NotImplementedException; + +import java.nio.ByteBuffer; +import java.util.concurrent.ConcurrentHashMap; +import java.util.function.Function; + +class MultipleFormatSerializer implements Serializer> { + private final Function> serializerFunction; + + private final ConcurrentHashMap> serializersMap; + + MultipleFormatSerializer(Function> serializerFunction) { + this.serializerFunction = serializerFunction; + this.serializersMap = new ConcurrentHashMap<>(); + } + + @Override + public ByteBuffer serialize(WithSchema value) { + AbstractPravegaSerializer serializer = serializersMap.computeIfAbsent(value.getSchemaContainer().getSchemaInfo(), + x -> serializerFunction.apply(value.getSchemaContainer().getSchemaInfo())); + return serializer.serialize(value.getObject()); + } + + @Override + public WithSchema deserialize(ByteBuffer serializedValue) { + throw new NotImplementedException("Deserializer not implemented"); + } +} \ No newline at end of file diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufSerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufSerializerFactory.java new file mode 100644 index 000000000..0e666d7ec --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufSerializerFactory.java @@ -0,0 +1,125 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import com.google.protobuf.DynamicMessage; +import com.google.protobuf.GeneratedMessageV3; +import com.google.protobuf.Message; +import io.pravega.client.stream.Serializer; +import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.client.SchemaRegistryClientFactory; +import io.pravega.schemaregistry.common.Either; +import io.pravega.schemaregistry.schemas.ProtobufSchema; +import lombok.extern.slf4j.Slf4j; + +import javax.annotation.Nullable; +import java.util.Map; +import java.util.stream.Collectors; + +import static io.pravega.schemaregistry.serializers.SerializerFactoryHelper.*; + +/** + * Internal Factory class for protobuf serializers and deserializers. 
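// Illustrative note (not part of the patch): the protobuf generic path deserializes into
// DynamicMessage. The same JsonFormat printer technique used by toJsonString(...) earlier in this
// series can render such a message as JSON; print(...) throws InvalidProtocolBufferException if the
// message references types unknown to the printer's type registry.
import com.google.protobuf.DynamicMessage;
import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.util.JsonFormat;

public class DynamicMessageJsonSketch {
    static String toJson(DynamicMessage message) throws InvalidProtocolBufferException {
        return JsonFormat.printer().preservingProtoFieldNames().print(message);
    }
}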
+ */ +@Slf4j +class ProtobufSerializerFactory { + static Serializer serializer(SerializerConfig config, + ProtobufSchema schemaData) { + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? + SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : + config.getRegistryConfigOrClient().getRight(); + autoCreateGroup(schemaRegistryClient, config); + registerCodec(schemaRegistryClient, config); + return new ProtobufSerializer<>(groupId, schemaRegistryClient, schemaData, config.getCodec(), + config.isRegisterSchema()); + } + + static Serializer deserializer(SerializerConfig config, + ProtobufSchema schemaData) { + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? + SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : + config.getRegistryConfigOrClient().getRight(); + autoCreateGroup(schemaRegistryClient, config); + failOnCodecMismatch(schemaRegistryClient, config); + + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); + + // schema can be null in which case deserialization will happen into dynamic message + return new ProtobufDeserlizer<>(groupId, schemaRegistryClient, schemaData, config.getDecoder(), encodingCache); + } + + static Serializer genericDeserializer(SerializerConfig config, @Nullable ProtobufSchema schema) { + SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? + SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : + config.getRegistryConfigOrClient().getRight(); + + String groupId = config.getGroupId(); + autoCreateGroup(schemaRegistryClient, config); + failOnCodecMismatch(schemaRegistryClient, config); + + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); + + return new ProtobufGenericDeserlizer(groupId, schemaRegistryClient, schema, config.getDecoder(), encodingCache); + } + + static Serializer multiTypeSerializer( + SerializerConfig config, Map, ProtobufSchema> schemas) { + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? + SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : + config.getRegistryConfigOrClient().getRight(); + autoCreateGroup(schemaRegistryClient, config); + registerCodec(schemaRegistryClient, config); + Map, AbstractPravegaSerializer> serializerMap = schemas + .entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, + x -> new ProtobufSerializer<>(groupId, schemaRegistryClient, x.getValue(), config.getCodec(), + config.isRegisterSchema()))); + return new MultiplexedSerializer<>(serializerMap); + } + + static Serializer multiTypeDeserializer( + SerializerConfig config, Map, ProtobufSchema> schemas) { + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? 
+ SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : + config.getRegistryConfigOrClient().getRight(); + autoCreateGroup(schemaRegistryClient, config); + failOnCodecMismatch(schemaRegistryClient, config); + + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); + + Map> deserializerMap = schemas + .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), + x -> new ProtobufDeserlizer<>(groupId, schemaRegistryClient, x, config.getDecoder(), encodingCache))); + return new MultiplexedDeserializer<>(groupId, schemaRegistryClient, deserializerMap, config.getDecoder(), encodingCache); + } + + static Serializer> typedOrGenericDeserializer( + SerializerConfig config, Map, ProtobufSchema> schemas) { + String groupId = config.getGroupId(); + SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? + SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : + config.getRegistryConfigOrClient().getRight(); + autoCreateGroup(schemaRegistryClient, config); + failOnCodecMismatch(schemaRegistryClient, config); + + EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); + + Map> deserializerMap = schemas + .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), + x -> new ProtobufDeserlizer<>(groupId, schemaRegistryClient, x, config.getDecoder(), encodingCache))); + ProtobufGenericDeserlizer genericDeserializer = new ProtobufGenericDeserlizer(groupId, schemaRegistryClient, null, + config.getDecoder(), encodingCache); + return new MultiplexedAndGenericDeserializer<>(groupId, schemaRegistryClient, deserializerMap, genericDeserializer, + config.getDecoder(), encodingCache); + } +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java index b5364e283..ba12f673d 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java @@ -152,7 +152,9 @@ public SerializerConfigBuilder createGroup(SerializationFormat serializationForm * @return Builder */ public SerializerConfigBuilder createGroup(SerializationFormat serializationFormat, boolean allowMultipleTypes) { - return createGroup(serializationFormat, Compatibility.fullTransitive(), allowMultipleTypes); + Compatibility policy = serializationFormat.equals(SerializationFormat.Any) ? 
Compatibility.allowAny() : + Compatibility.fullTransitive(); + return createGroup(serializationFormat, policy, allowMultipleTypes); } /** diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java index c056c1b9a..a48ff4737 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java @@ -9,44 +9,33 @@ */ package io.pravega.schemaregistry.serializers; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.base.Preconditions; import com.google.protobuf.DynamicMessage; import com.google.protobuf.GeneratedMessageV3; import com.google.protobuf.Message; -import com.google.protobuf.util.JsonFormat; import io.pravega.client.stream.Serializer; -import io.pravega.schemaregistry.client.SchemaRegistryClient; -import io.pravega.schemaregistry.client.SchemaRegistryClientFactory; import io.pravega.schemaregistry.common.Either; import io.pravega.schemaregistry.contract.data.EncodingInfo; -import io.pravega.schemaregistry.contract.data.SchemaInfo; import io.pravega.schemaregistry.contract.data.SerializationFormat; import io.pravega.schemaregistry.schemas.AvroSchema; import io.pravega.schemaregistry.schemas.JSONSchema; import io.pravega.schemaregistry.schemas.ProtobufSchema; import io.pravega.schemaregistry.schemas.SchemaContainer; -import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; import org.apache.avro.generic.GenericRecord; import javax.annotation.Nullable; -import java.io.InputStream; -import java.io.OutputStream; import java.nio.ByteBuffer; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; import java.util.Map; import java.util.function.BiFunction; -import java.util.stream.Collectors; + +import static io.pravega.schemaregistry.serializers.WithSchema.JSON_TRANSFORM; +import static io.pravega.schemaregistry.serializers.WithSchema.NO_TRANSFORM; @Slf4j public class SerializerFactory { public static final String ENCODE = "encode"; // region avro - /** * Creates a typed avro serializer for the Schema. The serializer implementation returned from this method is * responsible for interacting with schema registry service and ensures that only valid registered schema can be used. @@ -55,21 +44,13 @@ public class SerializerFactory { * It does not implement {@link Serializer#deserialize(ByteBuffer)}. * * @param config Serializer Config used for instantiating a new serializer. - * @param schemaData Schema data that encapsulates an AvroSchema + * @param schemaContainer Schema container that encapsulates an AvroSchema * @param Type of event. It accepts either POJO or Avro generated classes and serializes them. * @return A Serializer Implementation that can be used in {@link io.pravega.client.stream.EventStreamWriter} or * {@link io.pravega.client.stream.TransactionalEventStreamWriter}. */ - public static Serializer avroSerializer(SerializerConfig config, AvroSchema schemaData) { - Preconditions.checkNotNull(config); - Preconditions.checkNotNull(schemaData); - SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? 
- SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : - config.getRegistryConfigOrClient().getRight(); - autoCreateGroup(schemaRegistryClient, config); - registerCodec(schemaRegistryClient, config); - String groupId = config.getGroupId(); - return new AvroSerializer<>(groupId, schemaRegistryClient, schemaData, config.getCodec(), config.isRegisterSchema()); + public static Serializer avroSerializer(SerializerConfig config, AvroSchema schemaContainer) { + return AvroSerializerFactory.serializer(config, schemaContainer); } /** @@ -80,24 +61,12 @@ public static Serializer avroSerializer(SerializerConfig config, AvroSche * It does not implement {@link Serializer#serialize(Object)}. * * @param config Serializer Config used for instantiating a new serializer. - * @param schemaData Schema data that encapsulates an AvroSchema + * @param schemaContainer Schema container that encapsulates an AvroSchema * @param Type of event. The typed event should be an avro generated class. For generic type use {@link #avroGenericDeserializer} * @return A deserializer Implementation that can be used in {@link io.pravega.client.stream.EventStreamReader}. */ - public static Serializer avroDeserializer(SerializerConfig config, AvroSchema schemaData) { - Preconditions.checkNotNull(config); - Preconditions.checkNotNull(schemaData); - SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? - SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : - config.getRegistryConfigOrClient().getRight(); - - String groupId = config.getGroupId(); - autoCreateGroup(schemaRegistryClient, config); - failOnCodecMismatch(schemaRegistryClient, config); - - EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - - return new AvroDeserlizer<>(groupId, schemaRegistryClient, schemaData, config.getDecoder(), encodingCache); + public static Serializer avroDeserializer(SerializerConfig config, AvroSchema schemaContainer) { + return AvroSerializerFactory.deserializer(config, schemaContainer); } /** @@ -108,21 +77,12 @@ public static Serializer avroDeserializer(SerializerConfig config, AvroSc * It does not implement {@link Serializer#serialize(Object)}. * * @param config Serializer Config used for instantiating a new serializer. - * @param schemaData Schema data that encapsulates an AvroSchema. It can be null to indicate that writer schema should + * @param schemaContainer Schema container that encapsulates an AvroSchema. It can be null to indicate that writer schema should * be used for deserialization. * @return A deserializer Implementation that can be used in {@link io.pravega.client.stream.EventStreamReader}. */ - public static Serializer avroGenericDeserializer(SerializerConfig config, - @Nullable AvroSchema schemaData) { - String groupId = config.getGroupId(); - SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? 
- SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : - config.getRegistryConfigOrClient().getRight(); - autoCreateGroup(schemaRegistryClient, config); - failOnCodecMismatch(schemaRegistryClient, config); - EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - - return new AvroGenericDeserlizer(groupId, schemaRegistryClient, schemaData, config.getDecoder(), encodingCache); + public static Serializer avroGenericDeserializer(SerializerConfig config, @Nullable AvroSchema schemaContainer) { + return AvroSerializerFactory.genericDeserializer(config, schemaContainer); } /** @@ -134,20 +94,7 @@ public static Serializer avroGenericDeserializer(SerializerConfig * @return a Serializer which can serialize events of different types for which schemas are supplied. */ public static Serializer avroMultiTypeSerializer(SerializerConfig config, Map, AvroSchema> schemas) { - Preconditions.checkNotNull(config); - Preconditions.checkNotNull(schemas); - - String groupId = config.getGroupId(); - SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? - SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : - config.getRegistryConfigOrClient().getRight(); - autoCreateGroup(schemaRegistryClient, config); - registerCodec(schemaRegistryClient, config); - Map, AbstractPravegaSerializer> serializerMap = schemas - .entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, - x -> new AvroSerializer<>(groupId, schemaRegistryClient, x.getValue(), config.getCodec(), - config.isRegisterSchema()))); - return new MultiplexedSerializer<>(serializerMap); + return AvroSerializerFactory.multiTypeSerializer(config, schemas); } /** @@ -161,23 +108,7 @@ public static Serializer avroMultiTypeSerializer(SerializerConfig config, */ public static Serializer avroMultiTypeDeserializer( SerializerConfig config, Map, AvroSchema> schemas) { - Preconditions.checkNotNull(config); - Preconditions.checkNotNull(schemas); - - String groupId = config.getGroupId(); - SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? - SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : - config.getRegistryConfigOrClient().getRight(); - autoCreateGroup(schemaRegistryClient, config); - failOnCodecMismatch(schemaRegistryClient, config); - - EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - - Map> deserializerMap = schemas - .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), - x -> new AvroDeserlizer<>(groupId, schemaRegistryClient, x, config.getDecoder(), encodingCache))); - return new MultiplexedDeserializer<>(groupId, schemaRegistryClient, deserializerMap, config.getDecoder(), - encodingCache); + return AvroSerializerFactory.multiTypeDeserializer(config, schemas); } /** @@ -190,27 +121,9 @@ public static Serializer avroMultiTypeDeserializer( * @return a Deserializer which can deserialize events of different types in the stream into typed objects or a generic * object */ - public static Serializer> avroTypedOrGenericDeserializer( + public static Serializer> avroTypedOrGenericDeserializer( SerializerConfig config, Map, AvroSchema> schemas) { - Preconditions.checkNotNull(config); - Preconditions.checkNotNull(schemas); - - String groupId = config.getGroupId(); - SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? 
- SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : - config.getRegistryConfigOrClient().getRight(); - autoCreateGroup(schemaRegistryClient, config); - failOnCodecMismatch(schemaRegistryClient, config); - - EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - - Map> deserializerMap = schemas - .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), - x -> new AvroDeserlizer<>(groupId, schemaRegistryClient, x, config.getDecoder(), encodingCache))); - AbstractPravegaDeserializer genericDeserializer = new AvroGenericDeserlizer(groupId, schemaRegistryClient, - null, config.getDecoder(), encodingCache); - return new MultiplexedAndGenericDeserializer<>(groupId, schemaRegistryClient, deserializerMap, genericDeserializer, - config.getDecoder(), encodingCache); + return AvroSerializerFactory.typedOrGenericDeserializer(config, schemas); } // endregion @@ -224,21 +137,14 @@ public static Serializer> avroTypedOrGenericDeseria * It does not implement {@link Serializer#deserialize(ByteBuffer)}. * * @param config Serializer Config used for instantiating a new serializer. - * @param schemaData Schema data that encapsulates an Protobuf Schema. + * @param schemaContainer Schema container that encapsulates an Protobuf Schema. * @param Type of event. * @return A Serializer Implementation that can be used in {@link io.pravega.client.stream.EventStreamWriter} or * {@link io.pravega.client.stream.TransactionalEventStreamWriter}. */ public static Serializer protobufSerializer(SerializerConfig config, - ProtobufSchema schemaData) { - String groupId = config.getGroupId(); - SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? - SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : - config.getRegistryConfigOrClient().getRight(); - autoCreateGroup(schemaRegistryClient, config); - registerCodec(schemaRegistryClient, config); - return new ProtobufSerializer<>(groupId, schemaRegistryClient, schemaData, config.getCodec(), - config.isRegisterSchema()); + ProtobufSchema schemaContainer) { + return ProtobufSerializerFactory.serializer(config, schemaContainer); } /** @@ -249,23 +155,13 @@ public static Serializer protobufSerializer(SerializerCon * It does not implement {@link Serializer#serialize(Object)}. * * @param config Serializer Config used for instantiating a new serializer. - * @param schemaData Schema data that encapsulates an ProtobufSchema + * @param schemaContainer Schema container that encapsulates an ProtobufSchema * @param Type of event. The typed event should be an avro generated class. For generic type use {@link #protobufGenericDeserializer} * @return A deserializer Implementation that can be used in {@link io.pravega.client.stream.EventStreamReader}. */ public static Serializer protobufDeserializer(SerializerConfig config, - ProtobufSchema schemaData) { - String groupId = config.getGroupId(); - SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? 
- SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : - config.getRegistryConfigOrClient().getRight(); - autoCreateGroup(schemaRegistryClient, config); - failOnCodecMismatch(schemaRegistryClient, config); - - EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - - // schema can be null in which case deserialization will happen into dynamic message - return new ProtobufDeserlizer<>(groupId, schemaRegistryClient, schemaData, config.getDecoder(), encodingCache); + ProtobufSchema schemaContainer) { + return ProtobufSerializerFactory.deserializer(config, schemaContainer); } /** @@ -276,21 +172,12 @@ public static Serializer protobufDeserializer( * It does not implement {@link Serializer#serialize(Object)}. * * @param config Serializer Config used for instantiating a new serializer. - * @param schema Schema data that encapsulates an ProtobufSchema. + * @param schema Schema container that encapsulates an ProtobufSchema. * @return A deserializer Implementation that can be used in {@link io.pravega.client.stream.EventStreamReader}. */ - public static Serializer protobufGenericDeserializer(SerializerConfig config, @Nullable ProtobufSchema schema) { - SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? - SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : - config.getRegistryConfigOrClient().getRight(); - - String groupId = config.getGroupId(); - autoCreateGroup(schemaRegistryClient, config); - failOnCodecMismatch(schemaRegistryClient, config); - - EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - - return new ProtobufGenericDeserlizer(groupId, schemaRegistryClient, schema, config.getDecoder(), encodingCache); + public static Serializer protobufGenericDeserializer(SerializerConfig config, + @Nullable ProtobufSchema schema) { + return ProtobufSerializerFactory.genericDeserializer(config, schema); } /** @@ -303,17 +190,7 @@ public static Serializer protobufGenericDeserializer(SerializerC */ public static Serializer protobufMultiTypeSerializer( SerializerConfig config, Map, ProtobufSchema> schemas) { - String groupId = config.getGroupId(); - SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? - SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : - config.getRegistryConfigOrClient().getRight(); - autoCreateGroup(schemaRegistryClient, config); - registerCodec(schemaRegistryClient, config); - Map, AbstractPravegaSerializer> serializerMap = schemas - .entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, - x -> new ProtobufSerializer<>(groupId, schemaRegistryClient, x.getValue(), config.getCodec(), - config.isRegisterSchema()))); - return new MultiplexedSerializer<>(serializerMap); + return ProtobufSerializerFactory.multiTypeSerializer(config, schemas); } /** @@ -327,19 +204,7 @@ public static Serializer protobufMultiTypeSeri */ public static Serializer protobufMultiTypeDeserializer( SerializerConfig config, Map, ProtobufSchema> schemas) { - String groupId = config.getGroupId(); - SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? 
- SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : - config.getRegistryConfigOrClient().getRight(); - autoCreateGroup(schemaRegistryClient, config); - failOnCodecMismatch(schemaRegistryClient, config); - - EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - - Map> deserializerMap = schemas - .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), - x -> new ProtobufDeserlizer<>(groupId, schemaRegistryClient, x, config.getDecoder(), encodingCache))); - return new MultiplexedDeserializer<>(groupId, schemaRegistryClient, deserializerMap, config.getDecoder(), encodingCache); + return ProtobufSerializerFactory.multiTypeDeserializer(config, schemas); } /** @@ -353,22 +218,7 @@ public static Serializer protobufMultiTypeDese */ public static Serializer> protobufTypedOrGenericDeserializer( SerializerConfig config, Map, ProtobufSchema> schemas) { - String groupId = config.getGroupId(); - SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? - SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : - config.getRegistryConfigOrClient().getRight(); - autoCreateGroup(schemaRegistryClient, config); - failOnCodecMismatch(schemaRegistryClient, config); - - EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - - Map> deserializerMap = schemas - .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), - x -> new ProtobufDeserlizer<>(groupId, schemaRegistryClient, x, config.getDecoder(), encodingCache))); - ProtobufGenericDeserlizer genericDeserializer = new ProtobufGenericDeserlizer(groupId, schemaRegistryClient, null, - config.getDecoder(), encodingCache); - return new MultiplexedAndGenericDeserializer<>(groupId, schemaRegistryClient, deserializerMap, genericDeserializer, - config.getDecoder(), encodingCache); + return ProtobufSerializerFactory.typedOrGenericDeserializer(config, schemas); } //endregion @@ -382,20 +232,13 @@ public static Serializer Type of event. * @return A Serializer Implementation that can be used in {@link io.pravega.client.stream.EventStreamWriter} or * {@link io.pravega.client.stream.TransactionalEventStreamWriter}. */ - public static Serializer jsonSerializer(SerializerConfig config, JSONSchema schemaData) { - String groupId = config.getGroupId(); - SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? - SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : - config.getRegistryConfigOrClient().getRight(); - autoCreateGroup(schemaRegistryClient, config); - registerCodec(schemaRegistryClient, config); - return new JsonSerializer<>(groupId, schemaRegistryClient, schemaData, config.getCodec(), - config.isRegisterSchema()); + public static Serializer jsonSerializer(SerializerConfig config, JSONSchema schemaContainer) { + return JsonSerializerFactory.serializer(config, schemaContainer); } /** @@ -406,22 +249,12 @@ public static Serializer jsonSerializer(SerializerConfig config, JSONSche * It does not implement {@link Serializer#serialize(Object)}. * * @param config Serializer Config used for instantiating a new serializer. - * @param schemaData Schema data that encapsulates an JSONSchema + * @param schemaContainer Schema container that encapsulates an JSONSchema * @param Type of event. The typed event should be an avro generated class. 
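// Aside (not part of the patch): the multi-type serializers and deserializers above delegate
// to MultiplexedSerializer / MultiplexedDeserializer, whose deserializer map is keyed by the
// schema's type name (see the Collectors.toMap(x -> x.getSchemaInfo().getType(), ...) calls).
// A rough standalone sketch of that routing idea, assuming a hypothetical TypedDecoder
// interface in place of the real AbstractPravegaDeserializer; all names here are illustrative.

import java.io.InputStream;
import java.util.Map;

interface TypedDecoder<T> {
    T decode(InputStream payload);
}

final class ByTypeNameDispatcher<T> {
    private final Map<String, TypedDecoder<? extends T>> decoders;

    ByTypeNameDispatcher(Map<String, TypedDecoder<? extends T>> decoders) {
        this.decoders = decoders;
    }

    T decode(String writerTypeName, InputStream payload) {
        TypedDecoder<? extends T> decoder = decoders.get(writerTypeName);
        if (decoder == null) {
            // The "typedOrGeneric" variants in the patch fall back to a generic
            // deserializer here instead of failing outright.
            throw new IllegalArgumentException("No schema registered for type " + writerTypeName);
        }
        return decoder.decode(payload);
    }
}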
For generic type use {@link #jsonGenericDeserializer} * @return A deserializer Implementation that can be used in {@link io.pravega.client.stream.EventStreamReader}. */ - public static Serializer jsonDeserializer(SerializerConfig config, JSONSchema schemaData) { - String groupId = config.getGroupId(); - SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? - SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : - config.getRegistryConfigOrClient().getRight(); - autoCreateGroup(schemaRegistryClient, config); - failOnCodecMismatch(schemaRegistryClient, config); - - EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - - // schema can be null in which case deserialization will happen into dynamic message - return new JsonDeserlizer<>(groupId, schemaRegistryClient, schemaData, config.getDecoder(), encodingCache); + public static Serializer jsonDeserializer(SerializerConfig config, JSONSchema schemaContainer) { + return JsonSerializerFactory.deserializer(config, schemaContainer); } /** @@ -433,17 +266,8 @@ public static Serializer jsonDeserializer(SerializerConfig config, JSONSc * @param config Serializer Config used for instantiating a new serializer. * @return A deserializer Implementation that can be used in {@link io.pravega.client.stream.EventStreamReader}. */ - public static Serializer jsonGenericDeserializer(SerializerConfig config) { - SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? - SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : - config.getRegistryConfigOrClient().getRight(); - - String groupId = config.getGroupId(); - - EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - - return new JsonGenericDeserlizer(groupId, schemaRegistryClient, config.getDecoder(), - encodingCache); + public static Serializer> jsonGenericDeserializer(SerializerConfig config) { + return JsonSerializerFactory.genericDeserializer(config); } /** @@ -456,15 +280,7 @@ public static Serializer jsonGenericDeserializer(SerializerCo * @return A deserializer Implementation that can be used in {@link io.pravega.client.stream.EventStreamReader}. */ public static Serializer jsonStringDeserializer(SerializerConfig config) { - SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? - SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : - config.getRegistryConfigOrClient().getRight(); - - String groupId = config.getGroupId(); - - EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - - return new JsonStringDeserializer(groupId, schemaRegistryClient, config.getDecoder(), encodingCache); + return JsonSerializerFactory.jsonStringDeserializer(config); } /** @@ -477,17 +293,7 @@ public static Serializer jsonStringDeserializer(SerializerConfig config) */ public static Serializer jsonMultiTypeSerializer( SerializerConfig config, Map, JSONSchema> schemas) { - String groupId = config.getGroupId(); - SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? 
- SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : - config.getRegistryConfigOrClient().getRight(); - autoCreateGroup(schemaRegistryClient, config); - registerCodec(schemaRegistryClient, config); - Map, AbstractPravegaSerializer> serializerMap = schemas - .entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, - x -> new JsonSerializer<>(groupId, schemaRegistryClient, x.getValue(), config.getCodec(), - config.isRegisterSchema()))); - return new MultiplexedSerializer<>(serializerMap); + return JsonSerializerFactory.multiTypeSerializer(config, schemas); } /** @@ -501,19 +307,7 @@ public static Serializer jsonMultiTypeSerializer( */ public static Serializer jsonMultiTypeDeserializer( SerializerConfig config, Map, JSONSchema> schemas) { - String groupId = config.getGroupId(); - SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? - SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : - config.getRegistryConfigOrClient().getRight(); - - EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - - Map> deserializerMap = schemas - .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), - x -> new JsonDeserlizer<>(groupId, schemaRegistryClient, x, config.getDecoder(), - encodingCache))); - return new MultiplexedDeserializer<>(groupId, schemaRegistryClient, - deserializerMap, config.getDecoder(), encodingCache); + return JsonSerializerFactory.multiTypeDeserializer(config, schemas); } /** @@ -525,25 +319,9 @@ public static Serializer jsonMultiTypeDeserializer( * @param Base type of schemas. * @return a Deserializer which can deserialize events of different types in the stream into typed objects. */ - public static Serializer> jsonTypedOrGenericDeserializer( + public static Serializer>> jsonTypedOrGenericDeserializer( SerializerConfig config, Map, JSONSchema> schemas) { - String groupId = config.getGroupId(); - SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? - SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : - config.getRegistryConfigOrClient().getRight(); - autoCreateGroup(schemaRegistryClient, config); - failOnCodecMismatch(schemaRegistryClient, config); - - EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - - Map> deserializerMap = schemas - .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), - x -> new JsonDeserlizer<>(groupId, schemaRegistryClient, x, config.getDecoder(), encodingCache))); - JsonGenericDeserlizer genericDeserializer = new JsonGenericDeserlizer(groupId, schemaRegistryClient, config.getDecoder(), - encodingCache); - - return new MultiplexedAndGenericDeserializer<>(groupId, schemaRegistryClient, - deserializerMap, genericDeserializer, config.getDecoder(), encodingCache); + return JsonSerializerFactory.typedOrGenericDeserializer(config, schemas); } //endregion @@ -560,20 +338,7 @@ public static Serializer> jsonTypedOrGenericDes * @return Serializer that uses user supplied serialization function for serializing events. */ public static Serializer customSerializer(SerializerConfig config, SchemaContainer schema, PravegaSerializer serializer) { - - String groupId = config.getGroupId(); - SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? 
- SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : - config.getRegistryConfigOrClient().getRight(); - autoCreateGroup(schemaRegistryClient, config); - registerCodec(schemaRegistryClient, config); - return new AbstractPravegaSerializer(groupId, schemaRegistryClient, - schema, config.getCodec(), config.isRegisterSchema()) { - @Override - protected void serialize(T var, SchemaInfo schema, OutputStream outputStream) { - serializer.serialize(var, schema, outputStream); - } - }; + return CustomSerializerFactory.serializer(config, schema, serializer); } /** @@ -588,134 +353,85 @@ protected void serialize(T var, SchemaInfo schema, OutputStream outputStream) { */ public static Serializer customDeserializer(SerializerConfig config, @Nullable SchemaContainer schema, PravegaDeserializer deserializer) { - - String groupId = config.getGroupId(); - SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? - SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : - config.getRegistryConfigOrClient().getRight(); - autoCreateGroup(schemaRegistryClient, config); - failOnCodecMismatch(schemaRegistryClient, config); - - EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - - return new AbstractPravegaDeserializer(groupId, schemaRegistryClient, schema, false, - config.getDecoder(), encodingCache) { - @Override - protected T deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) { - return deserializer.deserialize(inputStream, writerSchema, readerSchema); - } - }; + return CustomSerializerFactory.deserializer(config, schema, deserializer); } // endregion - // region multi format deserializer + // region multiformat + /** + * A multi format serializer that takes objects with schemas for the three supported formats - avro, protobuf and json. + * It then serializes the object using the format specific serializer. The events are supplied using an encapsulating + * object called WithSchema which has both the event and the schema. + * It only serializes the events while ensuring that the corresponding schema was registered with the service. + * If {@link SerializerConfig#registerSchema} is set to true, it registers the schema before using it. + * This serializer contacts schema registry once for every new schema that it encounters, and it fetches the + * encoding id for the schema and codec pair. + * + * @param config Serializer config + * @return A multi format serializer which serializes events from all three of Avro, Protobuf and json formats. + */ + public static Serializer> serializerWithSchema(SerializerConfig config) { + return MultiFormatSerializerFactory.serializer(config); + } + + /** + * A deserializer that can deserialize data where each event could be written with either of avro, protobuf or json + * serialization formats. It deserializes them into format specific generic objects. + * An event serialized with avro is deserialized into {@link GenericRecord}. + * An event serialized with protobuf is deserialized into {@link DynamicMessage}. + * An event serialized with json is deserialized into {@link java.util.LinkedHashMap}. + * + * @param config serializer config + * @return a deserializer that can deserialize events serialized as protobuf, json or avro into java objects. 
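// Aside (not part of the patch): the multi-format methods described above keep one generic
// deserializer per SerializationFormat (Avro, Protobuf, Json) and select it from the writer
// schema of each event, as the format-to-deserializer map in MultiFormatSerializerFactory
// shows. A simplified sketch of that dispatch, using a hypothetical FormatDecoder interface
// and a local Format enum instead of the real registry types; names are illustrative only.

import java.io.InputStream;
import java.util.EnumMap;
import java.util.Map;

final class ByFormatDispatcher {
    enum Format { Avro, Protobuf, Json }

    interface FormatDecoder {
        Object decode(InputStream payload);
    }

    private final Map<Format, FormatDecoder> decoders = new EnumMap<>(Format.class);

    ByFormatDispatcher(FormatDecoder avro, FormatDecoder protobuf, FormatDecoder json) {
        decoders.put(Format.Avro, avro);
        decoders.put(Format.Protobuf, protobuf);
        decoders.put(Format.Json, json);
    }

    Object decode(Format writerFormat, InputStream payload) {
        // The writer schema's serialization format picks which generic deserializer
        // handles the bytes; the result is a GenericRecord, DynamicMessage or Map-like object.
        return decoders.get(writerFormat).decode(payload);
    }
}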
+ */ + public static Serializer> deserializerWithSchema(SerializerConfig config) { + return MultiFormatSerializerFactory.deserializerWithSchema(config); + } + /** * A deserializer that can read data where each event could be written with either of avro, protobuf or json * serialization formats. + * An event serialized with avro is deserialized into {@link GenericRecord}. + * An event serialized with protobuf is deserialized into {@link DynamicMessage}. + * An event serialized with json is deserialized into {@link java.util.LinkedHashMap}. * * @param config serializer config * @return a deserializer that can deserialize events serialized as protobuf, json or avro into java objects. */ - public static Serializer multiFormatGenericDeserializer(SerializerConfig config) { - return deserializeAsT(config, Collections.emptyMap(), (x, y) -> y); + public static Serializer genericDeserializer(SerializerConfig config) { + return deserializeAsT(config, NO_TRANSFORM); } /** - * A deserializer that can read data where each event could be written with different serialization formats and - * deserializes and converts them to a json string. + * This is a convenience serializer shortcut that calls {@link #deserializeAsT} with a transform to + * convert the object to JSON string. * * @param config serializer config * @return a deserializer that can deserialize protobuf, json or avro events into java objects. */ public static Serializer deserializeAsJsonString(SerializerConfig config) { - JsonFormat.Printer protoBufPrinter = JsonFormat.printer().preservingProtoFieldNames().usingTypeRegistry(JsonFormat.TypeRegistry.newBuilder().build()); - ObjectMapper objectMapper = new ObjectMapper(); - return deserializeAsT(config, Collections.emptyMap(), (x, y) -> toJsonString(x, y, protoBufPrinter, objectMapper)); + return deserializeAsT(config, JSON_TRANSFORM); } /** * A deserializer that can read data where each event could be written with different serialization formats. - * It has built in deserializers for protobuf, avro and json. In addition to it, users can supply - * deserializers {@link PravegaDeserializer} for their custom formats which this deserializer will use. + * Formats supported are protobuf, avro and json. + * An event serialized with avro is deserialized into {@link GenericRecord}. + * An event serialized with protobuf is deserialized into {@link DynamicMessage}. + * An event serialized with json is deserialized into {@link java.util.LinkedHashMap}. + * * This also takes a transform function which is applied on the deserialized object and should transform the object * into the type T. * * @param config serializer config - * @param deserializers Map of serialization format to corresponding deserializer. * @param transform a transform function that transforms the deserialized object based on the serialization format * into an object of type T. * @param Type of object to get back from deserializer. * @return a deserializer that can deserialize protobuf, json or avro events into java objects. */ - public static Serializer deserializeAsT(SerializerConfig config, Map> deserializers, BiFunction transform) { - String groupId = config.getGroupId(); - SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? 
- SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : - config.getRegistryConfigOrClient().getRight(); - autoCreateGroup(schemaRegistryClient, config); - failOnCodecMismatch(schemaRegistryClient, config); - EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - - AbstractPravegaDeserializer json = new JsonGenericDeserlizer(config.getGroupId(), schemaRegistryClient, - config.getDecoder(), encodingCache); - AbstractPravegaDeserializer protobuf = new ProtobufGenericDeserlizer(groupId, schemaRegistryClient, null, config.getDecoder(), - encodingCache); - AbstractPravegaDeserializer avro = new AvroGenericDeserlizer(groupId, schemaRegistryClient, null, config.getDecoder(), - encodingCache); - - Map map = new HashMap<>(); - map.put(SerializationFormat.Json, json); - map.put(SerializationFormat.Avro, avro); - map.put(SerializationFormat.Protobuf, protobuf); - - deserializers.forEach((key, value) -> { - map.put(key, new AbstractPravegaDeserializer(groupId, schemaRegistryClient, null, false, config.getDecoder(), encodingCache) { - @Override - protected Object deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) { - return value.deserialize(inputStream, writerSchema, readerSchema); - } - }); - }); - - return new MultipleFormatTransformDeserializer<>(groupId, schemaRegistryClient, map, config.getDecoder(), - encodingCache, transform); + public static Serializer deserializeAsT(SerializerConfig config, + BiFunction transform) { + return MultiFormatSerializerFactory.deserializeAsT(config, transform); } // endregion - - private static void autoCreateGroup(SchemaRegistryClient client, SerializerConfig config) { - if (config.isCreateGroup()) { - client.addGroup(config.getGroupId(), config.getGroupProperties()); - } - } - - private static void registerCodec(SchemaRegistryClient client, SerializerConfig config) { - if (config.isRegisterCodec()) { - client.addCodecType(config.getGroupId(), config.getCodec().getCodecType()); - } - } - - private static void failOnCodecMismatch(SchemaRegistryClient client, SerializerConfig config) { - if (config.isFailOnCodecMismatch()) { - List codecTypesInGroup = client.getCodecTypes(config.getGroupId()); - if (!config.getDecoder().getCodecTypes().containsAll(codecTypesInGroup)) { - log.warn("Not all CodecTypes are supported by reader. Required codecTypes = {}", codecTypesInGroup); - throw new RuntimeException(String.format("Need all codecTypes in %s", codecTypesInGroup.toString())); - } - } - } - - @SneakyThrows - private static String toJsonString(SerializationFormat format, Object deserialize, JsonFormat.Printer printer, ObjectMapper objectMapper) { - switch (format) { - case Avro: - return deserialize.toString(); - case Protobuf: - return printer.print((DynamicMessage) deserialize); - case Json: - return objectMapper.writeValueAsString(((JsonGenericObject) deserialize).getObject()); - default: - throw new IllegalArgumentException("only avro protobuf and json can be converted to json string"); - } - } } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactoryHelper.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactoryHelper.java new file mode 100644 index 000000000..04649f4ba --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactoryHelper.java @@ -0,0 +1,40 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import io.pravega.schemaregistry.client.SchemaRegistryClient; +import lombok.extern.slf4j.Slf4j; + +import java.util.List; + +@Slf4j +class SerializerFactoryHelper { + static void autoCreateGroup(SchemaRegistryClient client, SerializerConfig config) { + if (config.isCreateGroup()) { + client.addGroup(config.getGroupId(), config.getGroupProperties()); + } + } + + static void registerCodec(SchemaRegistryClient client, SerializerConfig config) { + if (config.isRegisterCodec()) { + client.addCodecType(config.getGroupId(), config.getCodec().getCodecType()); + } + } + + static void failOnCodecMismatch(SchemaRegistryClient client, SerializerConfig config) { + if (config.isFailOnCodecMismatch()) { + List codecTypesInGroup = client.getCodecTypes(config.getGroupId()); + if (!config.getDecoder().getCodecTypes().containsAll(codecTypesInGroup)) { + log.warn("Not all CodecTypes are supported by reader. Required codecTypes = {}", codecTypesInGroup); + throw new RuntimeException(String.format("Need all codecTypes in %s", codecTypesInGroup.toString())); + } + } + } +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/WithSchema.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/WithSchema.java new file mode 100644 index 000000000..be17749a0 --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/WithSchema.java @@ -0,0 +1,180 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.module.jsonSchema.JsonSchema; +import com.google.protobuf.DescriptorProtos; +import com.google.protobuf.DynamicMessage; +import com.google.protobuf.util.JsonFormat; +import io.pravega.schemaregistry.contract.data.SchemaInfo; +import io.pravega.schemaregistry.contract.data.SerializationFormat; +import io.pravega.schemaregistry.schemas.AvroSchema; +import io.pravega.schemaregistry.schemas.JSONSchema; +import io.pravega.schemaregistry.schemas.ProtobufSchema; +import io.pravega.schemaregistry.schemas.SchemaContainer; +import lombok.AccessLevel; +import lombok.Getter; +import lombok.SneakyThrows; +import org.apache.avro.Schema; +import org.apache.avro.generic.IndexedRecord; + +import java.util.function.BiFunction; + +/** + * Container class for object with its corresponding schema. + * @param Type of object. 
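// Aside (not part of the patch): failOnCodecMismatch above refuses to build a deserializer
// when the reader's decoder does not understand every codec type registered on the group
// (the containsAll check). A tiny standalone illustration of that condition; the codec
// names and the CodecCheckDemo class are made up for the example.

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

final class CodecCheckDemo {
    public static void main(String[] args) {
        Set<String> readerSupports = new HashSet<>(Arrays.asList("none", "gzip"));
        List<String> codecTypesInGroup = Arrays.asList("none", "gzip", "snappy");

        // Same shape as failOnCodecMismatch: every codec any writer may have used in the
        // group must be decodable by this reader, otherwise reading is rejected up front.
        if (!readerSupports.containsAll(codecTypesInGroup)) {
            System.out.println("Refusing to read: reader lacks support for some of " + codecTypesInGroup);
        } else {
            System.out.println("Reader can decode every codec registered on the group");
        }
    }
}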
+ */ +public class WithSchema { + public static final BiFunction JSON_TRANSFORM = WithSchema::toJsonString; + public static final BiFunction NO_TRANSFORM = (x, y) -> y; + + private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + private static final JsonFormat.Printer PRINTER = JsonFormat.printer().preservingProtoFieldNames() + .usingTypeRegistry(JsonFormat.TypeRegistry.newBuilder().build()); + + @Getter(AccessLevel.PACKAGE) + private final SchemaContainer schemaContainer; + @Getter + private final Object object; + private final BiFunction transform; + + WithSchema(SchemaInfo schemaInfo, Object obj, BiFunction transform) { + this.object = obj; + this.transform = transform; + if (schemaInfo != null) { + switch (schemaInfo.getSerializationFormat()) { + case Avro: + schemaContainer = AvroSchema.from(schemaInfo); + break; + case Protobuf: + schemaContainer = ProtobufSchema.from(schemaInfo); + break; + case Json: + schemaContainer = JSONSchema.from(schemaInfo); + break; + case Custom: + schemaContainer = () -> schemaInfo; + break; + default: + throw new IllegalArgumentException("Serialization format not supported"); + } + } else { + schemaContainer = null; + } + } + + /** + * Check whether the schema is of type Avro. + * + * @return True if the schema is for avro, false otherwise. + */ + public boolean hasAvroSchema() { + return schemaContainer instanceof AvroSchema; + } + + /** + * Avro Schema for the underlying deserialized object. This is available if {@link this#hasAvroSchema()} returns true. + * This means underlying object was serialized as avro. + * + * @return Protobuf {@link Schema} representing the schema for the object. + */ + @SuppressWarnings("unchecked") + public Schema getAvroSchema() { + return ((AvroSchema) schemaContainer).getSchema(); + } + + /** + * Check whether the schema is of type Protobuf. + * + * @return True if the schema is for protobuf, false otherwise. + */ + public boolean hasProtobufSchema() { + return schemaContainer instanceof ProtobufSchema; + } + + /** + * Protobuf Schema for the underlying deserialized object. This is available if {@link this#hasProtobufSchema()} returns true. + * This means underlying object was serialized as protobuf. + * + * @return Protobuf {@link com.google.protobuf.DescriptorProtos.FileDescriptorSet} representing the schema for the object. + */ + @SuppressWarnings("unchecked") + public DescriptorProtos.FileDescriptorSet getProtobufSchema() { + return ((ProtobufSchema) schemaContainer).getDescriptorProto(); + } + + /** + * Check whether the schema is of type Json. + * + * @return True if the schema is for json, false otherwise + */ + public boolean hasJsonSchema() { + return schemaContainer instanceof JSONSchema; + } + + /** + * Json Schema for the underlying deserialized object. This is available if {@link this#hasJsonSchema()} returns true. + * This means underlying object was serialized as Json. + * + * @return Protobuf {@link JsonSchema} representing the schema for the object. + */ + @SuppressWarnings("unchecked") + public JsonSchema getJsonSchema() { + return ((JSONSchema) schemaContainer).getSchema(); + } + + /** + * Applies the transform on the deserialized object. + * + * @return Transformed object of type T. + */ + public T getTransformed() { + if (schemaContainer == null) { + throw new IllegalArgumentException(); + } + return transform.apply(schemaContainer.getSchemaInfo().getSerializationFormat(), object); + } + + /** + * Applies JsonString transformation to convert the deserialized object into a json string. 
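// Aside (not part of the patch): callers that receive a WithSchema<Object> (for example from
// deserializerWithSchema) can branch on which format the writer used via the accessors
// defined above. A small usage sketch; WithSchemaInspector and describe(...) are illustrative
// names, and the snippet assumes the pravega, avro and protobuf jars are on the classpath.

import io.pravega.schemaregistry.serializers.WithSchema;

final class WithSchemaInspector {
    static String describe(WithSchema<Object> event) {
        if (event.hasAvroSchema()) {
            // Avro writers expose the org.apache.avro.Schema of the record.
            return "avro value with schema " + event.getAvroSchema().getFullName();
        } else if (event.hasProtobufSchema()) {
            // Protobuf writers expose the FileDescriptorSet describing the message.
            return "protobuf message, descriptor set has " + event.getProtobufSchema().getFileCount() + " file(s)";
        } else if (event.hasJsonSchema()) {
            // Json writers expose the Jackson JsonSchema; getJsonString() re-serializes the value.
            return "json value: " + event.getJsonString();
        }
        return "custom format, raw object: " + event.getObject();
    }
}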
+ * + * @return Json String for the object. + */ + public String getJsonString() { + if (schemaContainer == null) { + throw new IllegalArgumentException(); + } + return JSON_TRANSFORM.apply(schemaContainer.getSchemaInfo().getSerializationFormat(), object); + } + + @SneakyThrows + private static String toJsonString(SerializationFormat format, Object deserialize) { + String jsonString; + switch (format) { + case Avro: + if (deserialize instanceof IndexedRecord) { + jsonString = deserialize.toString(); + } else { + jsonString = OBJECT_MAPPER.writeValueAsString(deserialize); + } + break; + case Protobuf: + jsonString = PRINTER.print((DynamicMessage) deserialize); + break; + case Json: + jsonString = OBJECT_MAPPER.writeValueAsString(((WithSchema) deserialize).object); + break; + default: + jsonString = OBJECT_MAPPER.writeValueAsString(deserialize); + } + return jsonString; + } +} diff --git a/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java b/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java index e434e2f6b..bcbad6943 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java +++ b/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java @@ -101,10 +101,11 @@ public void testAvroSerializers() { assertEquals(deserialized, test1); serialized = serializer.serialize(test1); - Serializer genericDeserializer = SerializerFactory.avroGenericDeserializer(config, null); - GenericRecord genericDeserialized = genericDeserializer.deserialize(serialized); - assertEquals(genericDeserialized.get("name").toString(), "name"); - assertEquals(genericDeserialized.get("field1"), 1); + Serializer genericDeserializer = SerializerFactory.avroGenericDeserializer(config, null); + Object genericDeserialized = genericDeserializer.deserialize(serialized); + assertTrue(genericDeserialized instanceof GenericRecord); + assertEquals(((GenericRecord) genericDeserialized).get("name").toString(), "name"); + assertEquals(((GenericRecord) genericDeserialized).get("field1"), 1); // multi type Test2 test2 = new Test2("name", 1, "2"); @@ -126,10 +127,10 @@ public void testAvroSerializers() { Map, AvroSchema> map2 = new HashMap<>(); map2.put(Test1.class, schema1Base); - Serializer> fallbackDeserializer = SerializerFactory.avroTypedOrGenericDeserializer(config, map2); + Serializer> fallbackDeserializer = SerializerFactory.avroTypedOrGenericDeserializer(config, map2); serialized = multiSerializer.serialize(test1); - Either fallback = fallbackDeserializer.deserialize(serialized); + Either fallback = fallbackDeserializer.deserialize(serialized); assertTrue(fallback.isLeft()); assertEquals(fallback.getLeft(), test1); @@ -260,10 +261,10 @@ public void testJsonSerializers() { assertEquals(deserialized, user1); serialized = serializer.serialize(user1); - Serializer genericDeserializer = SerializerFactory.jsonGenericDeserializer(config); - JsonGenericObject generic = genericDeserializer.deserialize(serialized); - assertEquals(generic.getJsonSchema(), schema1.getSchema()); - assertEquals(generic.getObject().size(), 4); + Serializer> genericDeserializer = SerializerFactory.jsonGenericDeserializer(config); + WithSchema generic = genericDeserializer.deserialize(serialized); + assertEquals(((JSONSchema) generic.getSchemaContainer()).getSchema(), schema1.getSchema()); + assertEquals(((Map) generic.getObject()).size(), 4); serialized = serializer.serialize(user1); Serializer stringDeserializer = 
SerializerFactory.jsonStringDeserializer(config); @@ -290,9 +291,9 @@ public void testJsonSerializers() { Map, JSONSchema> map2 = new HashMap<>(); map2.put(DerivedUser1.class, schema1Base); - Serializer> fallbackDeserializer = SerializerFactory.jsonTypedOrGenericDeserializer(config, map2); + Serializer>> fallbackDeserializer = SerializerFactory.jsonTypedOrGenericDeserializer(config, map2); serialized = multiSerializer.serialize(user1); - Either fallback = fallbackDeserializer.deserialize(serialized); + Either> fallback = fallbackDeserializer.deserialize(serialized); assertTrue(fallback.isLeft()); assertEquals(fallback.getLeft(), user1); @@ -343,13 +344,13 @@ public void testMultiformatDeserializers() throws IOException { DerivedUser1 user1 = new DerivedUser1("user", new Address("street", "city"), 2, "user1"); ByteBuffer serializedJson = jsonSerializer.serialize(user1); - Serializer deserializer = SerializerFactory.multiFormatGenericDeserializer(config); + Serializer deserializer = SerializerFactory.genericDeserializer(config); Object deserialized = deserializer.deserialize(serializedAvro); assertTrue(deserialized instanceof GenericRecord); deserialized = deserializer.deserialize(serializedProto); assertTrue(deserialized instanceof DynamicMessage); deserialized = deserializer.deserialize(serializedJson); - assertTrue(deserialized instanceof JsonGenericObject); + assertTrue(deserialized instanceof WithSchema); Serializer jsonStringDeserializer = SerializerFactory.deserializeAsJsonString(config); serializedAvro.position(0); @@ -429,11 +430,11 @@ public void testNoEncodingJson() throws IOException { serialized = serializer.serialize(user1); - Serializer genericDeserializer = SerializerFactory.jsonGenericDeserializer(config); + Serializer> genericDeserializer = SerializerFactory.jsonGenericDeserializer(config); - JsonGenericObject generic = genericDeserializer.deserialize(serialized); + WithSchema generic = genericDeserializer.deserialize(serialized); assertNotNull(generic.getObject()); - assertNull(generic.getJsonSchema()); + assertNull(generic.getSchemaContainer()); } @Data diff --git a/serializers/src/test/java/io/pravega/schemaregistry/testobjs/SchemaDefinitions.java b/serializers/src/test/java/io/pravega/schemaregistry/testobjs/SchemaDefinitions.java index f7708373a..62f732400 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/testobjs/SchemaDefinitions.java +++ b/serializers/src/test/java/io/pravega/schemaregistry/testobjs/SchemaDefinitions.java @@ -49,7 +49,7 @@ public class SchemaDefinitions { .noDefault() .endRecord(); - public static final String JSON_SCHEMA_STRING = "{\"id\": \"person.json\", " + + public static final String JSON_SCHEMA_STRING = "{" + "\"title\": \"Person\", " + "\"type\": \"object\", " + "\"properties\": { " + From 223df77fe23a675791c8e0348320fc8ca538ed0c Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Thu, 2 Jul 2020 21:39:01 -0700 Subject: [PATCH 44/70] bug fixes Signed-off-by: Shivesh Ranjan --- .../schemaregistry/schemas/JSONSchema.java | 18 ++++++------- ...izer.java => JsonGenericDeserializer.java} | 10 +++---- .../serializers/JsonSerializerFactory.java | 8 +++--- .../serializers/JsonStringDeserializer.java | 3 +-- .../MultiFormatSerializerFactory.java | 4 +-- .../serializers/SerializerFactory.java | 4 +-- .../serializers/WithSchema.java | 6 ++--- .../serializers/SerializerTest.java | 26 ++++++++++++++----- 8 files changed, 45 insertions(+), 34 deletions(-) rename 
serializers/src/main/java/io/pravega/schemaregistry/serializers/{JsonGenericDeserlizer.java => JsonGenericDeserializer.java} (75%) diff --git a/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java b/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java index f8925ac19..4cd256789 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java @@ -30,6 +30,8 @@ * @param Type of element. */ public class JSONSchema implements SchemaContainer { + private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + @Getter private final String schemaString; private final Class base; @@ -74,10 +76,9 @@ private JSONSchema(SchemaInfo schemaInfo, JsonSchema schema, String schemaString */ @SneakyThrows({JsonMappingException.class, JsonProcessingException.class}) public static JSONSchema of(Class tClass) { - ObjectMapper objectMapper = new ObjectMapper(); - JsonSchemaGenerator schemaGen = new JsonSchemaGenerator(objectMapper); + JsonSchemaGenerator schemaGen = new JsonSchemaGenerator(OBJECT_MAPPER); JsonSchema schema = schemaGen.generateSchema(tClass); - String schemaString = objectMapper.writeValueAsString(schema); + String schemaString = OBJECT_MAPPER.writeValueAsString(schema); return new JSONSchema<>(schema, tClass.getName(), schemaString, tClass); } @@ -91,8 +92,7 @@ public static JSONSchema of(Class tClass) { */ @SneakyThrows({JsonMappingException.class, JsonProcessingException.class}) public static JSONSchema of(String type, String schemaString) { - ObjectMapper objectMapper = new ObjectMapper(); - JsonSchema schema = objectMapper.readValue(schemaString, JsonSchema.class); + JsonSchema schema = OBJECT_MAPPER.readValue(schemaString, JsonSchema.class); return new JSONSchema<>(schema, type, schemaString, Object.class); } @@ -108,10 +108,9 @@ public static JSONSchema of(String type, String schemaString) { */ @SneakyThrows({JsonMappingException.class, JsonProcessingException.class}) public static JSONSchema ofBaseType(Class tDerived, Class tBase) { - ObjectMapper objectMapper = new ObjectMapper(); - JsonSchemaGenerator schemaGen = new JsonSchemaGenerator(objectMapper); + JsonSchemaGenerator schemaGen = new JsonSchemaGenerator(OBJECT_MAPPER); JsonSchema schema = schemaGen.generateSchema(tDerived); - String schemaString = objectMapper.writeValueAsString(schema); + String schemaString = OBJECT_MAPPER.writeValueAsString(schema); return new JSONSchema<>(schema, tDerived.getName(), schemaString, tBase, tDerived); } @@ -124,10 +123,9 @@ public static JSONSchema ofBaseType(Class tDerived, Class */ @SneakyThrows({JsonMappingException.class, JsonProcessingException.class}) public static JSONSchema from(SchemaInfo schemaInfo) { - ObjectMapper objectMapper = new ObjectMapper(); String schemaString = new String(schemaInfo.getSchemaData().array(), Charsets.UTF_8); - JsonSchema schema = objectMapper.readValue(schemaString, JsonSchema.class); + JsonSchema schema = OBJECT_MAPPER.readValue(schemaString, JsonSchema.class); return new JSONSchema<>(schemaInfo, schema, schemaString, Object.class); } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericDeserlizer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericDeserializer.java similarity index 75% rename from serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericDeserlizer.java rename to 
serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericDeserializer.java index fbbbbdb54..a865ffcfa 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericDeserlizer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericDeserializer.java @@ -21,11 +21,11 @@ import java.io.InputStream; import java.util.Map; -class JsonGenericDeserlizer extends AbstractPravegaDeserializer> { +class JsonGenericDeserializer extends AbstractPravegaDeserializer> { private final ObjectMapper objectMapper; - JsonGenericDeserlizer(String groupId, SchemaRegistryClient client, - SerializerConfig.Decoder decoder, EncodingCache encodingCache) { + JsonGenericDeserializer(String groupId, SchemaRegistryClient client, + SerializerConfig.Decoder decoder, EncodingCache encodingCache) { super(groupId, client, null, false, decoder, encodingCache); this.objectMapper = new ObjectMapper(); objectMapper.setVisibility(PropertyAccessor.ALL, JsonAutoDetect.Visibility.ANY); @@ -35,8 +35,8 @@ class JsonGenericDeserlizer extends AbstractPravegaDeserializer> @SneakyThrows({JsonProcessingException.class, IOException.class}) @Override - protected WithSchema deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) { - Map obj = objectMapper.readValue(inputStream, Map.class); + protected WithSchema deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) { + Object obj = objectMapper.readValue(inputStream, Object.class); return new WithSchema<>(writerSchemaInfo, obj, (x, y) -> (Map) y); } } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializerFactory.java index d3b464564..ba4ce2293 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializerFactory.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializerFactory.java @@ -51,7 +51,7 @@ static Serializer deserializer(SerializerConfig config, JSONSchema sch return new JsonDeserlizer<>(groupId, schemaRegistryClient, schemaData, config.getDecoder(), encodingCache); } - static Serializer> genericDeserializer(SerializerConfig config) { + static Serializer> genericDeserializer(SerializerConfig config) { SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : config.getRegistryConfigOrClient().getRight(); @@ -60,7 +60,7 @@ static Serializer> genericDeserializer(SerializerConfig config) EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - return new JsonGenericDeserlizer(groupId, schemaRegistryClient, config.getDecoder(), + return new JsonGenericDeserializer(groupId, schemaRegistryClient, config.getDecoder(), encodingCache); } @@ -108,7 +108,7 @@ static Serializer multiTypeDeserializer( deserializerMap, config.getDecoder(), encodingCache); } - static Serializer>> typedOrGenericDeserializer( + static Serializer>> typedOrGenericDeserializer( SerializerConfig config, Map, JSONSchema> schemas) { String groupId = config.getGroupId(); SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? 
@@ -122,7 +122,7 @@ static Serializer>> typedOrGenericDeserializer( Map> deserializerMap = schemas .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), x -> new JsonDeserlizer<>(groupId, schemaRegistryClient, x, config.getDecoder(), encodingCache))); - JsonGenericDeserlizer genericDeserializer = new JsonGenericDeserlizer(groupId, schemaRegistryClient, config.getDecoder(), + JsonGenericDeserializer genericDeserializer = new JsonGenericDeserializer(groupId, schemaRegistryClient, config.getDecoder(), encodingCache); return new MultiplexedAndGenericDeserializer<>(groupId, schemaRegistryClient, diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonStringDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonStringDeserializer.java index 91c35bd26..647e02ae6 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonStringDeserializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonStringDeserializer.java @@ -19,7 +19,6 @@ import java.io.IOException; import java.io.InputStream; -import java.util.Map; class JsonStringDeserializer extends AbstractPravegaDeserializer { private final ObjectMapper objectMapper; @@ -36,7 +35,7 @@ class JsonStringDeserializer extends AbstractPravegaDeserializer { @SneakyThrows({JsonProcessingException.class, IOException.class}) @Override protected String deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) { - Map obj = objectMapper.readValue(inputStream, Map.class); + Object obj = objectMapper.readValue(inputStream, Object.class); return objectMapper.writeValueAsString(obj); } } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatSerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatSerializerFactory.java index ce3429d89..c93137e1d 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatSerializerFactory.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatSerializerFactory.java @@ -98,7 +98,7 @@ private static Serializer deserializeAsTInternal(SerializerConfig config, failOnCodecMismatch(schemaRegistryClient, config); EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - AbstractPravegaDeserializer json = new JsonGenericDeserlizer(config.getGroupId(), schemaRegistryClient, + AbstractPravegaDeserializer json = new JsonGenericDeserializer(config.getGroupId(), schemaRegistryClient, config.getDecoder(), encodingCache); AbstractPravegaDeserializer protobuf = new ProtobufGenericDeserlizer(groupId, schemaRegistryClient, null, config.getDecoder(), encodingCache); @@ -133,7 +133,7 @@ private static Serializer> deserializerInternal(SerializerConf failOnCodecMismatch(schemaRegistryClient, config); EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - AbstractPravegaDeserializer json = new JsonGenericDeserlizer(config.getGroupId(), schemaRegistryClient, + AbstractPravegaDeserializer json = new JsonGenericDeserializer(config.getGroupId(), schemaRegistryClient, config.getDecoder(), encodingCache); AbstractPravegaDeserializer protobuf = new ProtobufGenericDeserlizer(groupId, schemaRegistryClient, null, config.getDecoder(), encodingCache); diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java 
b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java index a48ff4737..d9be7201e 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java @@ -266,7 +266,7 @@ public static Serializer jsonDeserializer(SerializerConfig config, JSONSc * @param config Serializer Config used for instantiating a new serializer. * @return A deserializer Implementation that can be used in {@link io.pravega.client.stream.EventStreamReader}. */ - public static Serializer> jsonGenericDeserializer(SerializerConfig config) { + public static Serializer> jsonGenericDeserializer(SerializerConfig config) { return JsonSerializerFactory.genericDeserializer(config); } @@ -319,7 +319,7 @@ public static Serializer jsonMultiTypeDeserializer( * @param Base type of schemas. * @return a Deserializer which can deserialize events of different types in the stream into typed objects. */ - public static Serializer>> jsonTypedOrGenericDeserializer( + public static Serializer>> jsonTypedOrGenericDeserializer( SerializerConfig config, Map, JSONSchema> schemas) { return JsonSerializerFactory.typedOrGenericDeserializer(config, schemas); } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/WithSchema.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/WithSchema.java index be17749a0..9216f79db 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/WithSchema.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/WithSchema.java @@ -81,7 +81,7 @@ public boolean hasAvroSchema() { } /** - * Avro Schema for the underlying deserialized object. This is available if {@link this#hasAvroSchema()} returns true. + * Avro Schema for the underlying deserialized object. This is available if {@link WithSchema#hasAvroSchema()} returns true. * This means underlying object was serialized as avro. * * @return Protobuf {@link Schema} representing the schema for the object. @@ -101,7 +101,7 @@ public boolean hasProtobufSchema() { } /** - * Protobuf Schema for the underlying deserialized object. This is available if {@link this#hasProtobufSchema()} returns true. + * Protobuf Schema for the underlying deserialized object. This is available if {@link WithSchema#hasProtobufSchema()} returns true. * This means underlying object was serialized as protobuf. * * @return Protobuf {@link com.google.protobuf.DescriptorProtos.FileDescriptorSet} representing the schema for the object. @@ -121,7 +121,7 @@ public boolean hasJsonSchema() { } /** - * Json Schema for the underlying deserialized object. This is available if {@link this#hasJsonSchema()} returns true. + * Json Schema for the underlying deserialized object. This is available if {@link WithSchema#hasJsonSchema()} returns true. * This means underlying object was serialized as Json. * * @return Protobuf {@link JsonSchema} representing the schema for the object. 
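As a usage sketch for the WithSchema accessors documented above: the following assumes a SerializerConfig for the target group is built elsewhere and that the event payload arrives as a ByteBuffer; class and method names follow the API shown in this patch, everything else (class name, method name, null fallback) is illustrative only.

    import io.pravega.client.stream.Serializer;
    import io.pravega.schemaregistry.serializers.SerializerConfig;
    import io.pravega.schemaregistry.serializers.SerializerFactory;
    import io.pravega.schemaregistry.serializers.WithSchema;

    import java.nio.ByteBuffer;

    class GenericJsonReadExample {
        // 'config' describes the registry client/group; 'data' is one serialized event read from the stream.
        static Object readGenericJson(SerializerConfig config, ByteBuffer data) {
            // Generic JSON deserializer: no compile-time schema, result is wrapped with its writer schema.
            Serializer<WithSchema<Object>> deserializer = SerializerFactory.jsonGenericDeserializer(config);
            WithSchema<Object> result = deserializer.deserialize(data);
            if (result.hasJsonSchema()) {
                // Event was written as JSON; the writer schema is available via result.getSchemaContainer().
                return result.getObject();
            }
            return null;
        }
    }
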
diff --git a/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java b/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java index bcbad6943..48ca2baef 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java +++ b/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java @@ -261,8 +261,8 @@ public void testJsonSerializers() { assertEquals(deserialized, user1); serialized = serializer.serialize(user1); - Serializer> genericDeserializer = SerializerFactory.jsonGenericDeserializer(config); - WithSchema generic = genericDeserializer.deserialize(serialized); + Serializer> genericDeserializer = SerializerFactory.jsonGenericDeserializer(config); + WithSchema generic = genericDeserializer.deserialize(serialized); assertEquals(((JSONSchema) generic.getSchemaContainer()).getSchema(), schema1.getSchema()); assertEquals(((Map) generic.getObject()).size(), 4); @@ -271,6 +271,20 @@ public void testJsonSerializers() { String str = stringDeserializer.deserialize(serialized); assertFalse(Strings.isNullOrEmpty(str)); + String schemaString = "{\"type\": \"object\",\"title\": \"The external data schema\",\"properties\": {\"content\": {\"type\": \"string\"}}}"; + + JSONSchema myData = JSONSchema.of("MyData", schemaString); + VersionInfo versionInfo3 = new VersionInfo("myData", 0, 2); + doAnswer(x -> versionInfo3).when(client).getVersionForSchema(anyString(), eq(myData.getSchemaInfo())); + doAnswer(x -> new EncodingId(2)).when(client).getEncodingId(anyString(), eq(versionInfo3), any()); + doAnswer(x -> new EncodingInfo(versionInfo3, myData.getSchemaInfo(), CodecFactory.NONE)).when(client).getEncodingInfo(anyString(), eq(new EncodingId(2))); + + Serializer serializer2 = SerializerFactory.jsonSerializer(config, myData); + Map jsonObject = new HashMap<>(); + jsonObject.put("content", "mxx"); + + ByteBuffer s = serializer2.serialize(jsonObject); + str = stringDeserializer.deserialize(s); // multi type DerivedUser2 user2 = new DerivedUser2("user", new Address("street", "city"), 2, "user2"); @@ -291,9 +305,9 @@ public void testJsonSerializers() { Map, JSONSchema> map2 = new HashMap<>(); map2.put(DerivedUser1.class, schema1Base); - Serializer>> fallbackDeserializer = SerializerFactory.jsonTypedOrGenericDeserializer(config, map2); + Serializer>> fallbackDeserializer = SerializerFactory.jsonTypedOrGenericDeserializer(config, map2); serialized = multiSerializer.serialize(user1); - Either> fallback = fallbackDeserializer.deserialize(serialized); + Either> fallback = fallbackDeserializer.deserialize(serialized); assertTrue(fallback.isLeft()); assertEquals(fallback.getLeft(), user1); @@ -430,9 +444,9 @@ public void testNoEncodingJson() throws IOException { serialized = serializer.serialize(user1); - Serializer> genericDeserializer = SerializerFactory.jsonGenericDeserializer(config); + Serializer> genericDeserializer = SerializerFactory.jsonGenericDeserializer(config); - WithSchema generic = genericDeserializer.deserialize(serialized); + WithSchema generic = genericDeserializer.deserialize(serialized); assertNotNull(generic.getObject()); assertNull(generic.getSchemaContainer()); } From 8c69b3f2a4642e0bc1e528ab840257664da5dd30 Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Fri, 3 Jul 2020 08:38:45 -0700 Subject: [PATCH 45/70] PR comments Signed-off-by: Shivesh Ranjan --- .../schemaregistry/common/NameUtil.java | 8 +- .../schemaregistry/GroupIdGenerator.java | 10 +- 
.../pravega/schemaregistry/codec/Codec.java | 30 +++- .../schemaregistry/codec/CodecFactory.java | 129 ------------------ .../schemaregistry/schemas/AvroSchema.java | 17 ++- .../schemaregistry/schemas/JSONSchema.java | 56 ++++---- .../schemas/ProtobufSchema.java | 46 ++++--- .../{SchemaContainer.java => Schema.java} | 8 +- ...ializer.java => AbstractDeserializer.java} | 54 ++++---- ...erializer.java => AbstractSerializer.java} | 59 ++++---- .../serializers/AvroDeserlizer.java | 27 ++-- .../serializers/AvroGenericDeserlizer.java | 24 ++-- .../serializers/AvroSerializer.java | 12 +- .../serializers/AvroSerializerFactory.java | 46 ++----- .../schemaregistry/serializers/Codecs.java | 127 +++++++++++++++++ ...erializer.java => CustomDeserializer.java} | 2 +- ...aSerializer.java => CustomSerializer.java} | 2 +- .../serializers/CustomSerializerFactory.java | 28 ++-- .../serializers/EncodingCache.java | 23 +++- .../serializers/FailingSerializer.java | 26 ++++ .../serializers/JsonDeserlizer.java | 7 +- .../serializers/JsonGenericDeserializer.java | 7 +- .../serializers/JsonSerializer.java | 7 +- .../serializers/JsonSerializerFactory.java | 43 ++---- .../serializers/JsonStringDeserializer.java | 7 +- .../MultiFormatSerializerFactory.java | 64 ++++----- .../MultiFormatWithSchemaDeserializer.java | 9 +- .../MultipleFormatDeserializer.java | 9 +- .../serializers/MultipleFormatSerializer.java | 10 +- .../MultiplexedAndGenericDeserializer.java | 15 +- .../serializers/MultiplexedDeserializer.java | 11 +- .../serializers/MultiplexedSerializer.java | 6 +- .../serializers/ProtobufDeserlizer.java | 11 +- .../ProtobufGenericDeserlizer.java | 97 ++++++------- .../serializers/ProtobufSerializer.java | 7 +- .../ProtobufSerializerFactory.java | 45 ++---- .../serializers/SerializerConfig.java | 44 +++--- .../serializers/SerializerFactory.java | 12 +- .../serializers/SerializerFactoryHelper.java | 28 +++- .../serializers/WithSchema.java | 113 ++++++++------- .../pravega/schemaregistry/GroupIdTest.java | 2 +- .../schemaregistry/codec/CodecTest.java | 16 ++- .../schemaregistry/schemas/TestSchemas.java | 3 +- .../schemaregistry/serializers/CacheTest.java | 6 +- .../serializers/SerializerTest.java | 32 ++--- 45 files changed, 672 insertions(+), 673 deletions(-) delete mode 100644 serializers/src/main/java/io/pravega/schemaregistry/codec/CodecFactory.java rename serializers/src/main/java/io/pravega/schemaregistry/schemas/{SchemaContainer.java => Schema.java} (64%) rename serializers/src/main/java/io/pravega/schemaregistry/serializers/{AbstractPravegaDeserializer.java => AbstractDeserializer.java} (71%) rename serializers/src/main/java/io/pravega/schemaregistry/serializers/{AbstractPravegaSerializer.java => AbstractSerializer.java} (74%) create mode 100644 serializers/src/main/java/io/pravega/schemaregistry/serializers/Codecs.java rename serializers/src/main/java/io/pravega/schemaregistry/serializers/{PravegaDeserializer.java => CustomDeserializer.java} (92%) rename serializers/src/main/java/io/pravega/schemaregistry/serializers/{PravegaSerializer.java => CustomSerializer.java} (92%) create mode 100644 serializers/src/main/java/io/pravega/schemaregistry/serializers/FailingSerializer.java diff --git a/common/src/main/java/io/pravega/schemaregistry/common/NameUtil.java b/common/src/main/java/io/pravega/schemaregistry/common/NameUtil.java index 72ef5e158..6f1b4ae16 100644 --- a/common/src/main/java/io/pravega/schemaregistry/common/NameUtil.java +++ b/common/src/main/java/io/pravega/schemaregistry/common/NameUtil.java @@ 
-14,6 +14,9 @@ public class NameUtil { * Extracts the name from the fully qualified type name. Name represents the last token after ".". * If the qualified name does not contain "." then the name is same as qualified name. * + * Example: io.pravega.MyObject will return MyObject + * Example: MyObject will return MyObject + * * @param qualifiedName qualified name to extract name from. * @return extracted name. */ @@ -25,7 +28,10 @@ public static String extractName(String qualifiedName) { /** * Extracts name and the prefix qualifier before the name. Name represents the last token after ".". * Qualifier is the prefix before the name. - * If the qualified name does not contain "." then the name is same as qualified name and qualifier is empty string. + * If the qualified name does not contain "." then the name is same as qualified name and qualifier is empty string. + * + * Example: io.pravega.MyObject will return ["MyObject", "io.pravega"] + * Example: MyObject will return ["MyObject", ""] * * @param qualifiedName qualified name to extract tokens from. * @return an array containing name at index 0 and qualifier at index 1. diff --git a/serializers/src/main/java/io/pravega/schemaregistry/GroupIdGenerator.java b/serializers/src/main/java/io/pravega/schemaregistry/GroupIdGenerator.java index 2a4b4da4c..03c9eb8bc 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/GroupIdGenerator.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/GroupIdGenerator.java @@ -11,7 +11,6 @@ import com.google.common.base.Preconditions; import io.pravega.shared.NameUtils; -import lombok.SneakyThrows; /** * Defines strategies for generating groupId for stream. @@ -21,19 +20,18 @@ public class GroupIdGenerator { private GroupIdGenerator() { } - @SneakyThrows - public static String getGroupId(Type type, String... args) { - switch (type) { + public static String getGroupId(Scheme scheme, String... args) { + switch (scheme) { case QualifiedStreamName: Preconditions.checkNotNull(args); Preconditions.checkArgument(args.length == 2); return NameUtils.getScopedStreamName(args[0], args[1]); default: - throw new IllegalArgumentException(); + throw new IllegalArgumentException("Unknown Group id generation schema."); } } - public enum Type { + public enum Scheme { QualifiedStreamName, } } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/codec/Codec.java b/serializers/src/main/java/io/pravega/schemaregistry/codec/Codec.java index 4cfaf7052..6dbd2773b 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/codec/Codec.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/codec/Codec.java @@ -9,15 +9,41 @@ */ package io.pravega.schemaregistry.codec; +import io.pravega.schemaregistry.contract.data.EncodingInfo; + +import java.io.IOException; import java.nio.ByteBuffer; /** * Codec interface that defines methods to encode and decoder data for a given codec type. */ public interface Codec { + /** + * String name identifying the Codec Type. This should be same as the codecType that is registered for the group + * in schema registry service. The serializers will use this codec to encode the data and deserializers will find + * the decoder for the encoded data from {@link EncodingInfo#codecType} + * + * @return Name of the codec. + */ String getCodecType(); - ByteBuffer encode(ByteBuffer data); + /** + * Implementation should encode the remaining bytes in the buffer and return a new ByteBuffer that includes + * the encoded data at its current position. 
+ * + * @param data ByteBuffer to encode. + * @return encoded ByteBuffer with position set to the start of encoded data. + * @throws IOException IOException can be thrown while reading from or writing to byte buffers. + */ + ByteBuffer encode(ByteBuffer data) throws IOException; - ByteBuffer decode(ByteBuffer data); + /** + * Implementation should decode the remaining bytes in the buffer and return a new ByteBuffer that includes + * the decoded data at its current position. + * + * @param data encoded ByteBuffer to decode. + * @return decoded ByteBuffer with position set to the start of decoded data. + * @throws IOException can be thrown while reading from or writing to byte buffers. + */ + ByteBuffer decode(ByteBuffer data) throws IOException; } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/codec/CodecFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/codec/CodecFactory.java deleted file mode 100644 index d3d74b572..000000000 --- a/serializers/src/main/java/io/pravega/schemaregistry/codec/CodecFactory.java +++ /dev/null @@ -1,129 +0,0 @@ -/** - * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - */ -package io.pravega.schemaregistry.codec; - -import lombok.SneakyThrows; -import org.xerial.snappy.Snappy; - -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.nio.ByteBuffer; -import java.util.zip.GZIPInputStream; -import java.util.zip.GZIPOutputStream; - -/** - * Factory class for creating codecs for codec types . 
- */ -public class CodecFactory { - public static final String NONE = ""; - public static final String MIME_GZIP = "application/x-gzip"; - public static final String MIME_SNAPPY = "application/x-snappy-framed"; - - private static final Noop NOOP = new Noop(); - private static final GZipCodec GZIP_COMPRESSOR = new GZipCodec(); - private static final SnappyCodec SNAPPY_COMPRESSOR = new SnappyCodec(); - - public static Codec none() { - return NOOP; - } - - public static Codec gzip() { - return GZIP_COMPRESSOR; - } - - public static Codec snappy() { - return SNAPPY_COMPRESSOR; - } - - private static class Noop implements Codec { - @Override - public String getCodecType() { - return NONE; - } - - @Override - public ByteBuffer encode(ByteBuffer data) { - return data; - } - - @Override - public ByteBuffer decode(ByteBuffer data) { - return data; - } - } - - private static class GZipCodec implements Codec { - @Override - public String getCodecType() { - return MIME_GZIP; - } - - @SneakyThrows(IOException.class) - @Override - public ByteBuffer encode(ByteBuffer data) { - byte[] array = new byte[data.remaining()]; - data.get(array); - - ByteArrayOutputStream bos = new ByteArrayOutputStream(array.length); - GZIPOutputStream gzipOS = new GZIPOutputStream(bos); - gzipOS.write(array); - gzipOS.close(); - byte[] compressed = bos.toByteArray(); - return ByteBuffer.wrap(compressed); - } - - @SneakyThrows(IOException.class) - @Override - public ByteBuffer decode(ByteBuffer data) { - byte[] array = new byte[data.remaining()]; - data.get(array); - - ByteArrayInputStream bis = new ByteArrayInputStream(array); - ByteArrayOutputStream bos = new ByteArrayOutputStream(); - GZIPInputStream gzipIS = new GZIPInputStream(bis); - byte[] buffer = new byte[1024]; - int len; - while ((len = gzipIS.read(buffer)) != -1) { - bos.write(buffer, 0, len); - } - byte[] uncompressed = bos.toByteArray(); - return ByteBuffer.wrap(uncompressed); - } - } - - private static class SnappyCodec implements Codec { - @Override - public String getCodecType() { - return MIME_SNAPPY; - } - - @SneakyThrows(IOException.class) - @Override - public ByteBuffer encode(ByteBuffer data) { - byte[] array = new byte[data.remaining()]; - data.get(array); - - byte[] compressed = Snappy.compress(array); - return ByteBuffer.wrap(compressed); - } - - @SneakyThrows(IOException.class) - @Override - public ByteBuffer decode(ByteBuffer data) { - byte[] array = new byte[data.remaining()]; - data.get(array); - - byte[] uncompressed = Snappy.uncompress(array); - return ByteBuffer.wrap(uncompressed); - } - } - -} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/schemas/AvroSchema.java b/serializers/src/main/java/io/pravega/schemaregistry/schemas/AvroSchema.java index caaea741e..09efb80f2 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/schemas/AvroSchema.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/schemas/AvroSchema.java @@ -15,7 +15,6 @@ import io.pravega.schemaregistry.contract.data.SchemaInfo; import io.pravega.schemaregistry.contract.data.SerializationFormat; import lombok.Getter; -import org.apache.avro.Schema; import org.apache.avro.generic.GenericRecord; import org.apache.avro.reflect.ReflectData; import org.apache.avro.specific.SpecificData; @@ -28,14 +27,14 @@ * * @param Type of element. 
*/ -public class AvroSchema implements SchemaContainer { +public class AvroSchema implements Schema { @Getter - private final Schema schema; + private final org.apache.avro.Schema schema; private final SchemaInfo schemaInfo; @Getter private final Class tClass; - private AvroSchema(Schema schema, Class tClass) { + private AvroSchema(org.apache.avro.Schema schema, Class tClass) { this.schema = schema; this.schemaInfo = new SchemaInfo(schema.getFullName(), SerializationFormat.Avro, getSchemaBytes(), ImmutableMap.of()); @@ -44,7 +43,7 @@ private AvroSchema(Schema schema, Class tClass) { private AvroSchema(SchemaInfo schemaInfo) { String schemaString = new String(schemaInfo.getSchemaData().array(), Charsets.UTF_8); - this.schema = new Schema.Parser().parse(schemaString); + this.schema = new org.apache.avro.Schema.Parser().parse(schemaString); this.schemaInfo = schemaInfo; this.tClass = null; } @@ -59,7 +58,7 @@ private AvroSchema(SchemaInfo schemaInfo) { * @return {@link AvroSchema} with generic type T that extracts and captures the avro schema. */ public static AvroSchema of(Class tClass) { - Schema schema; + org.apache.avro.Schema schema; if (SpecificRecordBase.class.isAssignableFrom(tClass)) { schema = SpecificData.get().getSchema(tClass); } else { @@ -75,7 +74,7 @@ public static AvroSchema of(Class tClass) { * @param schema Schema to use. * @return Returns an AvroSchema with {@link GenericRecord} type. */ - public static AvroSchema of(Schema schema) { + public static AvroSchema of(org.apache.avro.Schema schema) { return new AvroSchema<>(schema, Object.class); } @@ -85,8 +84,8 @@ public static AvroSchema of(Schema schema) { * @param schema Schema to use. * @return Returns an AvroSchema with {@link GenericRecord} type. */ - public static AvroSchema ofRecord(Schema schema) { - Preconditions.checkArgument(schema.getType().equals(Schema.Type.RECORD)); + public static AvroSchema ofRecord(org.apache.avro.Schema schema) { + Preconditions.checkArgument(schema.getType().equals(org.apache.avro.Schema.Type.RECORD)); return new AvroSchema<>(schema, GenericRecord.class); } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java b/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java index 4cd256789..9b2d849ae 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java @@ -10,7 +10,6 @@ package io.pravega.schemaregistry.schemas; import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.JsonMappingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.module.jsonSchema.JsonSchema; import com.fasterxml.jackson.module.jsonSchema.JsonSchemaGenerator; @@ -19,7 +18,6 @@ import io.pravega.schemaregistry.contract.data.SchemaInfo; import io.pravega.schemaregistry.contract.data.SerializationFormat; import lombok.Getter; -import lombok.SneakyThrows; import org.apache.avro.specific.SpecificRecordBase; import java.nio.ByteBuffer; @@ -29,7 +27,7 @@ * * @param Type of element. */ -public class JSONSchema implements SchemaContainer { +public class JSONSchema implements Schema { private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); @Getter @@ -74,13 +72,16 @@ private JSONSchema(SchemaInfo schemaInfo, JsonSchema schema, String schemaString * @param Type of the Java class. * @return {@link JSONSchema} with generic type T that extracts and captures the json schema. 
*/ - @SneakyThrows({JsonMappingException.class, JsonProcessingException.class}) public static JSONSchema of(Class tClass) { - JsonSchemaGenerator schemaGen = new JsonSchemaGenerator(OBJECT_MAPPER); - JsonSchema schema = schemaGen.generateSchema(tClass); - String schemaString = OBJECT_MAPPER.writeValueAsString(schema); - - return new JSONSchema<>(schema, tClass.getName(), schemaString, tClass); + try { + JsonSchemaGenerator schemaGen = new JsonSchemaGenerator(OBJECT_MAPPER); + JsonSchema schema = schemaGen.generateSchema(tClass); + String schemaString = OBJECT_MAPPER.writeValueAsString(schema); + + return new JSONSchema<>(schema, tClass.getName(), schemaString, tClass); + } catch (JsonProcessingException e) { + throw new IllegalArgumentException(e); + } } /** @@ -90,10 +91,13 @@ public static JSONSchema of(Class tClass) { * @param schemaString Schema string to use. * @return Returns an JSONSchema with {@link Object} type. */ - @SneakyThrows({JsonMappingException.class, JsonProcessingException.class}) public static JSONSchema of(String type, String schemaString) { - JsonSchema schema = OBJECT_MAPPER.readValue(schemaString, JsonSchema.class); - return new JSONSchema<>(schema, type, schemaString, Object.class); + try { + JsonSchema schema = OBJECT_MAPPER.readValue(schemaString, JsonSchema.class); + return new JSONSchema<>(schema, type, schemaString, Object.class); + } catch (JsonProcessingException e) { + throw new IllegalArgumentException(e); + } } /** @@ -106,13 +110,16 @@ public static JSONSchema of(String type, String schemaString) { * @param Type of base class. * @return Returns an AvroSchema with {@link SpecificRecordBase} type. */ - @SneakyThrows({JsonMappingException.class, JsonProcessingException.class}) public static JSONSchema ofBaseType(Class tDerived, Class tBase) { - JsonSchemaGenerator schemaGen = new JsonSchemaGenerator(OBJECT_MAPPER); - JsonSchema schema = schemaGen.generateSchema(tDerived); - String schemaString = OBJECT_MAPPER.writeValueAsString(schema); - - return new JSONSchema<>(schema, tDerived.getName(), schemaString, tBase, tDerived); + try { + JsonSchemaGenerator schemaGen = new JsonSchemaGenerator(OBJECT_MAPPER); + JsonSchema schema = schemaGen.generateSchema(tDerived); + String schemaString = OBJECT_MAPPER.writeValueAsString(schema); + + return new JSONSchema<>(schema, tDerived.getName(), schemaString, tBase, tDerived); + } catch (JsonProcessingException e) { + throw new IllegalArgumentException(e); + } } /** @@ -121,12 +128,15 @@ public static JSONSchema ofBaseType(Class tDerived, Class * @param schemaInfo Schema info to translate into json schema. * @return Returns an JSONSchema with {@link Object} type. 
*/ - @SneakyThrows({JsonMappingException.class, JsonProcessingException.class}) public static JSONSchema from(SchemaInfo schemaInfo) { - String schemaString = new String(schemaInfo.getSchemaData().array(), Charsets.UTF_8); - - JsonSchema schema = OBJECT_MAPPER.readValue(schemaString, JsonSchema.class); - return new JSONSchema<>(schemaInfo, schema, schemaString, Object.class); + try { + String schemaString = new String(schemaInfo.getSchemaData().array(), Charsets.UTF_8); + + JsonSchema schema = OBJECT_MAPPER.readValue(schemaString, JsonSchema.class); + return new JSONSchema<>(schemaInfo, schema, schemaString, Object.class); + } catch (JsonProcessingException e) { + throw new IllegalArgumentException(e); + } } private ByteBuffer getSchemaBytes() { diff --git a/serializers/src/main/java/io/pravega/schemaregistry/schemas/ProtobufSchema.java b/serializers/src/main/java/io/pravega/schemaregistry/schemas/ProtobufSchema.java index 9df3938fe..e057f3e23 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/schemas/ProtobufSchema.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/schemas/ProtobufSchema.java @@ -13,14 +13,15 @@ import com.google.protobuf.DescriptorProtos; import com.google.protobuf.DynamicMessage; import com.google.protobuf.GeneratedMessageV3; +import com.google.protobuf.InvalidProtocolBufferException; import com.google.protobuf.Message; import com.google.protobuf.Parser; import io.pravega.schemaregistry.contract.data.SchemaInfo; import io.pravega.schemaregistry.contract.data.SerializationFormat; import lombok.Data; import lombok.Getter; -import lombok.SneakyThrows; +import java.lang.reflect.InvocationTargetException; import java.nio.ByteBuffer; /** @@ -30,7 +31,7 @@ * @param Type of element. */ @Data -public class ProtobufSchema implements SchemaContainer { +public class ProtobufSchema implements Schema { @Getter private final Parser parser; @Getter @@ -44,15 +45,12 @@ private ProtobufSchema(String name, Parser parser, DescriptorProtos.FileDescr this.schemaInfo = new SchemaInfo(name, SerializationFormat.Protobuf, getSchemaBytes(), ImmutableMap.of()); } - @SneakyThrows - private ProtobufSchema(SchemaInfo schemaInfo) { - DescriptorProtos.FileDescriptorSet fileDescriptorSet = DescriptorProtos.FileDescriptorSet.parseFrom(schemaInfo.getSchemaData()); - + private ProtobufSchema(DescriptorProtos.FileDescriptorSet fileDescriptorSet, SchemaInfo schemaInfo) { this.parser = null; this.descriptorProto = fileDescriptorSet; this.schemaInfo = schemaInfo; } - + private ByteBuffer getSchemaBytes() { return ByteBuffer.wrap(descriptorProto.toByteArray()); } @@ -70,11 +68,15 @@ public SchemaInfo getSchemaInfo() { * @param Type of protobuf message * @return {@link ProtobufSchema} with generic type T that captures protobuf schema and parser. 
*/ - @SneakyThrows @SuppressWarnings("unchecked") public static ProtobufSchema of(Class tClass, DescriptorProtos.FileDescriptorSet fileDescriptorSet) { - T defaultInstance = (T) tClass.getMethod("getDefaultInstance").invoke(null); + T defaultInstance; + try { + defaultInstance = (T) tClass.getMethod("getDefaultInstance").invoke(null); + } catch (IllegalAccessException | InvocationTargetException | NoSuchMethodException e) { + throw new IllegalArgumentException(e); + } Parser tParser = (Parser) defaultInstance.getParserForType(); return new ProtobufSchema<>(defaultInstance.getDescriptorForType().getFullName(), tParser, fileDescriptorSet); } @@ -89,8 +91,6 @@ public static ProtobufSchema of(Class tClas * @param fileDescriptorSet file descriptor set representing a protobuf schema. * @return {@link ProtobufSchema} with generic type {@link DynamicMessage} that captures protobuf schema. */ - @SneakyThrows - @SuppressWarnings("unchecked") public static ProtobufSchema of(String name, DescriptorProtos.FileDescriptorSet fileDescriptorSet) { return new ProtobufSchema<>(name, null, fileDescriptorSet); } @@ -106,12 +106,16 @@ public static ProtobufSchema of(String name, DescriptorProtos.Fi * @param Type of protobuf message * @return {@link ProtobufSchema} with generic type {@link GeneratedMessageV3} that captures protobuf schema and parser of type T. */ - @SneakyThrows @SuppressWarnings("unchecked") - public static ProtobufSchema ofGeneratedMessageV3( - Class tClass, DescriptorProtos.FileDescriptorSet fileDescriptorSet) { - T defaultInstance = (T) tClass.getMethod("getDefaultInstance").invoke(null); - Parser tParser = (Parser) defaultInstance.getParserForType(); + public static ProtobufSchema ofGeneratedMessageV3( + Class tClass, DescriptorProtos.FileDescriptorSet fileDescriptorSet) { + T defaultInstance; + try { + defaultInstance = (T) tClass.getMethod("getDefaultInstance").invoke(null); + } catch (IllegalAccessException | InvocationTargetException | NoSuchMethodException e) { + throw new IllegalArgumentException(e); + } + Parser tParser = (Parser) defaultInstance.getParserForType(); return new ProtobufSchema<>(defaultInstance.getDescriptorForType().getFullName(), tParser, fileDescriptorSet); } @@ -122,10 +126,14 @@ public static ProtobufSchema ofGeneratedMessag * @param schemaInfo Schema Info * @return {@link ProtobufSchema} with generic type {@link DynamicMessage} that captures protobuf schema. */ - @SneakyThrows - @SuppressWarnings("unchecked") public static ProtobufSchema from(SchemaInfo schemaInfo) { - return new ProtobufSchema<>(schemaInfo); + try { + DescriptorProtos.FileDescriptorSet fileDescriptorSet = DescriptorProtos.FileDescriptorSet.parseFrom(schemaInfo.getSchemaData()); + + return new ProtobufSchema<>(fileDescriptorSet, schemaInfo); + } catch (InvalidProtocolBufferException ex) { + throw new IllegalArgumentException(ex); + } } } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/schemas/SchemaContainer.java b/serializers/src/main/java/io/pravega/schemaregistry/schemas/Schema.java similarity index 64% rename from serializers/src/main/java/io/pravega/schemaregistry/schemas/SchemaContainer.java rename to serializers/src/main/java/io/pravega/schemaregistry/schemas/Schema.java index f4f1a796e..d3bfc834e 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/schemas/SchemaContainer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/schemas/Schema.java @@ -16,6 +16,12 @@ * * @param Type of object. 
*/ -public interface SchemaContainer { +public interface Schema { + /** + * Returns the {@link SchemaInfo} object that is computed from the schema object. SchemaInfo is the object that encapsulates + * all schema metadata to be shared with the schema registry service. + * + * @return Schema Info object derived from the schema object. + */ SchemaInfo getSchemaInfo(); } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractPravegaDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractDeserializer.java similarity index 71% rename from serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractPravegaDeserializer.java rename to serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractDeserializer.java index 1d467ccc5..c4c43c3cd 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractPravegaDeserializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractDeserializer.java @@ -9,27 +9,26 @@ */ package io.pravega.schemaregistry.serializers; -import io.pravega.client.stream.Serializer; import io.pravega.schemaregistry.client.SchemaRegistryClient; import io.pravega.schemaregistry.contract.data.EncodingId; import io.pravega.schemaregistry.contract.data.EncodingInfo; import io.pravega.schemaregistry.contract.data.GroupProperties; import io.pravega.schemaregistry.contract.data.SchemaInfo; -import io.pravega.schemaregistry.schemas.SchemaContainer; +import io.pravega.schemaregistry.schemas.Schema; +import lombok.SneakyThrows; import lombok.Synchronized; import lombok.extern.slf4j.Slf4j; import javax.annotation.Nullable; import java.io.ByteArrayInputStream; +import java.io.IOException; import java.io.InputStream; import java.nio.ByteBuffer; import java.util.Map; import java.util.concurrent.atomic.AtomicBoolean; -import static io.pravega.schemaregistry.codec.CodecFactory.NONE; - @Slf4j -abstract class AbstractPravegaDeserializer implements Serializer { +abstract class AbstractDeserializer extends FailingSerializer { private static final byte PROTOCOL = 0x0; private static final int HEADER_SIZE = 1 + Integer.BYTES; @@ -43,12 +42,12 @@ abstract class AbstractPravegaDeserializer implements Serializer { private final boolean skipHeaders; private final EncodingCache encodingCache; - protected AbstractPravegaDeserializer(String groupId, - SchemaRegistryClient client, - @Nullable SchemaContainer schema, - boolean skipHeaders, - SerializerConfig.Decoder decoder, - EncodingCache encodingCache) { + protected AbstractDeserializer(String groupId, + SchemaRegistryClient client, + @Nullable Schema schema, + boolean skipHeaders, + SerializerConfig.Decoder decoder, + EncodingCache encodingCache) { this.groupId = groupId; this.client = client; this.encodingCache = encodingCache; @@ -81,50 +80,43 @@ private void initialize() { } } - @Override - public ByteBuffer serialize(T obj) { - throw new IllegalStateException(); - } - + @SneakyThrows(IOException.class) @Override public T deserialize(ByteBuffer data) { + int start = data.arrayOffset() + data.position(); if (this.encodeHeader.get()) { SchemaInfo writerSchema = null; - String codecType = NONE; + ByteBuffer decoded; if (skipHeaders) { - int currentPos = data.position(); - data.position(currentPos + HEADER_SIZE); + data.position(start + HEADER_SIZE); + decoded = data; } else { byte protocol = data.get(); EncodingId encodingId = new EncodingId(data.getInt()); EncodingInfo encodingInfo = 
encodingCache.getGroupEncodingInfo(encodingId); - codecType = encodingInfo.getCodecType(); writerSchema = encodingInfo.getSchemaInfo(); + decoded = decoder.decode(encodingInfo.getCodecType(), data); } - - ByteBuffer decoded = decoder.decode(codecType, data); - byte[] array = new byte[decoded.remaining()]; - decoded.get(array); - InputStream inputStream = new ByteArrayInputStream(array); + ByteArrayInputStream bais = new ByteArrayInputStream(decoded.array(), + decoded.arrayOffset() + decoded.position(), decoded.remaining()); if (schemaInfo == null) { // deserialize into writer schema // pass writer schema for schema to be read into - return deserialize(inputStream, writerSchema, writerSchema); + return deserialize(bais, writerSchema, writerSchema); } else { // pass reader schema for schema on read to the underlying implementation - return deserialize(inputStream, writerSchema, schemaInfo); + return deserialize(bais, writerSchema, schemaInfo); } } else { // pass reader schema for schema on read to the underlying implementation - byte[] array = new byte[data.remaining()]; - data.get(array); - InputStream inputStream = new ByteArrayInputStream(array); + ByteArrayInputStream inputStream = new ByteArrayInputStream(data.array(), + data.arrayOffset() + data.position(), data.remaining()); return deserialize(inputStream, null, schemaInfo); } } - protected abstract T deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema); + protected abstract T deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) throws IOException; boolean isEncodeHeader() { return encodeHeader.get(); diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractPravegaSerializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractSerializer.java similarity index 74% rename from serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractPravegaSerializer.java rename to serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractSerializer.java index 3ed2664ca..ac1c26fd8 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractPravegaSerializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractSerializer.java @@ -10,26 +10,25 @@ package io.pravega.schemaregistry.serializers; import com.google.common.base.Preconditions; -import io.pravega.client.stream.Serializer; -import io.pravega.common.util.BitConverter; import io.pravega.schemaregistry.client.SchemaRegistryClient; import io.pravega.schemaregistry.codec.Codec; import io.pravega.schemaregistry.contract.data.EncodingId; import io.pravega.schemaregistry.contract.data.GroupProperties; import io.pravega.schemaregistry.contract.data.SchemaInfo; import io.pravega.schemaregistry.contract.data.VersionInfo; -import io.pravega.schemaregistry.schemas.SchemaContainer; +import io.pravega.schemaregistry.schemas.Schema; import lombok.Getter; import lombok.SneakyThrows; import java.io.ByteArrayOutputStream; +import java.io.IOException; import java.io.OutputStream; import java.nio.ByteBuffer; import java.util.Map; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; -abstract class AbstractPravegaSerializer implements Serializer { +abstract class AbstractSerializer extends FailingSerializer { private static final byte PROTOCOL = 0x0; private final String groupId; @@ -42,11 +41,11 @@ abstract class AbstractPravegaSerializer implements Serializer { 
private final Codec codec; private final boolean registerSchema; - protected AbstractPravegaSerializer(String groupId, - SchemaRegistryClient client, - SchemaContainer schema, - Codec codec, - boolean registerSchema) { + protected AbstractSerializer(String groupId, + SchemaRegistryClient client, + Schema schema, + Codec codec, + boolean registerSchema) { Preconditions.checkNotNull(groupId); Preconditions.checkNotNull(client); Preconditions.checkNotNull(codec); @@ -82,19 +81,11 @@ private void initialize() { } } - @SneakyThrows + @SneakyThrows(IOException.class) @Override public ByteBuffer serialize(T obj) { - ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); ByteArrayOutputStream dataStream = new ByteArrayOutputStream(); - if (this.encodeHeader.get()) { - Preconditions.checkNotNull(schemaInfo); - - outputStream.write(PROTOCOL); - BitConverter.writeInt(outputStream, encodingId.get().getId()); - } - // if schema is not null, pass the schema to the serializer implementation if (schemaInfo != null) { serialize(obj, schemaInfo, dataStream); @@ -104,20 +95,24 @@ public ByteBuffer serialize(T obj) { dataStream.flush(); - byte[] array = dataStream.toByteArray(); - - ByteBuffer encoded = codec.encode(ByteBuffer.wrap(array)); - array = new byte[encoded.remaining()]; - encoded.get(array); - - outputStream.write(array); - return ByteBuffer.wrap(outputStream.toByteArray()); + byte[] serialized = dataStream.toByteArray(); + + ByteBuffer byteBuffer; + if (this.encodeHeader.get()) { + Preconditions.checkNotNull(schemaInfo); + ByteBuffer encoded = codec.encode(ByteBuffer.wrap(serialized)); + int bufferSize = 5 + encoded.remaining(); + byteBuffer = ByteBuffer.allocate(bufferSize); + byteBuffer.put(PROTOCOL); + byteBuffer.putInt(encodingId.get().getId()); + byteBuffer.put(encoded); + byteBuffer.rewind(); + } else { + byteBuffer = ByteBuffer.wrap(serialized); + } + + return byteBuffer; } - protected abstract void serialize(T var, SchemaInfo schema, OutputStream outputStream); - - @Override - public T deserialize(ByteBuffer bytes) { - throw new IllegalStateException(); - } + protected abstract void serialize(T var, SchemaInfo schema, OutputStream outputStream) throws IOException; } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroDeserlizer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroDeserlizer.java index a169b2ce8..9863320fa 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroDeserlizer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroDeserlizer.java @@ -11,13 +11,9 @@ import com.google.common.base.Charsets; import com.google.common.base.Preconditions; -import com.google.common.cache.CacheBuilder; -import com.google.common.cache.CacheLoader; -import com.google.common.cache.LoadingCache; import io.pravega.schemaregistry.client.SchemaRegistryClient; import io.pravega.schemaregistry.contract.data.SchemaInfo; import io.pravega.schemaregistry.schemas.AvroSchema; -import lombok.SneakyThrows; import org.apache.avro.Schema; import org.apache.avro.io.BinaryDecoder; import org.apache.avro.io.DecoderFactory; @@ -25,11 +21,13 @@ import org.apache.avro.specific.SpecificDatumReader; import org.apache.avro.specific.SpecificRecordBase; +import java.io.IOException; import java.io.InputStream; +import java.util.concurrent.ConcurrentHashMap; -class AvroDeserlizer extends AbstractPravegaDeserializer { +class AvroDeserlizer extends AbstractDeserializer { private final AvroSchema avroSchema; - 
private final LoadingCache knownSchemas; + private final ConcurrentHashMap knownSchemas; AvroDeserlizer(String groupId, SchemaRegistryClient client, AvroSchema schema, @@ -37,20 +35,17 @@ class AvroDeserlizer extends AbstractPravegaDeserializer { super(groupId, client, schema, false, decoder, encodingCache); Preconditions.checkNotNull(schema); this.avroSchema = schema; - this.knownSchemas = CacheBuilder.newBuilder().build(new CacheLoader() { - @Override - public Schema load(byte[] schemaData) throws Exception { - String schemaString = new String(schemaData, Charsets.UTF_8); - return new Schema.Parser().parse(schemaString); - } - }); + this.knownSchemas = new ConcurrentHashMap<>(); } - @SneakyThrows @Override - protected T deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) { + protected T deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) throws IOException { Preconditions.checkNotNull(writerSchemaInfo); - Schema writerSchema = knownSchemas.get(writerSchemaInfo.getSchemaData().array()); + Schema writerSchema = knownSchemas.computeIfAbsent(writerSchemaInfo, x -> { + String schemaString = new String(x.getSchemaData().array(), Charsets.UTF_8); + return new Schema.Parser().parse(schemaString); + + }); Schema readerSchema = avroSchema.getSchema(); BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(inputStream, null); diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroGenericDeserlizer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroGenericDeserlizer.java index 9ab438238..9d3871eec 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroGenericDeserlizer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroGenericDeserlizer.java @@ -10,41 +10,33 @@ package io.pravega.schemaregistry.serializers; import com.google.common.base.Preconditions; -import com.google.common.cache.CacheBuilder; -import com.google.common.cache.CacheLoader; -import com.google.common.cache.LoadingCache; import io.pravega.schemaregistry.client.SchemaRegistryClient; import io.pravega.schemaregistry.contract.data.SchemaInfo; import io.pravega.schemaregistry.schemas.AvroSchema; -import lombok.SneakyThrows; import org.apache.avro.Schema; import org.apache.avro.generic.GenericDatumReader; import org.apache.avro.io.BinaryDecoder; import org.apache.avro.io.DecoderFactory; import javax.annotation.Nullable; +import java.io.IOException; import java.io.InputStream; +import java.util.concurrent.ConcurrentHashMap; -class AvroGenericDeserlizer extends AbstractPravegaDeserializer { - private final LoadingCache knownSchemas; +class AvroGenericDeserlizer extends AbstractDeserializer { + private final ConcurrentHashMap knownSchemas; AvroGenericDeserlizer(String groupId, SchemaRegistryClient client, @Nullable AvroSchema schema, SerializerConfig.Decoder decoder, EncodingCache encodingCache) { super(groupId, client, schema, false, decoder, encodingCache); - this.knownSchemas = CacheBuilder.newBuilder().build(new CacheLoader() { - @Override - public Schema load(SchemaInfo schemaInfo) throws Exception { - return AvroSchema.from(schemaInfo).getSchema(); - } - }); + this.knownSchemas = new ConcurrentHashMap<>(); } - @SneakyThrows @Override - protected Object deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) { + protected Object deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo 
readerSchemaInfo) throws IOException { Preconditions.checkNotNull(writerSchemaInfo); - Schema writerSchema = knownSchemas.get(writerSchemaInfo); - Schema readerSchema = knownSchemas.get(readerSchemaInfo); + Schema writerSchema = knownSchemas.computeIfAbsent(writerSchemaInfo, x -> AvroSchema.from(x).getSchema()); + Schema readerSchema = knownSchemas.computeIfAbsent(readerSchemaInfo, x -> AvroSchema.from(x).getSchema()); GenericDatumReader genericDatumReader = new GenericDatumReader<>(writerSchema, readerSchema); diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroSerializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroSerializer.java index c9789a8b6..2a0169a2f 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroSerializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroSerializer.java @@ -13,7 +13,6 @@ import io.pravega.schemaregistry.codec.Codec; import io.pravega.schemaregistry.contract.data.SchemaInfo; import io.pravega.schemaregistry.schemas.AvroSchema; -import lombok.SneakyThrows; import org.apache.avro.Schema; import org.apache.avro.generic.GenericDatumWriter; import org.apache.avro.generic.IndexedRecord; @@ -23,10 +22,10 @@ import org.apache.avro.specific.SpecificDatumWriter; import org.apache.avro.specific.SpecificRecord; -import java.io.ByteArrayOutputStream; +import java.io.IOException; import java.io.OutputStream; -class AvroSerializer extends AbstractPravegaSerializer { +class AvroSerializer extends AbstractSerializer { private final AvroSchema avroSchema; AvroSerializer(String groupId, SchemaRegistryClient client, AvroSchema schema, Codec codec, boolean registerSchema) { @@ -34,13 +33,11 @@ class AvroSerializer extends AbstractPravegaSerializer { this.avroSchema = schema; } - @SneakyThrows @Override - protected void serialize(T var, SchemaInfo schemaInfo, OutputStream outputStream) { + protected void serialize(T var, SchemaInfo schemaInfo, OutputStream outputStream) throws IOException { Schema schema = avroSchema.getSchema(); - ByteArrayOutputStream out = new ByteArrayOutputStream(); - BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null); + BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(outputStream, null); if (IndexedRecord.class.isAssignableFrom(var.getClass())) { if (SpecificRecord.class.isAssignableFrom(var.getClass())) { @@ -56,7 +53,6 @@ protected void serialize(T var, SchemaInfo schemaInfo, OutputStream outputStream } encoder.flush(); - outputStream.write(out.toByteArray()); outputStream.flush(); } } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroSerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroSerializerFactory.java index 75798ca0e..46c28193b 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroSerializerFactory.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroSerializerFactory.java @@ -12,7 +12,6 @@ import com.google.common.base.Preconditions; import io.pravega.client.stream.Serializer; import io.pravega.schemaregistry.client.SchemaRegistryClient; -import io.pravega.schemaregistry.client.SchemaRegistryClientFactory; import io.pravega.schemaregistry.common.Either; import io.pravega.schemaregistry.schemas.AvroSchema; import lombok.extern.slf4j.Slf4j; @@ -31,11 +30,7 @@ class AvroSerializerFactory { static Serializer serializer(SerializerConfig config, AvroSchema schemaData) { 
Preconditions.checkNotNull(config); Preconditions.checkNotNull(schemaData); - SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? - SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : - config.getRegistryConfigOrClient().getRight(); - autoCreateGroup(schemaRegistryClient, config); - registerCodec(schemaRegistryClient, config); + SchemaRegistryClient schemaRegistryClient = initForSerializer(config); String groupId = config.getGroupId(); return new AvroSerializer<>(groupId, schemaRegistryClient, schemaData, config.getCodec(), config.isRegisterSchema()); } @@ -43,13 +38,8 @@ static Serializer serializer(SerializerConfig config, AvroSchema schem static Serializer deserializer(SerializerConfig config, AvroSchema schemaData) { Preconditions.checkNotNull(config); Preconditions.checkNotNull(schemaData); - SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? - SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : - config.getRegistryConfigOrClient().getRight(); - + SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); String groupId = config.getGroupId(); - autoCreateGroup(schemaRegistryClient, config); - failOnCodecMismatch(schemaRegistryClient, config); EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); @@ -58,11 +48,7 @@ static Serializer deserializer(SerializerConfig config, AvroSchema sch static Serializer genericDeserializer(SerializerConfig config, @Nullable AvroSchema schemaData) { String groupId = config.getGroupId(); - SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? - SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : - config.getRegistryConfigOrClient().getRight(); - autoCreateGroup(schemaRegistryClient, config); - failOnCodecMismatch(schemaRegistryClient, config); + SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); return new AvroGenericDeserlizer(groupId, schemaRegistryClient, schemaData, config.getDecoder(), encodingCache); @@ -73,12 +59,8 @@ static Serializer multiTypeSerializer(SerializerConfig config, Map, AbstractPravegaSerializer> serializerMap = schemas + SchemaRegistryClient schemaRegistryClient = initForSerializer(config); + Map, AbstractSerializer> serializerMap = schemas .entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, x -> new AvroSerializer<>(groupId, schemaRegistryClient, x.getValue(), config.getCodec(), config.isRegisterSchema()))); @@ -91,15 +73,11 @@ static Serializer multiTypeDeserializer( Preconditions.checkNotNull(schemas); String groupId = config.getGroupId(); - SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? 
- SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : - config.getRegistryConfigOrClient().getRight(); - autoCreateGroup(schemaRegistryClient, config); - failOnCodecMismatch(schemaRegistryClient, config); + SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - Map> deserializerMap = schemas + Map> deserializerMap = schemas .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), x -> new AvroDeserlizer<>(groupId, schemaRegistryClient, x, config.getDecoder(), encodingCache))); return new MultiplexedDeserializer<>(groupId, schemaRegistryClient, deserializerMap, config.getDecoder(), @@ -112,18 +90,14 @@ static Serializer> typedOrGenericDeserializer( Preconditions.checkNotNull(schemas); String groupId = config.getGroupId(); - SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? - SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : - config.getRegistryConfigOrClient().getRight(); - autoCreateGroup(schemaRegistryClient, config); - failOnCodecMismatch(schemaRegistryClient, config); + SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - Map> deserializerMap = schemas + Map> deserializerMap = schemas .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), x -> new AvroDeserlizer<>(groupId, schemaRegistryClient, x, config.getDecoder(), encodingCache))); - AbstractPravegaDeserializer genericDeserializer = new AvroGenericDeserlizer(groupId, schemaRegistryClient, + AbstractDeserializer genericDeserializer = new AvroGenericDeserlizer(groupId, schemaRegistryClient, null, config.getDecoder(), encodingCache); return new MultiplexedAndGenericDeserializer<>(groupId, schemaRegistryClient, deserializerMap, genericDeserializer, config.getDecoder(), encodingCache); diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/Codecs.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/Codecs.java new file mode 100644 index 000000000..a03ad2ccc --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/Codecs.java @@ -0,0 +1,127 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import io.pravega.schemaregistry.codec.Codec; +import lombok.Getter; +import org.xerial.snappy.Snappy; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.zip.GZIPInputStream; +import java.util.zip.GZIPOutputStream; + +/** + * Utility class for creating codecs for none, snappy or gzip. 
+ */ +public enum Codecs { + None(Constants.NOOP, Constants.NONE), + GzipCompressor(Constants.GZIP_CODEC, Constants.APPLICATION_X_GZIP), + SnappyCompressor(Constants.SNAPPY_CODEC, Constants.APPLICATION_X_SNAPPY_FRAMED); + + @Getter + private final Codec codec; + @Getter + private final String mimeType; + + Codecs(Codec codec, String mimeType) { + this.codec = codec; + this.mimeType = mimeType; + } + + private static class Noop implements Codec { + @Override + public String getCodecType() { + return Constants.NONE; + } + + @Override + public ByteBuffer encode(ByteBuffer data) { + return data; + } + + @Override + public ByteBuffer decode(ByteBuffer data) { + return data; + } + } + + private static class GZipCodec implements Codec { + @Override + public String getCodecType() { + return Constants.APPLICATION_X_GZIP; + } + + @Override + public ByteBuffer encode(ByteBuffer data) throws IOException { + ByteArrayOutputStream bos = new ByteArrayOutputStream(data.remaining()); + GZIPOutputStream gzipOS = new GZIPOutputStream(bos); + gzipOS.write(data.array(), data.arrayOffset() + data.position(), data.remaining()); + gzipOS.close(); + byte[] compressed = bos.toByteArray(); + return ByteBuffer.wrap(compressed); + } + + @Override + public ByteBuffer decode(ByteBuffer data) throws IOException { + byte[] array = new byte[data.remaining()]; + data.get(array); + + ByteArrayInputStream bis = new ByteArrayInputStream(array); + ByteArrayOutputStream bos = new ByteArrayOutputStream(); + GZIPInputStream gzipIS = new GZIPInputStream(bis); + byte[] buffer = new byte[1024]; + int len; + while ((len = gzipIS.read(buffer)) != -1) { + bos.write(buffer, 0, len); + } + byte[] uncompressed = bos.toByteArray(); + return ByteBuffer.wrap(uncompressed); + } + } + + private static class SnappyCodec implements Codec { + @Override + public String getCodecType() { + return Constants.APPLICATION_X_SNAPPY_FRAMED; + } + + @Override + public ByteBuffer encode(ByteBuffer data) throws IOException { + int capacity = Snappy.maxCompressedLength(data.remaining()); + ByteBuffer encoded = ByteBuffer.allocate(capacity); + + int size = Snappy.compress(data.array(), data.arrayOffset() + data.position(), + data.remaining(), encoded.array(), 0); + encoded.limit(size); + return encoded; + } + + @Override + public ByteBuffer decode(ByteBuffer data) throws IOException { + ByteBuffer decoded = ByteBuffer.allocate(Snappy.uncompressedLength(data.array(), data.arrayOffset() + data.position(), + data.remaining())); + Snappy.uncompress(data.array(), data.arrayOffset() + data.position(), + data.remaining(), decoded.array(), 0); + return decoded; + } + } + + static class Constants { + static final Noop NOOP = new Noop(); + static final GZipCodec GZIP_CODEC = new GZipCodec(); + static final SnappyCodec SNAPPY_CODEC = new SnappyCodec(); + static final String NONE = ""; + static final String APPLICATION_X_GZIP = "application/x-gzip"; + static final String APPLICATION_X_SNAPPY_FRAMED = "application/x-snappy-framed"; + } +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/PravegaDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/CustomDeserializer.java similarity index 92% rename from serializers/src/main/java/io/pravega/schemaregistry/serializers/PravegaDeserializer.java rename to serializers/src/main/java/io/pravega/schemaregistry/serializers/CustomDeserializer.java index cfb286bcd..a3575c87e 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/PravegaDeserializer.java +++ 
b/serializers/src/main/java/io/pravega/schemaregistry/serializers/CustomDeserializer.java @@ -13,6 +13,6 @@ import java.io.InputStream; -public interface PravegaDeserializer { +public interface CustomDeserializer { T deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema); } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/PravegaSerializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/CustomSerializer.java similarity index 92% rename from serializers/src/main/java/io/pravega/schemaregistry/serializers/PravegaSerializer.java rename to serializers/src/main/java/io/pravega/schemaregistry/serializers/CustomSerializer.java index 6afe07cc1..7a508b734 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/PravegaSerializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/CustomSerializer.java @@ -13,6 +13,6 @@ import java.io.OutputStream; -public interface PravegaSerializer { +public interface CustomSerializer { void serialize(T var, SchemaInfo schema, OutputStream outputStream); } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/CustomSerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/CustomSerializerFactory.java index 37072677a..35f0d767a 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/CustomSerializerFactory.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/CustomSerializerFactory.java @@ -11,30 +11,26 @@ import io.pravega.client.stream.Serializer; import io.pravega.schemaregistry.client.SchemaRegistryClient; -import io.pravega.schemaregistry.client.SchemaRegistryClientFactory; import io.pravega.schemaregistry.contract.data.SchemaInfo; -import io.pravega.schemaregistry.schemas.SchemaContainer; +import io.pravega.schemaregistry.schemas.Schema; import lombok.extern.slf4j.Slf4j; import javax.annotation.Nullable; import java.io.InputStream; import java.io.OutputStream; -import static io.pravega.schemaregistry.serializers.SerializerFactoryHelper.*; +import static io.pravega.schemaregistry.serializers.SerializerFactoryHelper.initForDeserializer; +import static io.pravega.schemaregistry.serializers.SerializerFactoryHelper.initForSerializer; /** * Internal Factory class for Custom serializers and deserializers. */ @Slf4j class CustomSerializerFactory { - static Serializer serializer(SerializerConfig config, SchemaContainer schema, PravegaSerializer serializer) { + static Serializer serializer(SerializerConfig config, Schema schema, CustomSerializer serializer) { String groupId = config.getGroupId(); - SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? 
- SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : - config.getRegistryConfigOrClient().getRight(); - autoCreateGroup(schemaRegistryClient, config); - registerCodec(schemaRegistryClient, config); - return new AbstractPravegaSerializer(groupId, schemaRegistryClient, + SchemaRegistryClient schemaRegistryClient = initForSerializer(config); + return new AbstractSerializer(groupId, schemaRegistryClient, schema, config.getCodec(), config.isRegisterSchema()) { @Override protected void serialize(T var, SchemaInfo schema, OutputStream outputStream) { @@ -43,19 +39,15 @@ protected void serialize(T var, SchemaInfo schema, OutputStream outputStream) { }; } - static Serializer deserializer(SerializerConfig config, @Nullable SchemaContainer schema, - PravegaDeserializer deserializer) { + static Serializer deserializer(SerializerConfig config, @Nullable Schema schema, + CustomDeserializer deserializer) { String groupId = config.getGroupId(); - SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? - SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : - config.getRegistryConfigOrClient().getRight(); - autoCreateGroup(schemaRegistryClient, config); - failOnCodecMismatch(schemaRegistryClient, config); + SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - return new AbstractPravegaDeserializer(groupId, schemaRegistryClient, schema, false, + return new AbstractDeserializer(groupId, schemaRegistryClient, schema, false, config.getDecoder(), encodingCache) { @Override protected T deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) { diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/EncodingCache.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/EncodingCache.java index f8bd3b1d7..f301dcefa 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/EncodingCache.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/EncodingCache.java @@ -12,22 +12,28 @@ import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; +import io.pravega.common.Exceptions; import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.client.exceptions.RegistryExceptions; import io.pravega.schemaregistry.contract.data.EncodingId; import io.pravega.schemaregistry.contract.data.EncodingInfo; import lombok.Data; -import lombok.SneakyThrows; +import java.time.Duration; import java.util.concurrent.ExecutionException; /** * Local cache for storing schemas that are retrieved from the registry service. 
*/ public class EncodingCache { + private static final Duration EXPIRY_AFTER_ACCESS = Duration.ofMinutes(20); + private final LoadingCache encodingCache; EncodingCache(String groupId, SchemaRegistryClient schemaRegistryClient) { - encodingCache = CacheBuilder.newBuilder().build(new CacheLoader() { + encodingCache = CacheBuilder.newBuilder() + .expireAfterAccess(EXPIRY_AFTER_ACCESS) + .build(new CacheLoader() { @Override public EncodingInfo load(EncodingId key) { return schemaRegistryClient.getEncodingInfo(groupId, key); @@ -35,9 +41,16 @@ public EncodingInfo load(EncodingId key) { }); } - @SneakyThrows(ExecutionException.class) - public EncodingInfo getGroupEncodingInfo(EncodingId encodingId) { - return encodingCache.get(encodingId); + EncodingInfo getGroupEncodingInfo(EncodingId encodingId) { + try { + return encodingCache.get(encodingId); + } catch (ExecutionException e) { + if (e.getCause() != null && Exceptions.unwrap(e.getCause()) instanceof RegistryExceptions) { + throw (RegistryExceptions) e.getCause(); + } else { + throw new RuntimeException(e.getCause()); + } + } } @Data diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/FailingSerializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/FailingSerializer.java new file mode 100644 index 000000000..e624eab46 --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/FailingSerializer.java @@ -0,0 +1,26 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import io.pravega.client.stream.Serializer; + +import java.nio.ByteBuffer; + +class FailingSerializer implements Serializer { + @Override + public ByteBuffer serialize(T value) { + throw new IllegalStateException(); + } + + @Override + public T deserialize(ByteBuffer serializedValue) { + throw new IllegalStateException(); + } +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonDeserlizer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonDeserlizer.java index f8d56982c..0996e3c05 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonDeserlizer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonDeserlizer.java @@ -10,20 +10,18 @@ package io.pravega.schemaregistry.serializers; import com.fasterxml.jackson.annotation.PropertyAccessor; -import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Preconditions; import io.pravega.schemaregistry.client.SchemaRegistryClient; import io.pravega.schemaregistry.contract.data.SchemaInfo; import io.pravega.schemaregistry.schemas.JSONSchema; -import lombok.SneakyThrows; import java.io.IOException; import java.io.InputStream; import static com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility; -class JsonDeserlizer extends AbstractPravegaDeserializer { +class JsonDeserlizer extends AbstractDeserializer { private final JSONSchema jsonSchema; private final ObjectMapper objectMapper; @@ -39,9 +37,8 @@ class JsonDeserlizer extends AbstractPravegaDeserializer { objectMapper.setVisibility(PropertyAccessor.CREATOR, Visibility.ANY); } - 
@SneakyThrows({JsonProcessingException.class, IOException.class}) @Override - protected T deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) { + protected T deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) throws IOException { return objectMapper.readValue(inputStream, jsonSchema.getTClass()); } } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericDeserializer.java index a865ffcfa..ceee7fd94 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericDeserializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericDeserializer.java @@ -11,17 +11,15 @@ import com.fasterxml.jackson.annotation.JsonAutoDetect; import com.fasterxml.jackson.annotation.PropertyAccessor; -import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import io.pravega.schemaregistry.client.SchemaRegistryClient; import io.pravega.schemaregistry.contract.data.SchemaInfo; -import lombok.SneakyThrows; import java.io.IOException; import java.io.InputStream; import java.util.Map; -class JsonGenericDeserializer extends AbstractPravegaDeserializer> { +class JsonGenericDeserializer extends AbstractDeserializer> { private final ObjectMapper objectMapper; JsonGenericDeserializer(String groupId, SchemaRegistryClient client, @@ -33,9 +31,8 @@ class JsonGenericDeserializer extends AbstractPravegaDeserializer deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) { + protected WithSchema deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) throws IOException { Object obj = objectMapper.readValue(inputStream, Object.class); return new WithSchema<>(writerSchemaInfo, obj, (x, y) -> (Map) y); } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializer.java index facd15dc5..7c131f313 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializer.java @@ -16,11 +16,11 @@ import io.pravega.schemaregistry.codec.Codec; import io.pravega.schemaregistry.contract.data.SchemaInfo; import io.pravega.schemaregistry.schemas.JSONSchema; -import lombok.SneakyThrows; +import java.io.IOException; import java.io.OutputStream; -class JsonSerializer extends AbstractPravegaSerializer { +class JsonSerializer extends AbstractSerializer { private final ObjectMapper objectMapper; JsonSerializer(String groupId, SchemaRegistryClient client, JSONSchema schema, Codec codec, boolean registerSchema) { @@ -31,9 +31,8 @@ class JsonSerializer extends AbstractPravegaSerializer { objectMapper.setVisibility(PropertyAccessor.CREATOR, JsonAutoDetect.Visibility.ANY); } - @SneakyThrows @Override - protected void serialize(T var, SchemaInfo schemaInfo, OutputStream outputStream) { + protected void serialize(T var, SchemaInfo schemaInfo, OutputStream outputStream) throws IOException { objectMapper.writeValue(outputStream, var); outputStream.flush(); } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializerFactory.java 
b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializerFactory.java index ba4ce2293..ca15c46ba 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializerFactory.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializerFactory.java @@ -11,7 +11,6 @@ import io.pravega.client.stream.Serializer; import io.pravega.schemaregistry.client.SchemaRegistryClient; -import io.pravega.schemaregistry.client.SchemaRegistryClientFactory; import io.pravega.schemaregistry.common.Either; import io.pravega.schemaregistry.schemas.JSONSchema; import lombok.extern.slf4j.Slf4j; @@ -28,22 +27,14 @@ class JsonSerializerFactory { static Serializer serializer(SerializerConfig config, JSONSchema schemaData) { String groupId = config.getGroupId(); - SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? - SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : - config.getRegistryConfigOrClient().getRight(); - autoCreateGroup(schemaRegistryClient, config); - registerCodec(schemaRegistryClient, config); + SchemaRegistryClient schemaRegistryClient = initForSerializer(config); return new JsonSerializer<>(groupId, schemaRegistryClient, schemaData, config.getCodec(), config.isRegisterSchema()); } static Serializer deserializer(SerializerConfig config, JSONSchema schemaData) { String groupId = config.getGroupId(); - SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? - SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : - config.getRegistryConfigOrClient().getRight(); - autoCreateGroup(schemaRegistryClient, config); - failOnCodecMismatch(schemaRegistryClient, config); + SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); @@ -52,9 +43,7 @@ static Serializer deserializer(SerializerConfig config, JSONSchema sch } static Serializer> genericDeserializer(SerializerConfig config) { - SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? - SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : - config.getRegistryConfigOrClient().getRight(); + SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); String groupId = config.getGroupId(); @@ -65,9 +54,7 @@ static Serializer> genericDeserializer(SerializerConfig confi } static Serializer jsonStringDeserializer(SerializerConfig config) { - SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? - SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : - config.getRegistryConfigOrClient().getRight(); + SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); String groupId = config.getGroupId(); @@ -79,12 +66,8 @@ static Serializer jsonStringDeserializer(SerializerConfig config) { static Serializer multiTypeSerializer( SerializerConfig config, Map, JSONSchema> schemas) { String groupId = config.getGroupId(); - SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? 
- SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : - config.getRegistryConfigOrClient().getRight(); - autoCreateGroup(schemaRegistryClient, config); - registerCodec(schemaRegistryClient, config); - Map, AbstractPravegaSerializer> serializerMap = schemas + SchemaRegistryClient schemaRegistryClient = initForSerializer(config); + Map, AbstractSerializer> serializerMap = schemas .entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, x -> new JsonSerializer<>(groupId, schemaRegistryClient, x.getValue(), config.getCodec(), config.isRegisterSchema()))); @@ -94,13 +77,11 @@ static Serializer multiTypeSerializer( static Serializer multiTypeDeserializer( SerializerConfig config, Map, JSONSchema> schemas) { String groupId = config.getGroupId(); - SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? - SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : - config.getRegistryConfigOrClient().getRight(); + SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - Map> deserializerMap = schemas + Map> deserializerMap = schemas .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), x -> new JsonDeserlizer<>(groupId, schemaRegistryClient, x, config.getDecoder(), encodingCache))); @@ -111,15 +92,11 @@ static Serializer multiTypeDeserializer( static Serializer>> typedOrGenericDeserializer( SerializerConfig config, Map, JSONSchema> schemas) { String groupId = config.getGroupId(); - SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? - SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : - config.getRegistryConfigOrClient().getRight(); - autoCreateGroup(schemaRegistryClient, config); - failOnCodecMismatch(schemaRegistryClient, config); + SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - Map> deserializerMap = schemas + Map> deserializerMap = schemas .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), x -> new JsonDeserlizer<>(groupId, schemaRegistryClient, x, config.getDecoder(), encodingCache))); JsonGenericDeserializer genericDeserializer = new JsonGenericDeserializer(groupId, schemaRegistryClient, config.getDecoder(), diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonStringDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonStringDeserializer.java index 647e02ae6..6411a0778 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonStringDeserializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonStringDeserializer.java @@ -11,16 +11,14 @@ import com.fasterxml.jackson.annotation.JsonAutoDetect; import com.fasterxml.jackson.annotation.PropertyAccessor; -import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import io.pravega.schemaregistry.client.SchemaRegistryClient; import io.pravega.schemaregistry.contract.data.SchemaInfo; -import lombok.SneakyThrows; import java.io.IOException; import java.io.InputStream; -class JsonStringDeserializer extends AbstractPravegaDeserializer { +class JsonStringDeserializer extends AbstractDeserializer { private final ObjectMapper 
objectMapper; JsonStringDeserializer(String groupId, SchemaRegistryClient client, @@ -32,9 +30,8 @@ class JsonStringDeserializer extends AbstractPravegaDeserializer { objectMapper.setVisibility(PropertyAccessor.CREATOR, JsonAutoDetect.Visibility.ANY); } - @SneakyThrows({JsonProcessingException.class, IOException.class}) @Override - protected String deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) { + protected String deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) throws IOException { Object obj = objectMapper.readValue(inputStream, Object.class); return objectMapper.writeValueAsString(obj); } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatSerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatSerializerFactory.java index c93137e1d..75487ac4b 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatSerializerFactory.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatSerializerFactory.java @@ -13,7 +13,6 @@ import com.google.protobuf.DynamicMessage; import io.pravega.client.stream.Serializer; import io.pravega.schemaregistry.client.SchemaRegistryClient; -import io.pravega.schemaregistry.client.SchemaRegistryClientFactory; import io.pravega.schemaregistry.contract.data.SchemaInfo; import io.pravega.schemaregistry.contract.data.SerializationFormat; import io.pravega.schemaregistry.schemas.AvroSchema; @@ -30,7 +29,8 @@ import java.util.function.BiFunction; import java.util.function.Function; -import static io.pravega.schemaregistry.serializers.SerializerFactoryHelper.*; +import static io.pravega.schemaregistry.serializers.SerializerFactoryHelper.initForDeserializer; +import static io.pravega.schemaregistry.serializers.SerializerFactoryHelper.initForSerializer; import static io.pravega.schemaregistry.serializers.WithSchema.NO_TRANSFORM; /** @@ -72,46 +72,38 @@ static Serializer deserializeAsT(SerializerConfig config, // endregion private static Serializer> serializerInternal(SerializerConfig config, - Map> customSerializers) { + Map> customSerializers) { Preconditions.checkNotNull(config); - SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? - SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : - config.getRegistryConfigOrClient().getRight(); - autoCreateGroup(schemaRegistryClient, config); - registerCodec(schemaRegistryClient, config); + SchemaRegistryClient schemaRegistryClient = initForSerializer(config); String groupId = config.getGroupId(); // if serializer is not already present, create a new serializer. - Function> serializerFunction = + Function> serializerFunction = x -> getPravegaSerializer(config, customSerializers, schemaRegistryClient, groupId, x); return new MultipleFormatSerializer(serializerFunction); } private static Serializer deserializeAsTInternal(SerializerConfig config, - Map> deserializers, + Map> deserializers, BiFunction transform) { String groupId = config.getGroupId(); - SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? 
- SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : - config.getRegistryConfigOrClient().getRight(); - autoCreateGroup(schemaRegistryClient, config); - failOnCodecMismatch(schemaRegistryClient, config); + SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - AbstractPravegaDeserializer json = new JsonGenericDeserializer(config.getGroupId(), schemaRegistryClient, + AbstractDeserializer json = new JsonGenericDeserializer(config.getGroupId(), schemaRegistryClient, config.getDecoder(), encodingCache); - AbstractPravegaDeserializer protobuf = new ProtobufGenericDeserlizer(groupId, schemaRegistryClient, null, config.getDecoder(), + AbstractDeserializer protobuf = new ProtobufGenericDeserlizer(groupId, schemaRegistryClient, null, config.getDecoder(), encodingCache); - AbstractPravegaDeserializer avro = new AvroGenericDeserlizer(groupId, schemaRegistryClient, null, config.getDecoder(), + AbstractDeserializer avro = new AvroGenericDeserlizer(groupId, schemaRegistryClient, null, config.getDecoder(), encodingCache); - Map map = new HashMap<>(); + Map map = new HashMap<>(); map.put(SerializationFormat.Json, json); map.put(SerializationFormat.Avro, avro); map.put(SerializationFormat.Protobuf, protobuf); deserializers.forEach((key, value) -> { - map.put(key, new AbstractPravegaDeserializer(groupId, schemaRegistryClient, null, false, config.getDecoder(), encodingCache) { + map.put(key, new AbstractDeserializer(groupId, schemaRegistryClient, null, false, config.getDecoder(), encodingCache) { @Override protected Object deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) { return value.deserialize(inputStream, writerSchema, readerSchema); @@ -124,29 +116,25 @@ protected Object deserialize(InputStream inputStream, SchemaInfo writerSchema, S } private static Serializer> deserializerInternal(SerializerConfig config, Map> deserializers, BiFunction transform) { + CustomDeserializer> deserializers, BiFunction transform) { String groupId = config.getGroupId(); - SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? 
- SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : - config.getRegistryConfigOrClient().getRight(); - autoCreateGroup(schemaRegistryClient, config); - failOnCodecMismatch(schemaRegistryClient, config); + SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - AbstractPravegaDeserializer json = new JsonGenericDeserializer(config.getGroupId(), schemaRegistryClient, + AbstractDeserializer json = new JsonGenericDeserializer(config.getGroupId(), schemaRegistryClient, config.getDecoder(), encodingCache); - AbstractPravegaDeserializer protobuf = new ProtobufGenericDeserlizer(groupId, schemaRegistryClient, null, config.getDecoder(), + AbstractDeserializer protobuf = new ProtobufGenericDeserlizer(groupId, schemaRegistryClient, null, config.getDecoder(), encodingCache); - AbstractPravegaDeserializer avro = new AvroGenericDeserlizer(groupId, schemaRegistryClient, null, config.getDecoder(), + AbstractDeserializer avro = new AvroGenericDeserlizer(groupId, schemaRegistryClient, null, config.getDecoder(), encodingCache); - Map map = new HashMap<>(); + Map map = new HashMap<>(); map.put(SerializationFormat.Json, json); map.put(SerializationFormat.Avro, avro); map.put(SerializationFormat.Protobuf, protobuf); deserializers.forEach((key, value) -> { - map.put(key, new AbstractPravegaDeserializer(groupId, schemaRegistryClient, null, false, config.getDecoder(), encodingCache) { + map.put(key, new AbstractDeserializer(groupId, schemaRegistryClient, null, false, config.getDecoder(), encodingCache) { @Override protected Object deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) { return value.deserialize(inputStream, writerSchema, readerSchema); @@ -159,8 +147,8 @@ protected Object deserialize(InputStream inputStream, SchemaInfo writerSchema, S } @SuppressWarnings("unchecked") - private static AbstractPravegaSerializer getPravegaSerializer( - SerializerConfig config, Map> customSerializers, + private static AbstractSerializer getPravegaSerializer( + SerializerConfig config, Map> customSerializers, SchemaRegistryClient schemaRegistryClient, String groupId, SchemaInfo schemaInfo) { switch (schemaInfo.getSerializationFormat()) { case Avro: @@ -169,7 +157,7 @@ private static AbstractPravegaSerializer getPravegaSerializer( case Protobuf: ProtobufSerializer m = new ProtobufSerializer<>(groupId, schemaRegistryClient, ProtobufSchema.from(schemaInfo), config.getCodec(), config.isRegisterSchema()); - return (AbstractPravegaSerializer) m; + return (AbstractSerializer) m; case Json: return new JsonSerializer<>(groupId, schemaRegistryClient, JSONSchema.from(schemaInfo), config.getCodec(), config.isRegisterSchema()); @@ -180,12 +168,12 @@ private static AbstractPravegaSerializer getPravegaSerializer( } } - private static AbstractPravegaSerializer getCustomSerializer( - SerializerConfig config, Map> customSerializers, + private static AbstractSerializer getCustomSerializer( + SerializerConfig config, Map> customSerializers, SchemaRegistryClient schemaRegistryClient, String groupId, SchemaInfo schemaInfo) { if (customSerializers.containsKey(schemaInfo.getSerializationFormat())) { - PravegaSerializer serializer = customSerializers.get(schemaInfo.getSerializationFormat()); - return new AbstractPravegaSerializer(groupId, schemaRegistryClient, + CustomSerializer serializer = customSerializers.get(schemaInfo.getSerializationFormat()); + return new 
AbstractSerializer(groupId, schemaRegistryClient, () -> schemaInfo, config.getCodec(), config.isRegisterSchema()) { @Override protected void serialize(Object var, SchemaInfo schema, OutputStream outputStream) { diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatWithSchemaDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatWithSchemaDeserializer.java index 5da2b415e..1f63d1157 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatWithSchemaDeserializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatWithSchemaDeserializer.java @@ -14,16 +14,17 @@ import io.pravega.schemaregistry.contract.data.SchemaInfo; import io.pravega.schemaregistry.contract.data.SerializationFormat; +import java.io.IOException; import java.io.InputStream; import java.util.Map; import java.util.function.BiFunction; -class MultiFormatWithSchemaDeserializer extends AbstractPravegaDeserializer> { - private final Map genericDeserializers; +class MultiFormatWithSchemaDeserializer extends AbstractDeserializer> { + private final Map genericDeserializers; private final BiFunction transform; MultiFormatWithSchemaDeserializer(String groupId, SchemaRegistryClient client, - Map genericDeserializers, + Map genericDeserializers, SerializerConfig.Decoder decoder, EncodingCache encodingCache, BiFunction transform) { super(groupId, client, null, false, decoder, encodingCache); @@ -32,7 +33,7 @@ class MultiFormatWithSchemaDeserializer extends AbstractPravegaDeserializer deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) { + protected WithSchema deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) throws IOException { Preconditions.checkNotNull(writerSchema); Object obj = genericDeserializers.get(writerSchema.getSerializationFormat()).deserialize(inputStream, writerSchema, readerSchema); if (obj instanceof WithSchema) { diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatDeserializer.java index 3e63a291f..ae4e250bb 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatDeserializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatDeserializer.java @@ -14,16 +14,17 @@ import io.pravega.schemaregistry.contract.data.SchemaInfo; import io.pravega.schemaregistry.contract.data.SerializationFormat; +import java.io.IOException; import java.io.InputStream; import java.util.Map; import java.util.function.BiFunction; -class MultipleFormatDeserializer extends AbstractPravegaDeserializer { - private final Map genericDeserializers; +class MultipleFormatDeserializer extends AbstractDeserializer { + private final Map genericDeserializers; private final BiFunction transform; MultipleFormatDeserializer(String groupId, SchemaRegistryClient client, - Map genericDeserializers, + Map genericDeserializers, SerializerConfig.Decoder decoder, EncodingCache encodingCache, BiFunction transform) { super(groupId, client, null, false, decoder, encodingCache); @@ -32,7 +33,7 @@ class MultipleFormatDeserializer extends AbstractPravegaDeserializer { } @Override - protected T deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) { + protected T deserialize(InputStream inputStream, SchemaInfo writerSchema, 
SchemaInfo readerSchema) throws IOException { Preconditions.checkNotNull(writerSchema); return transform.apply(writerSchema.getSerializationFormat(), genericDeserializers.get(writerSchema.getSerializationFormat()).deserialize(inputStream, writerSchema, readerSchema)); diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatSerializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatSerializer.java index df249f66c..f3e3d6bef 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatSerializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatSerializer.java @@ -18,19 +18,19 @@ import java.util.function.Function; class MultipleFormatSerializer implements Serializer> { - private final Function> serializerFunction; + private final Function> serializerFunction; - private final ConcurrentHashMap> serializersMap; + private final ConcurrentHashMap> serializersMap; - MultipleFormatSerializer(Function> serializerFunction) { + MultipleFormatSerializer(Function> serializerFunction) { this.serializerFunction = serializerFunction; this.serializersMap = new ConcurrentHashMap<>(); } @Override public ByteBuffer serialize(WithSchema value) { - AbstractPravegaSerializer serializer = serializersMap.computeIfAbsent(value.getSchemaContainer().getSchemaInfo(), - x -> serializerFunction.apply(value.getSchemaContainer().getSchemaInfo())); + AbstractSerializer serializer = serializersMap.computeIfAbsent(value.getSchema().getSchemaInfo(), + x -> serializerFunction.apply(value.getSchema().getSchemaInfo())); return serializer.serialize(value.getObject()); } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedAndGenericDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedAndGenericDeserializer.java index d4a621196..bf761c072 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedAndGenericDeserializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedAndGenericDeserializer.java @@ -14,16 +14,17 @@ import io.pravega.schemaregistry.common.Either; import io.pravega.schemaregistry.contract.data.SchemaInfo; +import java.io.IOException; import java.io.InputStream; import java.util.Map; -class MultiplexedAndGenericDeserializer extends AbstractPravegaDeserializer> { - private final Map> deserializers; - private final AbstractPravegaDeserializer genericDeserializer; +class MultiplexedAndGenericDeserializer extends AbstractDeserializer> { + private final Map> deserializers; + private final AbstractDeserializer genericDeserializer; MultiplexedAndGenericDeserializer(String groupId, SchemaRegistryClient client, - Map> deserializers, - AbstractPravegaDeserializer genericDeserializer, + Map> deserializers, + AbstractDeserializer genericDeserializer, SerializerConfig.Decoder decoder, EncodingCache encodingCache) { super(groupId, client, null, false, decoder, encodingCache); @@ -32,9 +33,9 @@ class MultiplexedAndGenericDeserializer extends AbstractPravegaDeserialize } @Override - protected Either deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) { + protected Either deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) throws IOException { Preconditions.checkNotNull(writerSchema); - AbstractPravegaDeserializer deserializer = deserializers.get(writerSchema.getType()); + 
AbstractDeserializer deserializer = deserializers.get(writerSchema.getType()); if (deserializer == null) { return Either.right(genericDeserializer.deserialize(inputStream, writerSchema, readerSchema)); } else { diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedDeserializer.java index 73ceac2f5..ca61a2974 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedDeserializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedDeserializer.java @@ -14,14 +14,15 @@ import io.pravega.schemaregistry.contract.data.SchemaInfo; import org.apache.commons.lang3.SerializationException; +import java.io.IOException; import java.io.InputStream; import java.util.Map; -class MultiplexedDeserializer extends AbstractPravegaDeserializer { - private final Map> deserializers; +class MultiplexedDeserializer extends AbstractDeserializer { + private final Map> deserializers; MultiplexedDeserializer(String groupId, SchemaRegistryClient client, - Map> deserializers, + Map> deserializers, SerializerConfig.Decoder decoder, EncodingCache encodingCache) { super(groupId, client, null, false, decoder, encodingCache); @@ -29,9 +30,9 @@ class MultiplexedDeserializer extends AbstractPravegaDeserializer { } @Override - protected T deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) { + protected T deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) throws IOException { Preconditions.checkNotNull(writerSchema); - AbstractPravegaDeserializer deserializer = deserializers.get(writerSchema.getType()); + AbstractDeserializer deserializer = deserializers.get(writerSchema.getType()); if (deserializer == null) { throw new SerializationException("deserializer not supplied for type " + writerSchema.getType()); } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedSerializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedSerializer.java index 4a74d6488..540a0355a 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedSerializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedSerializer.java @@ -15,9 +15,9 @@ import java.util.Map; class MultiplexedSerializer implements Serializer { - private final Map, AbstractPravegaSerializer> serializers; + private final Map, AbstractSerializer> serializers; - MultiplexedSerializer(Map, AbstractPravegaSerializer> serializers) { + MultiplexedSerializer(Map, AbstractSerializer> serializers) { this.serializers = serializers; } @@ -25,7 +25,7 @@ class MultiplexedSerializer implements Serializer { @SuppressWarnings("unchecked") public ByteBuffer serialize(T obj) { Class tClass = (Class) obj.getClass(); - AbstractPravegaSerializer serializer = serializers.get(tClass); + AbstractSerializer serializer = serializers.get(tClass); return serializer.serialize(obj); } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufDeserlizer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufDeserlizer.java index 5085e871f..c27be25e8 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufDeserlizer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufDeserlizer.java @@ -11,14 +11,14 @@ import 
com.google.common.base.Preconditions; import com.google.protobuf.GeneratedMessageV3; +import com.google.protobuf.InvalidProtocolBufferException; import io.pravega.schemaregistry.client.SchemaRegistryClient; import io.pravega.schemaregistry.contract.data.SchemaInfo; import io.pravega.schemaregistry.schemas.ProtobufSchema; -import lombok.SneakyThrows; import java.io.InputStream; -public class ProtobufDeserlizer extends AbstractPravegaDeserializer { +public class ProtobufDeserlizer extends AbstractDeserializer { private final ProtobufSchema protobufSchema; ProtobufDeserlizer(String groupId, SchemaRegistryClient client, ProtobufSchema schema, SerializerConfig.Decoder decoder, @@ -28,9 +28,12 @@ public class ProtobufDeserlizer extends AbstractPr this.protobufSchema = schema; } - @SneakyThrows @Override protected T deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) { - return protobufSchema.getParser().parseFrom(inputStream); + try { + return protobufSchema.getParser().parseFrom(inputStream); + } catch (InvalidProtocolBufferException e) { + throw new IllegalArgumentException("Invalid bytes", e); + } } } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufGenericDeserlizer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufGenericDeserlizer.java index 4725df382..deb040e32 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufGenericDeserlizer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufGenericDeserlizer.java @@ -11,9 +11,6 @@ import com.google.common.base.Preconditions; import com.google.common.base.Strings; -import com.google.common.cache.CacheBuilder; -import com.google.common.cache.CacheLoader; -import com.google.common.cache.LoadingCache; import com.google.protobuf.DescriptorProtos; import com.google.protobuf.Descriptors; import com.google.protobuf.DynamicMessage; @@ -21,66 +18,72 @@ import io.pravega.schemaregistry.common.NameUtil; import io.pravega.schemaregistry.contract.data.SchemaInfo; import io.pravega.schemaregistry.schemas.ProtobufSchema; -import lombok.SneakyThrows; import org.apache.commons.lang3.SerializationException; import javax.annotation.Nullable; +import java.io.IOException; import java.io.InputStream; +import java.util.concurrent.ConcurrentHashMap; -public class ProtobufGenericDeserlizer extends AbstractPravegaDeserializer { - private final LoadingCache knownSchemas; +public class ProtobufGenericDeserlizer extends AbstractDeserializer { + private final ConcurrentHashMap knownSchemas; ProtobufGenericDeserlizer(String groupId, SchemaRegistryClient client, @Nullable ProtobufSchema schema, SerializerConfig.Decoder decoder, EncodingCache encodingCache) { super(groupId, client, schema, false, decoder, encodingCache); Preconditions.checkArgument(isEncodeHeader() || schema != null); - - this.knownSchemas = CacheBuilder.newBuilder().build(new CacheLoader() { - @Override - public Descriptors.Descriptor load(SchemaInfo schemaToUse) throws Exception { - DescriptorProtos.FileDescriptorSet descriptorSet = ProtobufSchema.from(schemaToUse).getDescriptorProto(); - - int count = descriptorSet.getFileCount(); - String[] tokens = NameUtil.extractNameAndQualifier(schemaToUse.getType()); - String name = tokens[0]; - String pckg = tokens[1]; - DescriptorProtos.FileDescriptorProto mainDescriptor = descriptorSet - .getFileList().stream() - .filter(x -> { - boolean match; - if (x.getPackage() == null) { - match = 
Strings.isNullOrEmpty(pckg); - } else { - match = x.getPackage().equals(pckg); - } - return match && x.getMessageTypeList().stream().anyMatch(y -> y.getName().equals(name)); - }) - .findAny().orElseThrow(IllegalArgumentException::new); - - Descriptors.FileDescriptor[] dependencyArray = new Descriptors.FileDescriptor[count]; - for (int i = 0; i < count; i++) { - Descriptors.FileDescriptor fd = Descriptors.FileDescriptor.buildFrom( - descriptorSet.getFile(i), - new Descriptors.FileDescriptor[]{}); - dependencyArray[i] = fd; - } - - Descriptors.FileDescriptor fd = Descriptors.FileDescriptor.buildFrom(mainDescriptor, dependencyArray); - - return fd.getMessageTypes().stream().filter(x -> x.getName().equals(name)) - .findAny().orElseThrow(() -> new SerializationException(String.format("schema for %s not found", schemaToUse.getType()))); - } - }); + knownSchemas = new ConcurrentHashMap<>(); } - @SneakyThrows @Override - protected DynamicMessage deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) { + protected DynamicMessage deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) throws IOException { Preconditions.checkArgument(writerSchemaInfo != null || readerSchemaInfo != null); - + SchemaInfo schemaToUse = readerSchemaInfo == null ? writerSchemaInfo : readerSchemaInfo; - Descriptors.Descriptor messageType = knownSchemas.get(schemaToUse); + Descriptors.Descriptor messageType = knownSchemas.computeIfAbsent(schemaToUse, this::parseSchema); return DynamicMessage.parseFrom(messageType, inputStream); } + + private Descriptors.Descriptor parseSchema(SchemaInfo schemaToUse) { + DescriptorProtos.FileDescriptorSet descriptorSet = ProtobufSchema.from(schemaToUse).getDescriptorProto(); + + int count = descriptorSet.getFileCount(); + String[] tokens = NameUtil.extractNameAndQualifier(schemaToUse.getType()); + String name = tokens[0]; + String pckg = tokens[1]; + DescriptorProtos.FileDescriptorProto mainDescriptor = null; + for (DescriptorProtos.FileDescriptorProto x : descriptorSet.getFileList()) { + boolean packageMatch; + if (x.getPackage() == null) { + packageMatch = Strings.isNullOrEmpty(pckg); + } else { + packageMatch = x.getPackage().equals(pckg); + } + if (packageMatch && x.getMessageTypeList().stream().anyMatch(y -> y.getName().equals(name))) { + mainDescriptor = x; + break; + } + } + if (mainDescriptor == null) { + throw new IllegalArgumentException("FileDescriptorSet doesn't contain the schema for the object type."); + } + + Descriptors.FileDescriptor[] dependencyArray = new Descriptors.FileDescriptor[count]; + Descriptors.FileDescriptor fd; + try { + for (int i = 0; i < count; i++) { + fd = Descriptors.FileDescriptor.buildFrom( + descriptorSet.getFile(i), + new Descriptors.FileDescriptor[]{}); + dependencyArray[i] = fd; + } + + fd = Descriptors.FileDescriptor.buildFrom(mainDescriptor, dependencyArray); + } catch (Descriptors.DescriptorValidationException e) { + throw new IllegalArgumentException("Invalid protobuf schema."); + } + return fd.getMessageTypes().stream().filter(x -> x.getName().equals(name)) + .findAny().orElseThrow(() -> new SerializationException(String.format("schema for %s not found", schemaToUse.getType()))); + } } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufSerializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufSerializer.java index 38c294733..f7e858755 100644 --- 
a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufSerializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufSerializer.java @@ -14,19 +14,18 @@ import io.pravega.schemaregistry.codec.Codec; import io.pravega.schemaregistry.contract.data.SchemaInfo; import io.pravega.schemaregistry.schemas.ProtobufSchema; -import lombok.SneakyThrows; +import java.io.IOException; import java.io.OutputStream; -class ProtobufSerializer extends AbstractPravegaSerializer { +class ProtobufSerializer extends AbstractSerializer { ProtobufSerializer(String groupId, SchemaRegistryClient client, ProtobufSchema schema, Codec codec, boolean registerSchema) { super(groupId, client, schema, codec, registerSchema); } - @SneakyThrows @Override - protected void serialize(T var, SchemaInfo schemaInfo, OutputStream outputStream) { + protected void serialize(T var, SchemaInfo schemaInfo, OutputStream outputStream) throws IOException { var.writeTo(outputStream); } } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufSerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufSerializerFactory.java index 0e666d7ec..1938a1b2f 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufSerializerFactory.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufSerializerFactory.java @@ -14,7 +14,6 @@ import com.google.protobuf.Message; import io.pravega.client.stream.Serializer; import io.pravega.schemaregistry.client.SchemaRegistryClient; -import io.pravega.schemaregistry.client.SchemaRegistryClientFactory; import io.pravega.schemaregistry.common.Either; import io.pravega.schemaregistry.schemas.ProtobufSchema; import lombok.extern.slf4j.Slf4j; @@ -33,11 +32,7 @@ class ProtobufSerializerFactory { static Serializer serializer(SerializerConfig config, ProtobufSchema schemaData) { String groupId = config.getGroupId(); - SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? - SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : - config.getRegistryConfigOrClient().getRight(); - autoCreateGroup(schemaRegistryClient, config); - registerCodec(schemaRegistryClient, config); + SchemaRegistryClient schemaRegistryClient = initForSerializer(config); return new ProtobufSerializer<>(groupId, schemaRegistryClient, schemaData, config.getCodec(), config.isRegisterSchema()); } @@ -45,11 +40,7 @@ static Serializer serializer(SerializerConfig config, static Serializer deserializer(SerializerConfig config, ProtobufSchema schemaData) { String groupId = config.getGroupId(); - SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? - SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : - config.getRegistryConfigOrClient().getRight(); - autoCreateGroup(schemaRegistryClient, config); - failOnCodecMismatch(schemaRegistryClient, config); + SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); @@ -58,14 +49,9 @@ static Serializer deserializer(SerializerConfi } static Serializer genericDeserializer(SerializerConfig config, @Nullable ProtobufSchema schema) { - SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? 
- SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : - config.getRegistryConfigOrClient().getRight(); + SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); String groupId = config.getGroupId(); - autoCreateGroup(schemaRegistryClient, config); - failOnCodecMismatch(schemaRegistryClient, config); - EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); return new ProtobufGenericDeserlizer(groupId, schemaRegistryClient, schema, config.getDecoder(), encodingCache); @@ -74,12 +60,9 @@ static Serializer genericDeserializer(SerializerConfig config, @ static Serializer multiTypeSerializer( SerializerConfig config, Map, ProtobufSchema> schemas) { String groupId = config.getGroupId(); - SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? - SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : - config.getRegistryConfigOrClient().getRight(); - autoCreateGroup(schemaRegistryClient, config); - registerCodec(schemaRegistryClient, config); - Map, AbstractPravegaSerializer> serializerMap = schemas + SchemaRegistryClient schemaRegistryClient = initForSerializer(config); + + Map, AbstractSerializer> serializerMap = schemas .entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, x -> new ProtobufSerializer<>(groupId, schemaRegistryClient, x.getValue(), config.getCodec(), config.isRegisterSchema()))); @@ -89,15 +72,11 @@ static Serializer multiTypeSerializer( static Serializer multiTypeDeserializer( SerializerConfig config, Map, ProtobufSchema> schemas) { String groupId = config.getGroupId(); - SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? - SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : - config.getRegistryConfigOrClient().getRight(); - autoCreateGroup(schemaRegistryClient, config); - failOnCodecMismatch(schemaRegistryClient, config); + SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - Map> deserializerMap = schemas + Map> deserializerMap = schemas .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), x -> new ProtobufDeserlizer<>(groupId, schemaRegistryClient, x, config.getDecoder(), encodingCache))); return new MultiplexedDeserializer<>(groupId, schemaRegistryClient, deserializerMap, config.getDecoder(), encodingCache); @@ -106,15 +85,11 @@ static Serializer multiTypeDeserializer( static Serializer> typedOrGenericDeserializer( SerializerConfig config, Map, ProtobufSchema> schemas) { String groupId = config.getGroupId(); - SchemaRegistryClient schemaRegistryClient = config.getRegistryConfigOrClient().isLeft() ? 
- SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : - config.getRegistryConfigOrClient().getRight(); - autoCreateGroup(schemaRegistryClient, config); - failOnCodecMismatch(schemaRegistryClient, config); + SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - Map> deserializerMap = schemas + Map> deserializerMap = schemas .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), x -> new ProtobufDeserlizer<>(groupId, schemaRegistryClient, x, config.getDecoder(), encodingCache))); ProtobufGenericDeserlizer genericDeserializer = new ProtobufGenericDeserlizer(groupId, schemaRegistryClient, null, diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java index ba12f673d..ba9919afe 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java @@ -14,7 +14,6 @@ import io.pravega.schemaregistry.client.SchemaRegistryClient; import io.pravega.schemaregistry.client.SchemaRegistryClientConfig; import io.pravega.schemaregistry.codec.Codec; -import io.pravega.schemaregistry.codec.CodecFactory; import io.pravega.schemaregistry.common.Either; import io.pravega.schemaregistry.contract.data.Compatibility; import io.pravega.schemaregistry.contract.data.EncodingInfo; @@ -25,24 +24,19 @@ import lombok.Data; import lombok.Getter; +import java.io.IOException; import java.nio.ByteBuffer; import java.util.HashSet; import java.util.Set; import java.util.function.BiFunction; import java.util.function.Function; -import static io.pravega.schemaregistry.codec.CodecFactory.*; - /** * Serializer Config class that is passed to {@link SerializerFactory} for creating serializer. */ @Data @Builder public class SerializerConfig { - private final static Codec NOOP = CodecFactory.none(); - private final static Codec GZIP = CodecFactory.gzip(); - private final static Codec SNAPPY = CodecFactory.snappy(); - /** * Name of the group. 
*/ @@ -107,7 +101,7 @@ private SerializerConfig(String groupId, Either DEFAULT = (x, y) -> { - switch (x) { - case NONE: - return NOOP.decode(y); - case MIME_GZIP: - return GZIP.decode(y); - case MIME_SNAPPY: - return SNAPPY.decode(y); - default: - throw new IllegalArgumentException(); + try { + switch (x) { + case Codecs.Constants.NONE: + return Codecs.None.getCodec().decode(y); + case Codecs.Constants.APPLICATION_X_GZIP: + return Codecs.GzipCompressor.getCodec().decode(y); + case Codecs.Constants.APPLICATION_X_SNAPPY_FRAMED: + return Codecs.SnappyCompressor.getCodec().decode(y); + default: + throw new IllegalArgumentException("Unknown codec"); + } + } catch (IOException ex) { + throw new RuntimeException(ex); } }; @@ -225,18 +223,18 @@ private Decoder(String codecType, Function decoder) { } }; codecTypes = new HashSet<>(); - this.codecTypes.add(NONE); - this.codecTypes.add(MIME_GZIP); - this.codecTypes.add(MIME_SNAPPY); + this.codecTypes.add(Codecs.Constants.NONE); + this.codecTypes.add(Codecs.Constants.APPLICATION_X_GZIP); + this.codecTypes.add(Codecs.Constants.APPLICATION_X_SNAPPY_FRAMED); this.codecTypes.add(codecType); } private Decoder() { this.decoder = DEFAULT; codecTypes = new HashSet<>(); - this.codecTypes.add(NONE); - this.codecTypes.add(MIME_GZIP); - this.codecTypes.add(MIME_SNAPPY); + this.codecTypes.add(Codecs.Constants.NONE); + this.codecTypes.add(Codecs.Constants.APPLICATION_X_GZIP); + this.codecTypes.add(Codecs.Constants.APPLICATION_X_SNAPPY_FRAMED); } ByteBuffer decode(String codecType, ByteBuffer bytes) { diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java index d9be7201e..f30f2ba67 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java @@ -19,7 +19,7 @@ import io.pravega.schemaregistry.schemas.AvroSchema; import io.pravega.schemaregistry.schemas.JSONSchema; import io.pravega.schemaregistry.schemas.ProtobufSchema; -import io.pravega.schemaregistry.schemas.SchemaContainer; +import io.pravega.schemaregistry.schemas.Schema; import lombok.extern.slf4j.Slf4j; import org.apache.avro.generic.GenericRecord; @@ -328,7 +328,7 @@ public static Serializer>> jsonTypedOrGenericDe // region custom /** - * A serializer that uses user supplied implementation of {@link PravegaSerializer} for serializing the objects. + * A serializer that uses user supplied implementation of {@link CustomSerializer} for serializing the objects. * It also takes user supplied schema and registers/validates it against the registry. * * @param config Serializer config. @@ -337,12 +337,12 @@ public static Serializer>> jsonTypedOrGenericDe * @param Type of object to serialize * @return Serializer that uses user supplied serialization function for serializing events. */ - public static Serializer customSerializer(SerializerConfig config, SchemaContainer schema, PravegaSerializer serializer) { + public static Serializer customSerializer(SerializerConfig config, Schema schema, CustomSerializer serializer) { return CustomSerializerFactory.serializer(config, schema, serializer); } /** - * A deserializer that uses user supplied implementation of {@link PravegaDeserializer} for deserializing the data into + * A deserializer that uses user supplied implementation of {@link CustomDeserializer} for deserializing the data into * typed java objects. 
* * @param config Serializer config. @@ -351,8 +351,8 @@ public static Serializer customSerializer(SerializerConfig config, Schema * @param Type of object to deserialize * @return Deserializer that uses user supplied deserialization function for deserializing payload into typed events. */ - public static Serializer customDeserializer(SerializerConfig config, @Nullable SchemaContainer schema, - PravegaDeserializer deserializer) { + public static Serializer customDeserializer(SerializerConfig config, @Nullable Schema schema, + CustomDeserializer deserializer) { return CustomSerializerFactory.deserializer(config, schema, deserializer); } // endregion diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactoryHelper.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactoryHelper.java index 04649f4ba..5c6560982 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactoryHelper.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactoryHelper.java @@ -10,25 +10,47 @@ package io.pravega.schemaregistry.serializers; import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.client.SchemaRegistryClientFactory; import lombok.extern.slf4j.Slf4j; import java.util.List; @Slf4j class SerializerFactoryHelper { - static void autoCreateGroup(SchemaRegistryClient client, SerializerConfig config) { + static SchemaRegistryClient initForSerializer(SerializerConfig config) { + SchemaRegistryClient schemaRegistryClient = getSchemaRegistryClient(config); + createGroup(schemaRegistryClient, config); + registerCodec(schemaRegistryClient, config); + return schemaRegistryClient; + } + + static SchemaRegistryClient initForDeserializer(SerializerConfig config) { + SchemaRegistryClient schemaRegistryClient = getSchemaRegistryClient(config); + + createGroup(schemaRegistryClient, config); + failOnCodecMismatch(schemaRegistryClient, config); + return schemaRegistryClient; + } + + private static SchemaRegistryClient getSchemaRegistryClient(SerializerConfig config) { + return config.getRegistryConfigOrClient().isLeft() ? 
+ SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : + config.getRegistryConfigOrClient().getRight(); + } + + private static void createGroup(SchemaRegistryClient client, SerializerConfig config) { if (config.isCreateGroup()) { client.addGroup(config.getGroupId(), config.getGroupProperties()); } } - static void registerCodec(SchemaRegistryClient client, SerializerConfig config) { + private static void registerCodec(SchemaRegistryClient client, SerializerConfig config) { if (config.isRegisterCodec()) { client.addCodecType(config.getGroupId(), config.getCodec().getCodecType()); } } - static void failOnCodecMismatch(SchemaRegistryClient client, SerializerConfig config) { + private static void failOnCodecMismatch(SchemaRegistryClient client, SerializerConfig config) { if (config.isFailOnCodecMismatch()) { List codecTypesInGroup = client.getCodecTypes(config.getGroupId()); if (!config.getDecoder().getCodecTypes().containsAll(codecTypesInGroup)) { diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/WithSchema.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/WithSchema.java index 9216f79db..30cf3ce94 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/WithSchema.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/WithSchema.java @@ -9,31 +9,33 @@ */ package io.pravega.schemaregistry.serializers; +import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.module.jsonSchema.JsonSchema; import com.google.protobuf.DescriptorProtos; import com.google.protobuf.DynamicMessage; +import com.google.protobuf.InvalidProtocolBufferException; import com.google.protobuf.util.JsonFormat; import io.pravega.schemaregistry.contract.data.SchemaInfo; import io.pravega.schemaregistry.contract.data.SerializationFormat; import io.pravega.schemaregistry.schemas.AvroSchema; import io.pravega.schemaregistry.schemas.JSONSchema; import io.pravega.schemaregistry.schemas.ProtobufSchema; -import io.pravega.schemaregistry.schemas.SchemaContainer; +import io.pravega.schemaregistry.schemas.Schema; import lombok.AccessLevel; import lombok.Getter; -import lombok.SneakyThrows; -import org.apache.avro.Schema; import org.apache.avro.generic.IndexedRecord; import java.util.function.BiFunction; /** - * Container class for object with its corresponding schema. + * Container class for a deserialized object with its corresponding schema. + * * @param Type of object. 
*/ public class WithSchema { public static final BiFunction JSON_TRANSFORM = WithSchema::toJsonString; + public static final BiFunction NO_TRANSFORM = (x, y) -> y; private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); @@ -41,7 +43,7 @@ public class WithSchema { .usingTypeRegistry(JsonFormat.TypeRegistry.newBuilder().build()); @Getter(AccessLevel.PACKAGE) - private final SchemaContainer schemaContainer; + private final Schema schema; @Getter private final Object object; private final BiFunction transform; @@ -50,45 +52,51 @@ public class WithSchema { this.object = obj; this.transform = transform; if (schemaInfo != null) { - switch (schemaInfo.getSerializationFormat()) { - case Avro: - schemaContainer = AvroSchema.from(schemaInfo); - break; - case Protobuf: - schemaContainer = ProtobufSchema.from(schemaInfo); - break; - case Json: - schemaContainer = JSONSchema.from(schemaInfo); - break; - case Custom: - schemaContainer = () -> schemaInfo; - break; - default: - throw new IllegalArgumentException("Serialization format not supported"); - } + this.schema = convertToSchema(schemaInfo); } else { - schemaContainer = null; + this.schema = null; } } + private Schema convertToSchema(SchemaInfo schemaInfo) { + Schema schema; + switch (schemaInfo.getSerializationFormat()) { + case Avro: + schema = AvroSchema.from(schemaInfo); + break; + case Protobuf: + schema = ProtobufSchema.from(schemaInfo); + break; + case Json: + schema = JSONSchema.from(schemaInfo); + break; + case Custom: + schema = () -> schemaInfo; + break; + default: + throw new IllegalArgumentException("Serialization format not supported"); + } + return schema; + } + /** * Check whether the schema is of type Avro. * * @return True if the schema is for avro, false otherwise. */ public boolean hasAvroSchema() { - return schemaContainer instanceof AvroSchema; + return schema instanceof AvroSchema; } /** * Avro Schema for the underlying deserialized object. This is available if {@link WithSchema#hasAvroSchema()} returns true. * This means underlying object was serialized as avro. * - * @return Protobuf {@link Schema} representing the schema for the object. + * @return Protobuf {@link org.apache.avro.Schema} representing the schema for the object. */ @SuppressWarnings("unchecked") - public Schema getAvroSchema() { - return ((AvroSchema) schemaContainer).getSchema(); + public org.apache.avro.Schema getAvroSchema() { + return ((AvroSchema) schema).getSchema(); } /** @@ -97,7 +105,7 @@ public Schema getAvroSchema() { * @return True if the schema is for protobuf, false otherwise. 
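
The schemaContainer-to-schema rename above is mechanical; seen from the consumer side, WithSchema pairs the deserialized object with the schema it was written with. A minimal sketch using the jsonGenericDeserializer entry point exercised in SerializerTest further down; the WithSchema<JsonNode> type parameter follows the JsonGenericDeserializer change later in this patch, and the wrapper class, config and serialized buffer are assumed for illustration.

    import com.fasterxml.jackson.databind.JsonNode;
    import com.fasterxml.jackson.module.jsonSchema.JsonSchema;
    import io.pravega.client.stream.Serializer;
    import io.pravega.schemaregistry.serializers.SerializerConfig;
    import io.pravega.schemaregistry.serializers.SerializerFactory;
    import io.pravega.schemaregistry.serializers.WithSchema;
    import java.nio.ByteBuffer;

    class WithSchemaSketch {
        // 'config' targets the reader's group; 'serialized' is a ByteBuffer written by a
        // serializer from the same group.
        static void inspect(SerializerConfig config, ByteBuffer serialized) {
            Serializer<WithSchema<JsonNode>> deserializer = SerializerFactory.jsonGenericDeserializer(config);
            WithSchema<JsonNode> event = deserializer.deserialize(serialized);
            if (event.hasJsonSchema()) {
                JsonSchema writerSchema = event.getJsonSchema(); // schema the event was written with
                System.out.println(writerSchema.getType());
            }
            System.out.println(event.getJsonString());           // object rendered as a JSON string
        }
    }
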
*/ public boolean hasProtobufSchema() { - return schemaContainer instanceof ProtobufSchema; + return schema instanceof ProtobufSchema; } /** @@ -108,7 +116,7 @@ public boolean hasProtobufSchema() { */ @SuppressWarnings("unchecked") public DescriptorProtos.FileDescriptorSet getProtobufSchema() { - return ((ProtobufSchema) schemaContainer).getDescriptorProto(); + return ((ProtobufSchema) schema).getDescriptorProto(); } /** @@ -117,7 +125,7 @@ public DescriptorProtos.FileDescriptorSet getProtobufSchema() { * @return True if the schema is for json, false otherwise */ public boolean hasJsonSchema() { - return schemaContainer instanceof JSONSchema; + return schema instanceof JSONSchema; } /** @@ -128,7 +136,7 @@ public boolean hasJsonSchema() { */ @SuppressWarnings("unchecked") public JsonSchema getJsonSchema() { - return ((JSONSchema) schemaContainer).getSchema(); + return ((JSONSchema) schema).getSchema(); } /** @@ -137,10 +145,10 @@ public JsonSchema getJsonSchema() { * @return Transformed object of type T. */ public T getTransformed() { - if (schemaContainer == null) { - throw new IllegalArgumentException(); + if (schema == null) { + throw new IllegalArgumentException("Need schema to be able to transform."); } - return transform.apply(schemaContainer.getSchemaInfo().getSerializationFormat(), object); + return transform.apply(schema.getSchemaInfo().getSerializationFormat(), object); } /** @@ -149,31 +157,34 @@ public T getTransformed() { * @return Json String for the object. */ public String getJsonString() { - if (schemaContainer == null) { + if (schema == null) { throw new IllegalArgumentException(); } - return JSON_TRANSFORM.apply(schemaContainer.getSchemaInfo().getSerializationFormat(), object); + return JSON_TRANSFORM.apply(schema.getSchemaInfo().getSerializationFormat(), object); } - @SneakyThrows private static String toJsonString(SerializationFormat format, Object deserialize) { String jsonString; - switch (format) { - case Avro: - if (deserialize instanceof IndexedRecord) { - jsonString = deserialize.toString(); - } else { + try { + switch (format) { + case Avro: + if (deserialize instanceof IndexedRecord) { + jsonString = deserialize.toString(); + } else { + jsonString = OBJECT_MAPPER.writeValueAsString(deserialize); + } + break; + case Protobuf: + jsonString = PRINTER.print((DynamicMessage) deserialize); + break; + case Json: + jsonString = OBJECT_MAPPER.writeValueAsString(((WithSchema) deserialize).object); + break; + default: jsonString = OBJECT_MAPPER.writeValueAsString(deserialize); - } - break; - case Protobuf: - jsonString = PRINTER.print((DynamicMessage) deserialize); - break; - case Json: - jsonString = OBJECT_MAPPER.writeValueAsString(((WithSchema) deserialize).object); - break; - default: - jsonString = OBJECT_MAPPER.writeValueAsString(deserialize); + } + } catch (InvalidProtocolBufferException | JsonProcessingException e) { + throw new IllegalArgumentException("Invalid deserialized object. 
Failed to convert to json string.", e); } return jsonString; } diff --git a/serializers/src/test/java/io/pravega/schemaregistry/GroupIdTest.java b/serializers/src/test/java/io/pravega/schemaregistry/GroupIdTest.java index 322fb02f6..846337065 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/GroupIdTest.java +++ b/serializers/src/test/java/io/pravega/schemaregistry/GroupIdTest.java @@ -20,7 +20,7 @@ public class GroupIdTest { @Test public void testGroupId() throws UnsupportedEncodingException { - String groupId = GroupIdGenerator.getGroupId(GroupIdGenerator.Type.QualifiedStreamName, "scope", "stream"); + String groupId = GroupIdGenerator.getGroupId(GroupIdGenerator.Scheme.QualifiedStreamName, "scope", "stream"); assertEquals(URLDecoder.decode(groupId, Charsets.UTF_8.toString()), "scope/stream"); } diff --git a/serializers/src/test/java/io/pravega/schemaregistry/codec/CodecTest.java b/serializers/src/test/java/io/pravega/schemaregistry/codec/CodecTest.java index 969eebb7e..e85664c61 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/codec/CodecTest.java +++ b/serializers/src/test/java/io/pravega/schemaregistry/codec/CodecTest.java @@ -10,8 +10,10 @@ package io.pravega.schemaregistry.codec; import com.google.common.base.Charsets; +import io.pravega.schemaregistry.serializers.Codecs; import org.junit.Test; +import java.io.IOException; import java.nio.ByteBuffer; import java.util.Arrays; @@ -21,24 +23,24 @@ public class CodecTest { @Test - public void testCodec() { + public void testCodec() throws IOException { byte[] testStringBytes = "this is a test string".getBytes(Charsets.UTF_8); - Codec snappy = CodecFactory.snappy(); - assertEquals(snappy.getCodecType(), CodecFactory.MIME_SNAPPY); + Codec snappy = Codecs.SnappyCompressor.getCodec(); + assertEquals(snappy.getCodecType(), Codecs.SnappyCompressor.getMimeType()); ByteBuffer encoded = snappy.encode(ByteBuffer.wrap(testStringBytes)); assertFalse(Arrays.equals(encoded.array(), testStringBytes)); ByteBuffer decoded = snappy.decode(encoded); assertTrue(Arrays.equals(decoded.array(), testStringBytes)); - Codec gzip = CodecFactory.gzip(); - assertEquals(gzip.getCodecType(), CodecFactory.MIME_GZIP); + Codec gzip = Codecs.GzipCompressor.getCodec(); + assertEquals(gzip.getCodecType(), Codecs.GzipCompressor.getMimeType()); encoded = gzip.encode(ByteBuffer.wrap(testStringBytes)); assertFalse(Arrays.equals(encoded.array(), testStringBytes)); decoded = gzip.decode(encoded); assertTrue(Arrays.equals(decoded.array(), testStringBytes)); - Codec none = CodecFactory.none(); - assertEquals(none.getCodecType(), CodecFactory.NONE); + Codec none = Codecs.None.getCodec(); + assertEquals(none.getCodecType(), Codecs.None.getMimeType()); encoded = none.encode(ByteBuffer.wrap(testStringBytes)); assertTrue(Arrays.equals(encoded.array(), testStringBytes)); decoded = none.decode(encoded); diff --git a/serializers/src/test/java/io/pravega/schemaregistry/schemas/TestSchemas.java b/serializers/src/test/java/io/pravega/schemaregistry/schemas/TestSchemas.java index 0d397971c..a325d8ed7 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/schemas/TestSchemas.java +++ b/serializers/src/test/java/io/pravega/schemaregistry/schemas/TestSchemas.java @@ -9,6 +9,7 @@ */ package io.pravega.schemaregistry.schemas; +import com.fasterxml.jackson.core.JsonProcessingException; import com.google.protobuf.DescriptorProtos; import com.google.protobuf.DynamicMessage; import com.google.protobuf.GeneratedMessageV3; @@ -83,7 +84,7 @@ public void 
testProtobufSchema() throws IOException { } @Test - public void testJsonSchema() { + public void testJsonSchema() throws JsonProcessingException { JSONSchema schema = JSONSchema.of(User.class); assertNotNull(schema.getSchema()); assertEquals(schema.getSchemaInfo().getSerializationFormat(), SerializationFormat.Json); diff --git a/serializers/src/test/java/io/pravega/schemaregistry/serializers/CacheTest.java b/serializers/src/test/java/io/pravega/schemaregistry/serializers/CacheTest.java index d0eea92f3..8e7bfed7d 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/serializers/CacheTest.java +++ b/serializers/src/test/java/io/pravega/schemaregistry/serializers/CacheTest.java @@ -11,7 +11,6 @@ import com.google.common.collect.ImmutableMap; import io.pravega.schemaregistry.client.SchemaRegistryClient; -import io.pravega.schemaregistry.codec.CodecFactory; import io.pravega.schemaregistry.contract.data.EncodingId; import io.pravega.schemaregistry.contract.data.EncodingInfo; import io.pravega.schemaregistry.contract.data.SchemaInfo; @@ -20,6 +19,7 @@ import org.junit.Test; import java.nio.ByteBuffer; +import java.util.concurrent.ExecutionException; import static org.junit.Assert.assertEquals; import static org.mockito.ArgumentMatchers.eq; @@ -28,13 +28,13 @@ public class CacheTest { @Test - public void testCache() { + public void testCache() throws ExecutionException { SchemaRegistryClient client = mock(SchemaRegistryClient.class); String groupId = "groupId"; EncodingId encodingId = new EncodingId(0); EncodingInfo encodingInfo = new EncodingInfo(new VersionInfo("name", 0, 0), new SchemaInfo("name", SerializationFormat.Avro, ByteBuffer.wrap(new byte[0]), ImmutableMap.of()), - CodecFactory.snappy().getCodecType()); + Codecs.SnappyCompressor.getCodec().getCodecType()); doAnswer(x -> encodingInfo).when(client).getEncodingInfo(eq(groupId), eq(encodingId)); EncodingCache cache = new EncodingCache(groupId, client); assertEquals(encodingInfo, cache.getGroupEncodingInfo(encodingId)); diff --git a/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java b/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java index 48ca2baef..909b57772 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java +++ b/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java @@ -9,6 +9,7 @@ */ package io.pravega.schemaregistry.serializers; +import com.fasterxml.jackson.core.JsonProcessingException; import com.google.common.base.Strings; import com.google.common.collect.ImmutableMap; import com.google.protobuf.DescriptorProtos; @@ -16,7 +17,6 @@ import com.google.protobuf.GeneratedMessageV3; import io.pravega.client.stream.Serializer; import io.pravega.schemaregistry.client.SchemaRegistryClient; -import io.pravega.schemaregistry.codec.CodecFactory; import io.pravega.schemaregistry.common.Either; import io.pravega.schemaregistry.contract.data.EncodingId; import io.pravega.schemaregistry.contract.data.EncodingInfo; @@ -74,15 +74,15 @@ public void testAvroSerializers() { doAnswer(x -> versionInfo2).when(client).getVersionForSchema(anyString(), eq(schema2.getSchemaInfo())); doAnswer(x -> new EncodingId(0)).when(client).getEncodingId(anyString(), eq(versionInfo1), any()); doAnswer(x -> new EncodingId(1)).when(client).getEncodingId(anyString(), eq(versionInfo2), any()); - doAnswer(x -> new EncodingInfo(versionInfo1, schema1.getSchemaInfo(), CodecFactory.NONE)).when(client).getEncodingInfo(anyString(), 
eq(new EncodingId(0))); - doAnswer(x -> new EncodingInfo(versionInfo2, schema2.getSchemaInfo(), CodecFactory.NONE)).when(client).getEncodingInfo(anyString(), eq(new EncodingId(1))); + doAnswer(x -> new EncodingInfo(versionInfo1, schema1.getSchemaInfo(), Codecs.None.getMimeType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(0))); + doAnswer(x -> new EncodingInfo(versionInfo2, schema2.getSchemaInfo(), Codecs.None.getMimeType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(1))); doAnswer(x -> true).when(client).canReadUsing(anyString(), any()); AvroSchema of = AvroSchema.of(SchemaDefinitions.ENUM); VersionInfo versionInfo3 = new VersionInfo(of.getSchema().getFullName(), 0, 2); doAnswer(x -> versionInfo3).when(client).getVersionForSchema(anyString(), eq(of.getSchemaInfo())); doAnswer(x -> new EncodingId(2)).when(client).getEncodingId(anyString(), eq(versionInfo3), any()); - doAnswer(x -> new EncodingInfo(versionInfo3, of.getSchemaInfo(), CodecFactory.NONE)).when(client).getEncodingInfo(anyString(), eq(new EncodingId(2))); + doAnswer(x -> new EncodingInfo(versionInfo3, of.getSchemaInfo(), Codecs.None.getMimeType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(2))); Serializer serializerStr = SerializerFactory.avroSerializer(config, of); GenericData.EnumSymbol enumSymbol = new GenericData.EnumSymbol(of.getSchema(), "a"); @@ -155,7 +155,7 @@ public void testAvroSerializersReflect() { .when(client).getGroupProperties(anyString()); doAnswer(x -> versionInfo1).when(client).getVersionForSchema(anyString(), eq(schema1.getSchemaInfo())); doAnswer(x -> new EncodingId(0)).when(client).getEncodingId(anyString(), eq(versionInfo1), any()); - doAnswer(x -> new EncodingInfo(versionInfo1, schema1.getSchemaInfo(), CodecFactory.NONE)).when(client).getEncodingInfo(anyString(), eq(new EncodingId(0))); + doAnswer(x -> new EncodingInfo(versionInfo1, schema1.getSchemaInfo(), Codecs.None.getMimeType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(0))); doAnswer(x -> true).when(client).canReadUsing(anyString(), any()); Serializer serializer = SerializerFactory.avroSerializer(config, schema1); @@ -184,8 +184,8 @@ public void testProtobufSerializers() throws IOException { doAnswer(x -> versionInfo2).when(client).getVersionForSchema(anyString(), eq(schema2.getSchemaInfo())); doAnswer(x -> new EncodingId(0)).when(client).getEncodingId(anyString(), eq(versionInfo1), any()); doAnswer(x -> new EncodingId(1)).when(client).getEncodingId(anyString(), eq(versionInfo2), any()); - doAnswer(x -> new EncodingInfo(versionInfo1, schema1.getSchemaInfo(), CodecFactory.NONE)).when(client).getEncodingInfo(anyString(), eq(new EncodingId(0))); - doAnswer(x -> new EncodingInfo(versionInfo2, schema2.getSchemaInfo(), CodecFactory.NONE)).when(client).getEncodingInfo(anyString(), eq(new EncodingId(1))); + doAnswer(x -> new EncodingInfo(versionInfo1, schema1.getSchemaInfo(), Codecs.None.getMimeType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(0))); + doAnswer(x -> new EncodingInfo(versionInfo2, schema2.getSchemaInfo(), Codecs.None.getMimeType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(1))); doAnswer(x -> true).when(client).canReadUsing(anyString(), any()); Serializer serializer = SerializerFactory.protobufSerializer(config, schema1); @@ -234,7 +234,7 @@ public void testProtobufSerializers() throws IOException { } @Test - public void testJsonSerializers() { + public void testJsonSerializers() throws JsonProcessingException { SchemaRegistryClient 
client = mock(SchemaRegistryClient.class); SerializerConfig config = SerializerConfig.builder().registryClient(client).groupId("groupId").build(); JSONSchema schema1 = JSONSchema.of(DerivedUser1.class); @@ -248,8 +248,8 @@ public void testJsonSerializers() { doAnswer(x -> versionInfo2).when(client).getVersionForSchema(anyString(), eq(schema2.getSchemaInfo())); doAnswer(x -> new EncodingId(0)).when(client).getEncodingId(anyString(), eq(versionInfo1), any()); doAnswer(x -> new EncodingId(1)).when(client).getEncodingId(anyString(), eq(versionInfo2), any()); - doAnswer(x -> new EncodingInfo(versionInfo1, schema1.getSchemaInfo(), CodecFactory.NONE)).when(client).getEncodingInfo(anyString(), eq(new EncodingId(0))); - doAnswer(x -> new EncodingInfo(versionInfo2, schema2.getSchemaInfo(), CodecFactory.NONE)).when(client).getEncodingInfo(anyString(), eq(new EncodingId(1))); + doAnswer(x -> new EncodingInfo(versionInfo1, schema1.getSchemaInfo(), Codecs.None.getMimeType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(0))); + doAnswer(x -> new EncodingInfo(versionInfo2, schema2.getSchemaInfo(), Codecs.None.getMimeType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(1))); doAnswer(x -> true).when(client).canReadUsing(anyString(), any()); Serializer serializer = SerializerFactory.jsonSerializer(config, schema1); @@ -263,7 +263,7 @@ public void testJsonSerializers() { serialized = serializer.serialize(user1); Serializer> genericDeserializer = SerializerFactory.jsonGenericDeserializer(config); WithSchema generic = genericDeserializer.deserialize(serialized); - assertEquals(((JSONSchema) generic.getSchemaContainer()).getSchema(), schema1.getSchema()); + assertEquals(((JSONSchema) generic.getSchema()).getSchema(), schema1.getSchema()); assertEquals(((Map) generic.getObject()).size(), 4); serialized = serializer.serialize(user1); @@ -277,7 +277,7 @@ public void testJsonSerializers() { VersionInfo versionInfo3 = new VersionInfo("myData", 0, 2); doAnswer(x -> versionInfo3).when(client).getVersionForSchema(anyString(), eq(myData.getSchemaInfo())); doAnswer(x -> new EncodingId(2)).when(client).getEncodingId(anyString(), eq(versionInfo3), any()); - doAnswer(x -> new EncodingInfo(versionInfo3, myData.getSchemaInfo(), CodecFactory.NONE)).when(client).getEncodingInfo(anyString(), eq(new EncodingId(2))); + doAnswer(x -> new EncodingInfo(versionInfo3, myData.getSchemaInfo(), Codecs.None.getMimeType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(2))); Serializer serializer2 = SerializerFactory.jsonSerializer(config, myData); Map jsonObject = new HashMap<>(); @@ -341,9 +341,9 @@ public void testMultiformatDeserializers() throws IOException { doAnswer(x -> new EncodingId(0)).when(client).getEncodingId(anyString(), eq(versionInfo1), any()); doAnswer(x -> new EncodingId(1)).when(client).getEncodingId(anyString(), eq(versionInfo2), any()); doAnswer(x -> new EncodingId(2)).when(client).getEncodingId(anyString(), eq(versionInfo3), any()); - doAnswer(x -> new EncodingInfo(versionInfo1, schema1.getSchemaInfo(), CodecFactory.NONE)).when(client).getEncodingInfo(anyString(), eq(new EncodingId(0))); - doAnswer(x -> new EncodingInfo(versionInfo2, schema2.getSchemaInfo(), CodecFactory.NONE)).when(client).getEncodingInfo(anyString(), eq(new EncodingId(1))); - doAnswer(x -> new EncodingInfo(versionInfo3, schema3.getSchemaInfo(), CodecFactory.NONE)).when(client).getEncodingInfo(anyString(), eq(new EncodingId(2))); + doAnswer(x -> new EncodingInfo(versionInfo1, schema1.getSchemaInfo(), 
Codecs.None.getMimeType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(0))); + doAnswer(x -> new EncodingInfo(versionInfo2, schema2.getSchemaInfo(), Codecs.None.getMimeType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(1))); + doAnswer(x -> new EncodingInfo(versionInfo3, schema3.getSchemaInfo(), Codecs.None.getMimeType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(2))); doAnswer(x -> true).when(client).canReadUsing(anyString(), any()); Serializer avroSerializer = SerializerFactory.avroSerializer(config, schema1); @@ -448,7 +448,7 @@ public void testNoEncodingJson() throws IOException { WithSchema generic = genericDeserializer.deserialize(serialized); assertNotNull(generic.getObject()); - assertNull(generic.getSchemaContainer()); + assertNull(generic.getSchema()); } @Data From 572f34fa8d7a299dce182f8fc161c11ad88fcb82 Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Sat, 4 Jul 2020 18:17:10 -0700 Subject: [PATCH 46/70] javadoc Signed-off-by: Shivesh Ranjan --- .../serializers/MultiFormatSerializerFactory.java | 2 +- .../schemaregistry/serializers/MultiplexedDeserializer.java | 6 ++++++ .../schemaregistry/serializers/MultiplexedSerializer.java | 5 +++++ 3 files changed, 12 insertions(+), 1 deletion(-) diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatSerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatSerializerFactory.java index 75487ac4b..b24ee9b31 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatSerializerFactory.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatSerializerFactory.java @@ -39,7 +39,7 @@ * for multiple formats. */ @Slf4j -public class MultiFormatSerializerFactory { +class MultiFormatSerializerFactory { // region multi format static Serializer> serializer(SerializerConfig config) { return serializerInternal(config, Collections.emptyMap()); diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedDeserializer.java index ca61a2974..7bcdc1b44 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedDeserializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedDeserializer.java @@ -18,6 +18,12 @@ import java.io.InputStream; import java.util.Map; +/** + * Deserializer which multiplexes for multiple object types. Based on the supplied object, it invokes the + * deserializer for that object type. + * + * @param Type of object. + */ class MultiplexedDeserializer extends AbstractDeserializer { private final Map> deserializers; diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedSerializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedSerializer.java index 540a0355a..34392b620 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedSerializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedSerializer.java @@ -14,6 +14,11 @@ import java.nio.ByteBuffer; import java.util.Map; +/** + * Serializer to multiplex serialization of multiple types of events with same serialization format. + * + * @param Type of object. 
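
The dispatch these javadocs describe is by type: the multi-type factory methods above build a map of inner serializers keyed by the concrete event class (and, for the deserializer, by the writer schema's type name), and each call routes to the matching entry. In outline, and only as a sketch of the idea rather than the class body from this patch:

    // serializers: Map<Class<? extends T>, AbstractSerializer<T>> assembled by the
    // multiTypeSerializer factories, keyed by the concrete event class.
    @Override
    public ByteBuffer serialize(T obj) {
        AbstractSerializer<T> serializer = serializers.get(obj.getClass());
        if (serializer == null) {
            throw new IllegalArgumentException("No schema registered for type " + obj.getClass());
        }
        return serializer.serialize(obj);
    }
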
+ */ class MultiplexedSerializer implements Serializer { private final Map, AbstractSerializer> serializers; From cf82e0792b5b3e7d4b3a9d1ef9a9129fb2acdd9d Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Tue, 7 Jul 2020 20:21:31 -0700 Subject: [PATCH 47/70] encode header as a config param Signed-off-by: Shivesh Ranjan --- .../serializers/AbstractDeserializer.java | 22 +++----- .../serializers/AbstractSerializer.java | 22 +++----- .../serializers/AvroDeserlizer.java | 2 +- .../serializers/AvroGenericDeserlizer.java | 2 +- .../serializers/AvroSerializer.java | 2 +- .../serializers/AvroSerializerFactory.java | 26 ++++++--- .../serializers/CustomSerializerFactory.java | 10 +++- .../serializers/JsonDeserlizer.java | 4 +- .../serializers/JsonGenericDeserializer.java | 14 ++--- .../serializers/JsonSerializer.java | 4 +- .../serializers/JsonSerializerFactory.java | 48 +++++++++++----- .../serializers/JsonStringDeserializer.java | 4 +- .../MultiFormatSerializerFactory.java | 28 +++++++--- .../MultiFormatWithSchemaDeserializer.java | 2 +- .../MultipleFormatDeserializer.java | 2 +- .../MultiplexedAndGenericDeserializer.java | 2 +- .../serializers/MultiplexedDeserializer.java | 2 +- .../serializers/ProtobufDeserlizer.java | 4 +- .../ProtobufGenericDeserlizer.java | 4 +- .../serializers/ProtobufSerializer.java | 4 +- .../ProtobufSerializerFactory.java | 44 +++++++++++---- .../serializers/SerializerConfig.java | 19 ++++++- .../serializers/SerializerFactory.java | 13 +++-- .../serializers/SerializerTest.java | 56 ++++++++++++++----- 24 files changed, 218 insertions(+), 122 deletions(-) diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractDeserializer.java index c4c43c3cd..aa13f8b88 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractDeserializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractDeserializer.java @@ -24,8 +24,6 @@ import java.io.IOException; import java.io.InputStream; import java.nio.ByteBuffer; -import java.util.Map; -import java.util.concurrent.atomic.AtomicBoolean; @Slf4j abstract class AbstractDeserializer extends FailingSerializer { @@ -37,22 +35,23 @@ abstract class AbstractDeserializer extends FailingSerializer { // This can be null. If no schema is supplied, it means the intent is to deserialize into writer schema. // If headers are not encoded, then this will be the latest schema from the registry private final SchemaInfo schemaInfo; - private final AtomicBoolean encodeHeader; + private final boolean encodeHeader; private final SerializerConfig.Decoder decoder; private final boolean skipHeaders; private final EncodingCache encodingCache; - + protected AbstractDeserializer(String groupId, SchemaRegistryClient client, @Nullable Schema schema, boolean skipHeaders, SerializerConfig.Decoder decoder, - EncodingCache encodingCache) { + EncodingCache encodingCache, + boolean encodeHeader) { this.groupId = groupId; this.client = client; this.encodingCache = encodingCache; this.schemaInfo = schema == null ? 
null : schema.getSchemaInfo(); - this.encodeHeader = new AtomicBoolean(); + this.encodeHeader = encodeHeader; this.skipHeaders = skipHeaders; this.decoder = decoder; @@ -63,18 +62,13 @@ protected AbstractDeserializer(String groupId, private void initialize() { GroupProperties groupProperties = client.getGroupProperties(groupId); - Map properties = groupProperties.getProperties(); - boolean toEncodeHeader = !properties.containsKey(SerializerFactory.ENCODE) || - Boolean.parseBoolean(properties.get(SerializerFactory.ENCODE)); - this.encodeHeader.set(toEncodeHeader); - if (schemaInfo != null) { log.info("Validate caller supplied schema."); if (!client.canReadUsing(groupId, schemaInfo)) { throw new IllegalArgumentException("Cannot read using schema" + schemaInfo.getType()); } } else { - if (!this.encodeHeader.get()) { + if (!this.encodeHeader) { log.warn("No reader schema is supplied and stream does not have encoding headers."); } } @@ -84,7 +78,7 @@ private void initialize() { @Override public T deserialize(ByteBuffer data) { int start = data.arrayOffset() + data.position(); - if (this.encodeHeader.get()) { + if (this.encodeHeader) { SchemaInfo writerSchema = null; ByteBuffer decoded; if (skipHeaders) { @@ -119,6 +113,6 @@ public T deserialize(ByteBuffer data) { protected abstract T deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) throws IOException; boolean isEncodeHeader() { - return encodeHeader.get(); + return encodeHeader; } } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractSerializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractSerializer.java index ac1c26fd8..27df460cc 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractSerializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractSerializer.java @@ -13,7 +13,6 @@ import io.pravega.schemaregistry.client.SchemaRegistryClient; import io.pravega.schemaregistry.codec.Codec; import io.pravega.schemaregistry.contract.data.EncodingId; -import io.pravega.schemaregistry.contract.data.GroupProperties; import io.pravega.schemaregistry.contract.data.SchemaInfo; import io.pravega.schemaregistry.contract.data.VersionInfo; import io.pravega.schemaregistry.schemas.Schema; @@ -24,8 +23,6 @@ import java.io.IOException; import java.io.OutputStream; import java.nio.ByteBuffer; -import java.util.Map; -import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; abstract class AbstractSerializer extends FailingSerializer { @@ -35,17 +32,18 @@ abstract class AbstractSerializer extends FailingSerializer { private final SchemaInfo schemaInfo; private final AtomicReference encodingId; - private final AtomicBoolean encodeHeader; + private final boolean encodeHeader; private final SchemaRegistryClient client; @Getter private final Codec codec; private final boolean registerSchema; - + protected AbstractSerializer(String groupId, SchemaRegistryClient client, Schema schema, Codec codec, - boolean registerSchema) { + boolean registerSchema, + boolean encodeHeader) { Preconditions.checkNotNull(groupId); Preconditions.checkNotNull(client); Preconditions.checkNotNull(codec); @@ -57,17 +55,11 @@ protected AbstractSerializer(String groupId, this.registerSchema = registerSchema; this.encodingId = new AtomicReference<>(); this.codec = codec; - this.encodeHeader = new AtomicBoolean(); + this.encodeHeader = encodeHeader; initialize(); } private void initialize() { 
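
For reference, the framing this flag controls, as implied by the serialize() hunk just below (which allocates 5 + encoded.remaining() bytes); the split of the five header bytes into one protocol byte plus a 4-byte encoding id is an assumption based on EncodingId wrapping an int:

    // [ protocol : 1 byte ][ encoding id : 4 bytes ][ payload encoded with the configured codec ]
    //
    // With encodeHeader == false, only the raw serialized payload is written; on the read side
    // AbstractDeserializer then relies on the supplied reader schema (or, if none is supplied,
    // the latest schema in the group) instead of looking up the writer schema by encoding id.
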
- GroupProperties groupProperties = client.getGroupProperties(groupId); - - Map properties = groupProperties.getProperties(); - boolean toEncodeHeader = !properties.containsKey(SerializerFactory.ENCODE) || - Boolean.parseBoolean(properties.get(SerializerFactory.ENCODE)); - encodeHeader.set(toEncodeHeader); VersionInfo version; if (registerSchema) { // register schema @@ -76,7 +68,7 @@ private void initialize() { // get already registered schema version. If schema is not registered, this will throw an exception. version = client.getVersionForSchema(groupId, schemaInfo); } - if (toEncodeHeader) { + if (encodeHeader) { encodingId.set(client.getEncodingId(groupId, version, codec.getCodecType())); } } @@ -98,7 +90,7 @@ public ByteBuffer serialize(T obj) { byte[] serialized = dataStream.toByteArray(); ByteBuffer byteBuffer; - if (this.encodeHeader.get()) { + if (this.encodeHeader) { Preconditions.checkNotNull(schemaInfo); ByteBuffer encoded = codec.encode(ByteBuffer.wrap(serialized)); int bufferSize = 5 + encoded.remaining(); diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroDeserlizer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroDeserlizer.java index 9863320fa..4ab30e002 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroDeserlizer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroDeserlizer.java @@ -32,7 +32,7 @@ class AvroDeserlizer extends AbstractDeserializer { AvroDeserlizer(String groupId, SchemaRegistryClient client, AvroSchema schema, SerializerConfig.Decoder decoder, EncodingCache encodingCache) { - super(groupId, client, schema, false, decoder, encodingCache); + super(groupId, client, schema, false, decoder, encodingCache, true); Preconditions.checkNotNull(schema); this.avroSchema = schema; this.knownSchemas = new ConcurrentHashMap<>(); diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroGenericDeserlizer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroGenericDeserlizer.java index 9d3871eec..8f86de555 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroGenericDeserlizer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroGenericDeserlizer.java @@ -28,7 +28,7 @@ class AvroGenericDeserlizer extends AbstractDeserializer { AvroGenericDeserlizer(String groupId, SchemaRegistryClient client, @Nullable AvroSchema schema, SerializerConfig.Decoder decoder, EncodingCache encodingCache) { - super(groupId, client, schema, false, decoder, encodingCache); + super(groupId, client, schema, false, decoder, encodingCache, true); this.knownSchemas = new ConcurrentHashMap<>(); } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroSerializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroSerializer.java index 2a0169a2f..90550ee01 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroSerializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroSerializer.java @@ -29,7 +29,7 @@ class AvroSerializer extends AbstractSerializer { private final AvroSchema avroSchema; AvroSerializer(String groupId, SchemaRegistryClient client, AvroSchema schema, Codec codec, boolean registerSchema) { - super(groupId, client, schema, codec, registerSchema); + super(groupId, client, schema, codec, registerSchema, true); this.avroSchema = schema; } diff --git 
a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroSerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroSerializerFactory.java index 46c28193b..324ba7cf6 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroSerializerFactory.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroSerializerFactory.java @@ -20,43 +20,49 @@ import java.util.Map; import java.util.stream.Collectors; -import static io.pravega.schemaregistry.serializers.SerializerFactoryHelper.*; +import static io.pravega.schemaregistry.serializers.SerializerFactoryHelper.initForDeserializer; +import static io.pravega.schemaregistry.serializers.SerializerFactoryHelper.initForSerializer; /** * Internal Factory class for Avro serializers and deserializers. */ @Slf4j class AvroSerializerFactory { - static Serializer serializer(SerializerConfig config, AvroSchema schemaData) { + static Serializer serializer(SerializerConfig config, AvroSchema schema) { Preconditions.checkNotNull(config); - Preconditions.checkNotNull(schemaData); + Preconditions.checkNotNull(schema); + Preconditions.checkArgument(config.isTagWithEncodingId(), "Events should be tagged with encoding ids."); SchemaRegistryClient schemaRegistryClient = initForSerializer(config); String groupId = config.getGroupId(); - return new AvroSerializer<>(groupId, schemaRegistryClient, schemaData, config.getCodec(), config.isRegisterSchema()); + return new AvroSerializer<>(groupId, schemaRegistryClient, schema, config.getCodec(), config.isRegisterSchema()); } - static Serializer deserializer(SerializerConfig config, AvroSchema schemaData) { + static Serializer deserializer(SerializerConfig config, AvroSchema schema) { Preconditions.checkNotNull(config); - Preconditions.checkNotNull(schemaData); + Preconditions.checkNotNull(schema); + Preconditions.checkArgument(config.isTagWithEncodingId(), "Events should be tagged with encoding ids."); SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); String groupId = config.getGroupId(); EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - return new AvroDeserlizer<>(groupId, schemaRegistryClient, schemaData, config.getDecoder(), encodingCache); + return new AvroDeserlizer<>(groupId, schemaRegistryClient, schema, config.getDecoder(), encodingCache); } - static Serializer genericDeserializer(SerializerConfig config, @Nullable AvroSchema schemaData) { + static Serializer genericDeserializer(SerializerConfig config, @Nullable AvroSchema schema) { + Preconditions.checkNotNull(config); + Preconditions.checkArgument(config.isTagWithEncodingId(), "Events should be tagged with encoding ids."); String groupId = config.getGroupId(); SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - return new AvroGenericDeserlizer(groupId, schemaRegistryClient, schemaData, config.getDecoder(), encodingCache); + return new AvroGenericDeserlizer(groupId, schemaRegistryClient, schema, config.getDecoder(), encodingCache); } static Serializer multiTypeSerializer(SerializerConfig config, Map, AvroSchema> schemas) { Preconditions.checkNotNull(config); Preconditions.checkNotNull(schemas); + Preconditions.checkArgument(config.isTagWithEncodingId(), "Events should be tagged with encoding ids."); String groupId = config.getGroupId(); SchemaRegistryClient schemaRegistryClient = initForSerializer(config); @@ -71,6 +77,7 @@ 
static Serializer multiTypeDeserializer( SerializerConfig config, Map, AvroSchema> schemas) { Preconditions.checkNotNull(config); Preconditions.checkNotNull(schemas); + Preconditions.checkArgument(config.isTagWithEncodingId(), "Events should be tagged with encoding ids."); String groupId = config.getGroupId(); SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); @@ -88,6 +95,7 @@ static Serializer> typedOrGenericDeserializer( SerializerConfig config, Map, AvroSchema> schemas) { Preconditions.checkNotNull(config); Preconditions.checkNotNull(schemas); + Preconditions.checkArgument(config.isTagWithEncodingId(), "Events should be tagged with encoding ids."); String groupId = config.getGroupId(); SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/CustomSerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/CustomSerializerFactory.java index 35f0d767a..d4fb73731 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/CustomSerializerFactory.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/CustomSerializerFactory.java @@ -9,6 +9,7 @@ */ package io.pravega.schemaregistry.serializers; +import com.google.common.base.Preconditions; import io.pravega.client.stream.Serializer; import io.pravega.schemaregistry.client.SchemaRegistryClient; import io.pravega.schemaregistry.contract.data.SchemaInfo; @@ -28,10 +29,13 @@ @Slf4j class CustomSerializerFactory { static Serializer serializer(SerializerConfig config, Schema schema, CustomSerializer serializer) { + Preconditions.checkNotNull(config); + Preconditions.checkNotNull(schema); + Preconditions.checkNotNull(serializer); String groupId = config.getGroupId(); SchemaRegistryClient schemaRegistryClient = initForSerializer(config); return new AbstractSerializer(groupId, schemaRegistryClient, - schema, config.getCodec(), config.isRegisterSchema()) { + schema, config.getCodec(), config.isRegisterSchema(), config.isTagWithEncodingId()) { @Override protected void serialize(T var, SchemaInfo schema, OutputStream outputStream) { serializer.serialize(var, schema, outputStream); @@ -41,6 +45,8 @@ protected void serialize(T var, SchemaInfo schema, OutputStream outputStream) { static Serializer deserializer(SerializerConfig config, @Nullable Schema schema, CustomDeserializer deserializer) { + Preconditions.checkNotNull(config); + Preconditions.checkNotNull(deserializer); String groupId = config.getGroupId(); SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); @@ -48,7 +54,7 @@ static Serializer deserializer(SerializerConfig config, @Nullable Schema< EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); return new AbstractDeserializer(groupId, schemaRegistryClient, schema, false, - config.getDecoder(), encodingCache) { + config.getDecoder(), encodingCache, config.isTagWithEncodingId()) { @Override protected T deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) { return deserializer.deserialize(inputStream, writerSchema, readerSchema); diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonDeserlizer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonDeserlizer.java index 0996e3c05..6df7b03d1 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonDeserlizer.java +++ 
b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonDeserlizer.java @@ -27,8 +27,8 @@ class JsonDeserlizer extends AbstractDeserializer { JsonDeserlizer(String groupId, SchemaRegistryClient client, JSONSchema schema, - SerializerConfig.Decoder decoder, EncodingCache encodingCache) { - super(groupId, client, schema, true, decoder, encodingCache); + SerializerConfig.Decoder decoder, EncodingCache encodingCache, boolean encodeHeader) { + super(groupId, client, schema, true, decoder, encodingCache, encodeHeader); Preconditions.checkNotNull(schema); this.jsonSchema = schema; this.objectMapper = new ObjectMapper(); diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericDeserializer.java index ceee7fd94..0b1b9deaa 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericDeserializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericDeserializer.java @@ -11,20 +11,20 @@ import com.fasterxml.jackson.annotation.JsonAutoDetect; import com.fasterxml.jackson.annotation.PropertyAccessor; +import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import io.pravega.schemaregistry.client.SchemaRegistryClient; import io.pravega.schemaregistry.contract.data.SchemaInfo; import java.io.IOException; import java.io.InputStream; -import java.util.Map; -class JsonGenericDeserializer extends AbstractDeserializer> { +class JsonGenericDeserializer extends AbstractDeserializer> { private final ObjectMapper objectMapper; JsonGenericDeserializer(String groupId, SchemaRegistryClient client, - SerializerConfig.Decoder decoder, EncodingCache encodingCache) { - super(groupId, client, null, false, decoder, encodingCache); + SerializerConfig.Decoder decoder, EncodingCache encodingCache, boolean encodeHeader) { + super(groupId, client, null, false, decoder, encodingCache, encodeHeader); this.objectMapper = new ObjectMapper(); objectMapper.setVisibility(PropertyAccessor.ALL, JsonAutoDetect.Visibility.ANY); objectMapper.setVisibility(PropertyAccessor.FIELD, JsonAutoDetect.Visibility.ANY); @@ -32,8 +32,8 @@ class JsonGenericDeserializer extends AbstractDeserializer> { } @Override - protected WithSchema deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) throws IOException { - Object obj = objectMapper.readValue(inputStream, Object.class); - return new WithSchema<>(writerSchemaInfo, obj, (x, y) -> (Map) y); + protected WithSchema deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) throws IOException { + JsonNode obj = objectMapper.readTree(inputStream); + return new WithSchema<>(writerSchemaInfo, obj, (x, y) -> (JsonNode) y); } } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializer.java index 7c131f313..8b963d9a1 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializer.java @@ -23,8 +23,8 @@ class JsonSerializer extends AbstractSerializer { private final ObjectMapper objectMapper; JsonSerializer(String groupId, SchemaRegistryClient client, JSONSchema schema, - Codec codec, boolean registerSchema) { - super(groupId, client, schema, codec, 
registerSchema); + Codec codec, boolean registerSchema, boolean encodeHeader) { + super(groupId, client, schema, codec, registerSchema, encodeHeader); objectMapper = new ObjectMapper(); objectMapper.setVisibility(PropertyAccessor.ALL, JsonAutoDetect.Visibility.ANY); objectMapper.setVisibility(PropertyAccessor.FIELD, JsonAutoDetect.Visibility.ANY); diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializerFactory.java index ca15c46ba..009678a09 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializerFactory.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializerFactory.java @@ -9,6 +9,8 @@ */ package io.pravega.schemaregistry.serializers; +import com.fasterxml.jackson.databind.JsonNode; +import com.google.common.base.Preconditions; import io.pravega.client.stream.Serializer; import io.pravega.schemaregistry.client.SchemaRegistryClient; import io.pravega.schemaregistry.common.Either; @@ -18,31 +20,38 @@ import java.util.Map; import java.util.stream.Collectors; -import static io.pravega.schemaregistry.serializers.SerializerFactoryHelper.*; +import static io.pravega.schemaregistry.serializers.SerializerFactoryHelper.initForDeserializer; +import static io.pravega.schemaregistry.serializers.SerializerFactoryHelper.initForSerializer; /** * Internal Factory class for json serializers and deserializers. */ @Slf4j class JsonSerializerFactory { - static Serializer serializer(SerializerConfig config, JSONSchema schemaData) { + static Serializer serializer(SerializerConfig config, JSONSchema schema) { + Preconditions.checkNotNull(config); + Preconditions.checkNotNull(schema); String groupId = config.getGroupId(); SchemaRegistryClient schemaRegistryClient = initForSerializer(config); - return new JsonSerializer<>(groupId, schemaRegistryClient, schemaData, config.getCodec(), - config.isRegisterSchema()); + return new JsonSerializer<>(groupId, schemaRegistryClient, schema, config.getCodec(), + config.isRegisterSchema(), config.isTagWithEncodingId()); } - static Serializer deserializer(SerializerConfig config, JSONSchema schemaData) { + static Serializer deserializer(SerializerConfig config, JSONSchema schema) { + Preconditions.checkNotNull(config); + Preconditions.checkNotNull(schema); String groupId = config.getGroupId(); SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); // schema can be null in which case deserialization will happen into dynamic message - return new JsonDeserlizer<>(groupId, schemaRegistryClient, schemaData, config.getDecoder(), encodingCache); + return new JsonDeserlizer<>(groupId, schemaRegistryClient, schema, config.getDecoder(), encodingCache, + config.isTagWithEncodingId()); } - static Serializer> genericDeserializer(SerializerConfig config) { + static Serializer> genericDeserializer(SerializerConfig config) { + Preconditions.checkNotNull(config); SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); String groupId = config.getGroupId(); @@ -50,32 +59,39 @@ static Serializer> genericDeserializer(SerializerConfig confi EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); return new JsonGenericDeserializer(groupId, schemaRegistryClient, config.getDecoder(), - encodingCache); + encodingCache, config.isTagWithEncodingId()); } static Serializer 
jsonStringDeserializer(SerializerConfig config) { + Preconditions.checkNotNull(config); SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); String groupId = config.getGroupId(); EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - return new JsonStringDeserializer(groupId, schemaRegistryClient, config.getDecoder(), encodingCache); + return new JsonStringDeserializer(groupId, schemaRegistryClient, config.getDecoder(), encodingCache, config.isTagWithEncodingId()); } static Serializer multiTypeSerializer( SerializerConfig config, Map, JSONSchema> schemas) { + Preconditions.checkNotNull(config); + Preconditions.checkNotNull(schemas); + Preconditions.checkArgument(config.isTagWithEncodingId(), "Events should be tagged with encoding ids."); String groupId = config.getGroupId(); SchemaRegistryClient schemaRegistryClient = initForSerializer(config); Map, AbstractSerializer> serializerMap = schemas .entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, x -> new JsonSerializer<>(groupId, schemaRegistryClient, x.getValue(), config.getCodec(), - config.isRegisterSchema()))); + config.isRegisterSchema(), config.isTagWithEncodingId()))); return new MultiplexedSerializer<>(serializerMap); } static Serializer multiTypeDeserializer( SerializerConfig config, Map, JSONSchema> schemas) { + Preconditions.checkNotNull(config); + Preconditions.checkNotNull(schemas); + Preconditions.checkArgument(config.isTagWithEncodingId(), "Events should be tagged with encoding ids."); String groupId = config.getGroupId(); SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); @@ -84,13 +100,16 @@ static Serializer multiTypeDeserializer( Map> deserializerMap = schemas .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), x -> new JsonDeserlizer<>(groupId, schemaRegistryClient, x, config.getDecoder(), - encodingCache))); + encodingCache, config.isTagWithEncodingId()))); return new MultiplexedDeserializer<>(groupId, schemaRegistryClient, deserializerMap, config.getDecoder(), encodingCache); } - static Serializer>> typedOrGenericDeserializer( + static Serializer>> typedOrGenericDeserializer( SerializerConfig config, Map, JSONSchema> schemas) { + Preconditions.checkNotNull(config); + Preconditions.checkNotNull(schemas); + Preconditions.checkArgument(config.isTagWithEncodingId(), "Events should be tagged with encoding ids."); String groupId = config.getGroupId(); SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); @@ -98,9 +117,10 @@ static Serializer>> typedOrGenericDeserializer( Map> deserializerMap = schemas .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), - x -> new JsonDeserlizer<>(groupId, schemaRegistryClient, x, config.getDecoder(), encodingCache))); + x -> new JsonDeserlizer<>(groupId, schemaRegistryClient, x, config.getDecoder(), encodingCache, + config.isTagWithEncodingId()))); JsonGenericDeserializer genericDeserializer = new JsonGenericDeserializer(groupId, schemaRegistryClient, config.getDecoder(), - encodingCache); + encodingCache, config.isTagWithEncodingId()); return new MultiplexedAndGenericDeserializer<>(groupId, schemaRegistryClient, deserializerMap, genericDeserializer, config.getDecoder(), encodingCache); diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonStringDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonStringDeserializer.java index 6411a0778..fcc1bfef7 100644 --- 
a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonStringDeserializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonStringDeserializer.java @@ -22,8 +22,8 @@ class JsonStringDeserializer extends AbstractDeserializer { private final ObjectMapper objectMapper; JsonStringDeserializer(String groupId, SchemaRegistryClient client, - SerializerConfig.Decoder decoder, EncodingCache encodingCache) { - super(groupId, client, null, false, decoder, encodingCache); + SerializerConfig.Decoder decoder, EncodingCache encodingCache, boolean encodeHeader) { + super(groupId, client, null, false, decoder, encodingCache, encodeHeader); this.objectMapper = new ObjectMapper(); objectMapper.setVisibility(PropertyAccessor.ALL, JsonAutoDetect.Visibility.ANY); objectMapper.setVisibility(PropertyAccessor.FIELD, JsonAutoDetect.Visibility.ANY); diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatSerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatSerializerFactory.java index b24ee9b31..d8a993c25 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatSerializerFactory.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatSerializerFactory.java @@ -42,10 +42,14 @@ class MultiFormatSerializerFactory { // region multi format static Serializer> serializer(SerializerConfig config) { + Preconditions.checkNotNull(config); + Preconditions.checkArgument(config.isTagWithEncodingId(), "Events should be tagged with encoding ids."); return serializerInternal(config, Collections.emptyMap()); } static Serializer> deserializerWithSchema(SerializerConfig config) { + Preconditions.checkNotNull(config); + Preconditions.checkArgument(config.isTagWithEncodingId(), "Events should be tagged with encoding ids."); return deserializerInternal(config, Collections.emptyMap(), NO_TRANSFORM); } @@ -67,6 +71,9 @@ static Serializer> deserializerWithSchema(SerializerConfig co */ static Serializer deserializeAsT(SerializerConfig config, BiFunction transform) { + Preconditions.checkNotNull(config); + Preconditions.checkNotNull(transform); + Preconditions.checkArgument(config.isTagWithEncodingId(), "Events should be tagged with encoding ids."); return deserializeAsTInternal(config, Collections.emptyMap(), transform); } // endregion @@ -74,6 +81,7 @@ static Serializer deserializeAsT(SerializerConfig config, private static Serializer> serializerInternal(SerializerConfig config, Map> customSerializers) { Preconditions.checkNotNull(config); + Preconditions.checkArgument(config.isTagWithEncodingId(), "Events should be tagged with encoding ids."); SchemaRegistryClient schemaRegistryClient = initForSerializer(config); String groupId = config.getGroupId(); @@ -91,9 +99,9 @@ private static Serializer deserializeAsTInternal(SerializerConfig config, EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); AbstractDeserializer json = new JsonGenericDeserializer(config.getGroupId(), schemaRegistryClient, - config.getDecoder(), encodingCache); + config.getDecoder(), encodingCache, config.isTagWithEncodingId()); AbstractDeserializer protobuf = new ProtobufGenericDeserlizer(groupId, schemaRegistryClient, null, config.getDecoder(), - encodingCache); + encodingCache, config.isTagWithEncodingId()); AbstractDeserializer avro = new AvroGenericDeserlizer(groupId, schemaRegistryClient, null, config.getDecoder(), encodingCache); @@ -103,7 +111,8 @@ private 
static Serializer deserializeAsTInternal(SerializerConfig config, map.put(SerializationFormat.Protobuf, protobuf); deserializers.forEach((key, value) -> { - map.put(key, new AbstractDeserializer(groupId, schemaRegistryClient, null, false, config.getDecoder(), encodingCache) { + map.put(key, new AbstractDeserializer(groupId, schemaRegistryClient, null, false, + config.getDecoder(), encodingCache, config.isTagWithEncodingId()) { @Override protected Object deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) { return value.deserialize(inputStream, writerSchema, readerSchema); @@ -122,9 +131,9 @@ private static Serializer> deserializerInternal(SerializerConf EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); AbstractDeserializer json = new JsonGenericDeserializer(config.getGroupId(), schemaRegistryClient, - config.getDecoder(), encodingCache); + config.getDecoder(), encodingCache, config.isTagWithEncodingId()); AbstractDeserializer protobuf = new ProtobufGenericDeserlizer(groupId, schemaRegistryClient, null, config.getDecoder(), - encodingCache); + encodingCache, config.isTagWithEncodingId()); AbstractDeserializer avro = new AvroGenericDeserlizer(groupId, schemaRegistryClient, null, config.getDecoder(), encodingCache); @@ -134,7 +143,8 @@ private static Serializer> deserializerInternal(SerializerConf map.put(SerializationFormat.Protobuf, protobuf); deserializers.forEach((key, value) -> { - map.put(key, new AbstractDeserializer(groupId, schemaRegistryClient, null, false, config.getDecoder(), encodingCache) { + map.put(key, new AbstractDeserializer(groupId, schemaRegistryClient, null, false, + config.getDecoder(), encodingCache, config.isTagWithEncodingId()) { @Override protected Object deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) { return value.deserialize(inputStream, writerSchema, readerSchema); @@ -156,11 +166,11 @@ private static AbstractSerializer getPravegaSerializer( AvroSchema.from(schemaInfo), config.getCodec(), config.isRegisterSchema()); case Protobuf: ProtobufSerializer m = new ProtobufSerializer<>(groupId, schemaRegistryClient, - ProtobufSchema.from(schemaInfo), config.getCodec(), config.isRegisterSchema()); + ProtobufSchema.from(schemaInfo), config.getCodec(), config.isRegisterSchema(), config.isTagWithEncodingId()); return (AbstractSerializer) m; case Json: return new JsonSerializer<>(groupId, schemaRegistryClient, JSONSchema.from(schemaInfo), - config.getCodec(), config.isRegisterSchema()); + config.getCodec(), config.isRegisterSchema(), config.isTagWithEncodingId()); case Custom: return getCustomSerializer(config, customSerializers, schemaRegistryClient, groupId, schemaInfo); default: @@ -174,7 +184,7 @@ private static AbstractSerializer getCustomSerializer( if (customSerializers.containsKey(schemaInfo.getSerializationFormat())) { CustomSerializer serializer = customSerializers.get(schemaInfo.getSerializationFormat()); return new AbstractSerializer(groupId, schemaRegistryClient, - () -> schemaInfo, config.getCodec(), config.isRegisterSchema()) { + () -> schemaInfo, config.getCodec(), config.isRegisterSchema(), config.isTagWithEncodingId()) { @Override protected void serialize(Object var, SchemaInfo schema, OutputStream outputStream) { serializer.serialize(var, schema, outputStream); diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatWithSchemaDeserializer.java 
b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatWithSchemaDeserializer.java index 1f63d1157..e2b30f1a4 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatWithSchemaDeserializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatWithSchemaDeserializer.java @@ -27,7 +27,7 @@ class MultiFormatWithSchemaDeserializer extends AbstractDeserializer genericDeserializers, SerializerConfig.Decoder decoder, EncodingCache encodingCache, BiFunction transform) { - super(groupId, client, null, false, decoder, encodingCache); + super(groupId, client, null, false, decoder, encodingCache, true); this.genericDeserializers = genericDeserializers; this.transform = transform; } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatDeserializer.java index ae4e250bb..99ca02174 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatDeserializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatDeserializer.java @@ -27,7 +27,7 @@ class MultipleFormatDeserializer extends AbstractDeserializer { Map genericDeserializers, SerializerConfig.Decoder decoder, EncodingCache encodingCache, BiFunction transform) { - super(groupId, client, null, false, decoder, encodingCache); + super(groupId, client, null, false, decoder, encodingCache, true); this.genericDeserializers = genericDeserializers; this.transform = transform; } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedAndGenericDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedAndGenericDeserializer.java index bf761c072..779df458f 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedAndGenericDeserializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedAndGenericDeserializer.java @@ -27,7 +27,7 @@ class MultiplexedAndGenericDeserializer extends AbstractDeserializer genericDeserializer, SerializerConfig.Decoder decoder, EncodingCache encodingCache) { - super(groupId, client, null, false, decoder, encodingCache); + super(groupId, client, null, false, decoder, encodingCache, true); this.deserializers = deserializers; this.genericDeserializer = genericDeserializer; } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedDeserializer.java index 7bcdc1b44..ee3c7250e 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedDeserializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedDeserializer.java @@ -31,7 +31,7 @@ class MultiplexedDeserializer extends AbstractDeserializer { Map> deserializers, SerializerConfig.Decoder decoder, EncodingCache encodingCache) { - super(groupId, client, null, false, decoder, encodingCache); + super(groupId, client, null, false, decoder, encodingCache, true); this.deserializers = deserializers; } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufDeserlizer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufDeserlizer.java index c27be25e8..857e9099c 100644 --- 
a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufDeserlizer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufDeserlizer.java @@ -22,8 +22,8 @@ public class ProtobufDeserlizer extends AbstractDe private final ProtobufSchema protobufSchema; ProtobufDeserlizer(String groupId, SchemaRegistryClient client, ProtobufSchema schema, SerializerConfig.Decoder decoder, - EncodingCache encodingCache) { - super(groupId, client, schema, true, decoder, encodingCache); + EncodingCache encodingCache, boolean encodeHeader) { + super(groupId, client, schema, true, decoder, encodingCache, encodeHeader); Preconditions.checkNotNull(schema); this.protobufSchema = schema; } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufGenericDeserlizer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufGenericDeserlizer.java index deb040e32..428e01bab 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufGenericDeserlizer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufGenericDeserlizer.java @@ -29,8 +29,8 @@ public class ProtobufGenericDeserlizer extends AbstractDeserializer knownSchemas; ProtobufGenericDeserlizer(String groupId, SchemaRegistryClient client, @Nullable ProtobufSchema schema, - SerializerConfig.Decoder decoder, EncodingCache encodingCache) { - super(groupId, client, schema, false, decoder, encodingCache); + SerializerConfig.Decoder decoder, EncodingCache encodingCache, boolean encodeHeader) { + super(groupId, client, schema, false, decoder, encodingCache, encodeHeader); Preconditions.checkArgument(isEncodeHeader() || schema != null); knownSchemas = new ConcurrentHashMap<>(); } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufSerializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufSerializer.java index f7e858755..880e76433 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufSerializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufSerializer.java @@ -20,8 +20,8 @@ class ProtobufSerializer extends AbstractSerializer { ProtobufSerializer(String groupId, SchemaRegistryClient client, ProtobufSchema schema, - Codec codec, boolean registerSchema) { - super(groupId, client, schema, codec, registerSchema); + Codec codec, boolean registerSchema, boolean encodeHeader) { + super(groupId, client, schema, codec, registerSchema, encodeHeader); } @Override diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufSerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufSerializerFactory.java index 1938a1b2f..fa568fd95 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufSerializerFactory.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufSerializerFactory.java @@ -9,6 +9,7 @@ */ package io.pravega.schemaregistry.serializers; +import com.google.common.base.Preconditions; import com.google.protobuf.DynamicMessage; import com.google.protobuf.GeneratedMessageV3; import com.google.protobuf.Message; @@ -22,7 +23,8 @@ import java.util.Map; import java.util.stream.Collectors; -import static io.pravega.schemaregistry.serializers.SerializerFactoryHelper.*; +import static io.pravega.schemaregistry.serializers.SerializerFactoryHelper.initForDeserializer; +import static 
io.pravega.schemaregistry.serializers.SerializerFactoryHelper.initForSerializer; /** * Internal Factory class for protobuf serializers and deserializers. @@ -30,47 +32,62 @@ @Slf4j class ProtobufSerializerFactory { static Serializer serializer(SerializerConfig config, - ProtobufSchema schemaData) { + ProtobufSchema schema) { + Preconditions.checkNotNull(config); + Preconditions.checkNotNull(schema); String groupId = config.getGroupId(); SchemaRegistryClient schemaRegistryClient = initForSerializer(config); - return new ProtobufSerializer<>(groupId, schemaRegistryClient, schemaData, config.getCodec(), - config.isRegisterSchema()); + return new ProtobufSerializer<>(groupId, schemaRegistryClient, schema, config.getCodec(), + config.isRegisterSchema(), config.isTagWithEncodingId()); } static Serializer deserializer(SerializerConfig config, - ProtobufSchema schemaData) { + ProtobufSchema schema) { + Preconditions.checkNotNull(config); + Preconditions.checkNotNull(schema); String groupId = config.getGroupId(); SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); // schema can be null in which case deserialization will happen into dynamic message - return new ProtobufDeserlizer<>(groupId, schemaRegistryClient, schemaData, config.getDecoder(), encodingCache); + return new ProtobufDeserlizer<>(groupId, schemaRegistryClient, schema, config.getDecoder(), encodingCache, + config.isTagWithEncodingId()); } static Serializer genericDeserializer(SerializerConfig config, @Nullable ProtobufSchema schema) { + Preconditions.checkNotNull(config); + Preconditions.checkArgument(schema != null || config.isTagWithEncodingId(), + "Either read schema should be supplied or events should be tagged with encoding ids."); SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); String groupId = config.getGroupId(); EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - return new ProtobufGenericDeserlizer(groupId, schemaRegistryClient, schema, config.getDecoder(), encodingCache); + return new ProtobufGenericDeserlizer(groupId, schemaRegistryClient, schema, config.getDecoder(), encodingCache, + config.isTagWithEncodingId()); } static Serializer multiTypeSerializer( SerializerConfig config, Map, ProtobufSchema> schemas) { + Preconditions.checkNotNull(config); + Preconditions.checkNotNull(schemas); + Preconditions.checkArgument(config.isTagWithEncodingId(), "Events should be tagged with encoding ids."); String groupId = config.getGroupId(); SchemaRegistryClient schemaRegistryClient = initForSerializer(config); Map, AbstractSerializer> serializerMap = schemas .entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, x -> new ProtobufSerializer<>(groupId, schemaRegistryClient, x.getValue(), config.getCodec(), - config.isRegisterSchema()))); + config.isRegisterSchema(), config.isTagWithEncodingId()))); return new MultiplexedSerializer<>(serializerMap); } static Serializer multiTypeDeserializer( SerializerConfig config, Map, ProtobufSchema> schemas) { + Preconditions.checkNotNull(config); + Preconditions.checkNotNull(schemas); + Preconditions.checkArgument(config.isTagWithEncodingId(), "Events should be tagged with encoding ids."); String groupId = config.getGroupId(); SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); @@ -78,12 +95,16 @@ static Serializer multiTypeDeserializer( Map> deserializerMap = schemas .values().stream().collect(Collectors.toMap(x -> 
x.getSchemaInfo().getType(), - x -> new ProtobufDeserlizer<>(groupId, schemaRegistryClient, x, config.getDecoder(), encodingCache))); + x -> new ProtobufDeserlizer<>(groupId, schemaRegistryClient, x, config.getDecoder(), encodingCache, + config.isTagWithEncodingId()))); return new MultiplexedDeserializer<>(groupId, schemaRegistryClient, deserializerMap, config.getDecoder(), encodingCache); } static Serializer> typedOrGenericDeserializer( SerializerConfig config, Map, ProtobufSchema> schemas) { + Preconditions.checkNotNull(config); + Preconditions.checkNotNull(schemas); + Preconditions.checkArgument(config.isTagWithEncodingId(), "Events should be tagged with encoding ids."); String groupId = config.getGroupId(); SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); @@ -91,9 +112,10 @@ static Serializer> type Map> deserializerMap = schemas .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), - x -> new ProtobufDeserlizer<>(groupId, schemaRegistryClient, x, config.getDecoder(), encodingCache))); + x -> new ProtobufDeserlizer<>(groupId, schemaRegistryClient, x, config.getDecoder(), encodingCache, + config.isTagWithEncodingId()))); ProtobufGenericDeserlizer genericDeserializer = new ProtobufGenericDeserlizer(groupId, schemaRegistryClient, null, - config.getDecoder(), encodingCache); + config.getDecoder(), encodingCache, config.isTagWithEncodingId()); return new MultiplexedAndGenericDeserializer<>(groupId, schemaRegistryClient, deserializerMap, genericDeserializer, config.getDecoder(), encodingCache); } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java index ba9919afe..b3f9d8c15 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java @@ -75,10 +75,23 @@ public class SerializerConfig { private final boolean failOnCodecMismatch; /** - * Flag to tell the serializer if the group should be created automatically. + * Flag to tell the serializer/deserializer if the group should be created automatically. * It is recommended to register keep this flag as false in production systems and create groups and add schemas */ private final boolean createGroup; + + /** + * Flag to tell the serializer/deserializer if the encoding id should be added as a header with each event. + * By default this is set to true. If users choose to not add the header, they should do so in all their writer and + * reader applications for the given stream. + * + * Adding the event header is a requirement for the following cases: + * If {@link SerializationFormat#Avro} is chosen for a group, the event header cannot be false. + * If streams can have multiple types of events, this cannot be false. + * If streams can have multiple formats of events, this cannot be false. + */ + private final boolean tagWithEncodingId; + /** * Group properties to use for creating the group if createGroup is set to true. 
*/ @@ -86,7 +99,8 @@ public class SerializerConfig { private SerializerConfig(String groupId, Either registryConfigOrClient, boolean registerSchema, boolean registerCodec, Codec codec, Decoder decoder, boolean failOnCodecMismatch, - boolean createGroup, GroupProperties groupProperties) { + boolean createGroup, boolean tagWithEncodingId, GroupProperties groupProperties) { + this.tagWithEncodingId = tagWithEncodingId; Preconditions.checkArgument(!Strings.isNullOrEmpty(groupId), "Group id needs to be supplied"); Preconditions.checkArgument(registryConfigOrClient != null, "Either registry client or config needs to be supplied"); this.groupId = groupId; @@ -109,6 +123,7 @@ public static final class SerializerConfigBuilder { private boolean registerCodec = false; private boolean createGroup = false; private boolean failOnCodecMismatch = true; + private boolean tagWithEncodingId = true; private Either registryConfigOrClient = null; private GroupProperties groupProperties = GroupProperties.builder().build(); diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java index f30f2ba67..dddeadd02 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java @@ -9,6 +9,7 @@ */ package io.pravega.schemaregistry.serializers; +import com.fasterxml.jackson.databind.JsonNode; import com.google.protobuf.DynamicMessage; import com.google.protobuf.GeneratedMessageV3; import com.google.protobuf.Message; @@ -33,7 +34,7 @@ @Slf4j public class SerializerFactory { - public static final String ENCODE = "encode"; + public static final String PRAVEGA_EVENT_HEADER = "pravegaEventHeader"; // region avro /** @@ -266,7 +267,7 @@ public static Serializer jsonDeserializer(SerializerConfig config, JSONSc * @param config Serializer Config used for instantiating a new serializer. * @return A deserializer Implementation that can be used in {@link io.pravega.client.stream.EventStreamReader}. */ - public static Serializer> jsonGenericDeserializer(SerializerConfig config) { + public static Serializer> jsonGenericDeserializer(SerializerConfig config) { return JsonSerializerFactory.genericDeserializer(config); } @@ -319,7 +320,7 @@ public static Serializer jsonMultiTypeDeserializer( * @param Base type of schemas. * @return a Deserializer which can deserialize events of different types in the stream into typed objects. */ - public static Serializer>> jsonTypedOrGenericDeserializer( + public static Serializer>> jsonTypedOrGenericDeserializer( SerializerConfig config, Map, JSONSchema> schemas) { return JsonSerializerFactory.typedOrGenericDeserializer(config, schemas); } @@ -379,7 +380,7 @@ public static Serializer> serializerWithSchema(SerializerConf * serialization formats. It deserializes them into format specific generic objects. * An event serialized with avro is deserialized into {@link GenericRecord}. * An event serialized with protobuf is deserialized into {@link DynamicMessage}. - * An event serialized with json is deserialized into {@link java.util.LinkedHashMap}. + * An event serialized with json is deserialized into {@link JsonNode}. * * @param config serializer config * @return a deserializer that can deserialize events serialized as protobuf, json or avro into java objects. 
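For orientation, a minimal sketch of how the new tagWithEncodingId option is meant to be used with these factory methods, assuming an existing SchemaRegistryClient named client; the group id and event type are illustrative, and the writer and reader configurations for a stream must agree on the flag:

    // Sketch only: both sides disable the per-event encoding-id header, so neither expects it on the wire.
    SerializerConfig writerConfig = SerializerConfig.builder()
            .registryClient(client)
            .groupId("mygroup")
            .tagWithEncodingId(false)   // defaults to true; disable only if every reader of the stream does the same
            .build();
    Serializer<DerivedUser1> serializer = SerializerFactory.jsonSerializer(writerConfig, JSONSchema.of(DerivedUser1.class));

    SerializerConfig readerConfig = SerializerConfig.builder()
            .registryClient(client)
            .groupId("mygroup")
            .tagWithEncodingId(false)   // must match the writer's choice
            .build();
    Serializer<DerivedUser1> deserializer = SerializerFactory.jsonDeserializer(readerConfig, JSONSchema.of(DerivedUser1.class));
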
@@ -393,7 +394,7 @@ public static Serializer> deserializerWithSchema(SerializerCo * serialization formats. * An event serialized with avro is deserialized into {@link GenericRecord}. * An event serialized with protobuf is deserialized into {@link DynamicMessage}. - * An event serialized with json is deserialized into {@link java.util.LinkedHashMap}. + * An event serialized with json is deserialized into {@link JsonNode}. * * @param config serializer config * @return a deserializer that can deserialize events serialized as protobuf, json or avro into java objects. @@ -418,7 +419,7 @@ public static Serializer deserializeAsJsonString(SerializerConfig config * Formats supported are protobuf, avro and json. * An event serialized with avro is deserialized into {@link GenericRecord}. * An event serialized with protobuf is deserialized into {@link DynamicMessage}. - * An event serialized with json is deserialized into {@link java.util.LinkedHashMap}. + * An event serialized with json is deserialized into {@link JsonNode}. * * This also takes a transform function which is applied on the deserialized object and should transform the object * into the type T. diff --git a/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java b/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java index 909b57772..6bb96e4c0 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java +++ b/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java @@ -10,6 +10,11 @@ package io.pravega.schemaregistry.serializers; import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.jsonFormatVisitors.JsonFormatTypes; +import com.fasterxml.jackson.databind.node.TextNode; +import com.fasterxml.jackson.module.jsonSchema.JsonSchema; import com.google.common.base.Strings; import com.google.common.collect.ImmutableMap; import com.google.protobuf.DescriptorProtos; @@ -239,7 +244,7 @@ public void testJsonSerializers() throws JsonProcessingException { SerializerConfig config = SerializerConfig.builder().registryClient(client).groupId("groupId").build(); JSONSchema schema1 = JSONSchema.of(DerivedUser1.class); JSONSchema schema2 = JSONSchema.of(DerivedUser2.class); - + VersionInfo versionInfo1 = new VersionInfo("name", 0, 0); VersionInfo versionInfo2 = new VersionInfo("name", 1, 1); doAnswer(x -> GroupProperties.builder().serializationFormat(SerializationFormat.Any).build()) @@ -261,10 +266,10 @@ public void testJsonSerializers() throws JsonProcessingException { assertEquals(deserialized, user1); serialized = serializer.serialize(user1); - Serializer> genericDeserializer = SerializerFactory.jsonGenericDeserializer(config); - WithSchema generic = genericDeserializer.deserialize(serialized); + Serializer> genericDeserializer = SerializerFactory.jsonGenericDeserializer(config); + WithSchema generic = genericDeserializer.deserialize(serialized); assertEquals(((JSONSchema) generic.getSchema()).getSchema(), schema1.getSchema()); - assertEquals(((Map) generic.getObject()).size(), 4); + assertEquals(((JsonNode) generic.getObject()).size(), 4); serialized = serializer.serialize(user1); Serializer stringDeserializer = SerializerFactory.jsonStringDeserializer(config); @@ -285,12 +290,33 @@ public void testJsonSerializers() throws JsonProcessingException { ByteBuffer s = 
serializer2.serialize(jsonObject); str = stringDeserializer.deserialize(s); + + String stringSchema = new ObjectMapper().writeValueAsString(JsonSchema.minimalForFormat(JsonFormatTypes.STRING)); + + JSONSchema strSchema = JSONSchema.of("string", stringSchema); + VersionInfo versionInfo4 = new VersionInfo("myData", 0, 3); + doAnswer(x -> versionInfo4).when(client).getVersionForSchema(anyString(), eq(strSchema.getSchemaInfo())); + doAnswer(x -> new EncodingId(3)).when(client).getEncodingId(anyString(), eq(versionInfo4), any()); + doAnswer(x -> new EncodingInfo(versionInfo4, strSchema.getSchemaInfo(), Codecs.None.getMimeType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(3))); + + Serializer serializer3 = SerializerFactory.jsonSerializer(config, strSchema); + Serializer deserializer3 = SerializerFactory.jsonDeserializer(config, strSchema); + Serializer> generic3 = SerializerFactory.jsonGenericDeserializer(config); + String string = "a"; + s = serializer3.serialize(string); + Object x = deserializer3.deserialize(s); + assertTrue(x instanceof String); + assertEquals(x, string); + s = serializer3.serialize(string); + Object jsonNode = generic3.deserialize(s); + assertTrue(((WithSchema) jsonNode).getObject() instanceof TextNode); + assertEquals(((TextNode) ((WithSchema) jsonNode).getObject()).textValue(), string); // multi type DerivedUser2 user2 = new DerivedUser2("user", new Address("street", "city"), 2, "user2"); JSONSchema schema1Base = JSONSchema.ofBaseType(DerivedUser1.class, Object.class); JSONSchema schema2Base = JSONSchema.ofBaseType(DerivedUser2.class, Object.class); - Map, JSONSchema> map = new HashMap<>(); + Map, JSONSchema> map = new HashMap<>(); map.put(DerivedUser1.class, schema1Base); map.put(DerivedUser2.class, schema2Base); Serializer multiSerializer = SerializerFactory.jsonMultiTypeSerializer(config, map); @@ -303,11 +329,11 @@ public void testJsonSerializers() throws JsonProcessingException { deserialized2 = multiDeserializer.deserialize(serialized); assertEquals(deserialized2, user2); - Map, JSONSchema> map2 = new HashMap<>(); + Map, JSONSchema> map2 = new HashMap<>(); map2.put(DerivedUser1.class, schema1Base); - Serializer>> fallbackDeserializer = SerializerFactory.jsonTypedOrGenericDeserializer(config, map2); + Serializer>> fallbackDeserializer = SerializerFactory.jsonTypedOrGenericDeserializer(config, map2); serialized = multiSerializer.serialize(user1); - Either> fallback = fallbackDeserializer.deserialize(serialized); + Either> fallback = fallbackDeserializer.deserialize(serialized); assertTrue(fallback.isLeft()); assertEquals(fallback.getLeft(), user1); @@ -381,7 +407,8 @@ public void testMultiformatDeserializers() throws IOException { @Test public void testNoEncodingProto() throws IOException { SchemaRegistryClient client = mock(SchemaRegistryClient.class); - SerializerConfig config = SerializerConfig.builder().registryClient(client).groupId("groupId").build(); + SerializerConfig config = SerializerConfig.builder().registryClient(client).groupId("groupId") + .tagWithEncodingId(false).build(); Path path = Paths.get("src/test/resources/proto/protobufTest.pb"); byte[] schemaBytes = Files.readAllBytes(path); DescriptorProtos.FileDescriptorSet descriptorSet = DescriptorProtos.FileDescriptorSet.parseFrom(schemaBytes); @@ -389,7 +416,7 @@ public void testNoEncodingProto() throws IOException { VersionInfo versionInfo1 = new VersionInfo("name", 0, 0); doAnswer(x -> GroupProperties.builder().serializationFormat(SerializationFormat.Any) - 
.properties(ImmutableMap.of(SerializerFactory.ENCODE, Boolean.toString(false))).build()) + .properties(ImmutableMap.of()).build()) .when(client).getGroupProperties(anyString()); doAnswer(x -> versionInfo1).when(client).getVersionForSchema(anyString(), eq(schema1.getSchemaInfo())); doAnswer(x -> new SchemaWithVersion(schema1.getSchemaInfo(), versionInfo1)).when(client).getLatestSchemaVersion(anyString(), any()); @@ -421,12 +448,13 @@ public void testNoEncodingProto() throws IOException { @Test public void testNoEncodingJson() throws IOException { SchemaRegistryClient client = mock(SchemaRegistryClient.class); - SerializerConfig config = SerializerConfig.builder().registryClient(client).groupId("groupId").build(); + SerializerConfig config = SerializerConfig.builder().registryClient(client).groupId("groupId") + .tagWithEncodingId(false).build(); JSONSchema schema1 = JSONSchema.of(DerivedUser1.class); VersionInfo versionInfo1 = new VersionInfo("name", 0, 0); doAnswer(x -> GroupProperties.builder().serializationFormat(SerializationFormat.Any) - .properties(ImmutableMap.of(SerializerFactory.ENCODE, Boolean.toString(false))).build()) + .properties(ImmutableMap.of()).build()) .when(client).getGroupProperties(anyString()); doAnswer(x -> versionInfo1).when(client).getVersionForSchema(anyString(), eq(schema1.getSchemaInfo())); doAnswer(x -> new SchemaWithVersion(schema1.getSchemaInfo(), versionInfo1)).when(client).getLatestSchemaVersion(anyString(), any()); @@ -444,9 +472,9 @@ public void testNoEncodingJson() throws IOException { serialized = serializer.serialize(user1); - Serializer> genericDeserializer = SerializerFactory.jsonGenericDeserializer(config); + Serializer> genericDeserializer = SerializerFactory.jsonGenericDeserializer(config); - WithSchema generic = genericDeserializer.deserialize(serialized); + WithSchema generic = genericDeserializer.deserialize(serialized); assertNotNull(generic.getObject()); assertNull(generic.getSchema()); } From c4a732bb63a558b974cc2688c4970fa72795f9c0 Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Wed, 8 Jul 2020 00:55:49 -0700 Subject: [PATCH 48/70] review comments Signed-off-by: Shivesh Ranjan --- .../client/SchemaRegistryClient.java | 5 +- .../client/SchemaRegistryClientImpl.java | 9 +- .../client/TestSchemaRegistryClient.java | 29 ++-- .../contract/data/CodecType.java | 49 +++++++ .../contract/data/EncodingInfo.java | 2 +- .../generated/rest/model/CodecType.java | 127 ++++++++++++++++++ .../generated/rest/model/CodecTypesList.java | 13 +- .../generated/rest/model/EncodingInfo.java | 9 +- .../contract/transform/ModelHelper.java | 17 ++- .../schemaregistry/contract/v1/ApiV1.java | 5 +- contract/src/main/swagger/SchemaRegistry.yaml | 29 +++- .../contract/transform/ModelHelperTest.java | 15 ++- 12 files changed, 262 insertions(+), 47 deletions(-) create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/data/CodecType.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/CodecType.java diff --git a/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java index 1e2ef9f44..a0d0083ec 100644 --- a/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java +++ b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java @@ -10,6 +10,7 @@ package io.pravega.schemaregistry.client; import com.google.common.annotations.Beta; +import 
io.pravega.schemaregistry.contract.data.CodecType; import io.pravega.schemaregistry.contract.data.Compatibility; import io.pravega.schemaregistry.contract.data.EncodingId; import io.pravega.schemaregistry.contract.data.EncodingInfo; @@ -296,7 +297,7 @@ SchemaWithVersion getLatestSchemaVersion(String groupId, @Nullable String schema * @throws ResourceNotFoundException if group is not found. * @throws UnauthorizedException if the user is unauthorized. */ - List getCodecTypes(String groupId) throws ResourceNotFoundException, UnauthorizedException; + List getCodecTypes(String groupId) throws ResourceNotFoundException, UnauthorizedException; /** * Add new codec type to be used in encoding in the group. Adding a new codectype is backward incompatible. @@ -307,7 +308,7 @@ SchemaWithVersion getLatestSchemaVersion(String groupId, @Nullable String schema * @throws ResourceNotFoundException if group is not found. * @throws UnauthorizedException if the user is unauthorized. */ - void addCodecType(String groupId, String codecType) throws ResourceNotFoundException, UnauthorizedException; + void addCodecType(String groupId, CodecType codecType) throws ResourceNotFoundException, UnauthorizedException; /** * Gets complete schema evolution history of the group with schemas, versions, compatibility policy and diff --git a/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientImpl.java b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientImpl.java index 0a5ec62f4..32169e355 100644 --- a/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientImpl.java +++ b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientImpl.java @@ -13,6 +13,7 @@ import io.pravega.common.Exceptions; import io.pravega.common.util.Retry; import io.pravega.schemaregistry.common.ContinuationTokenIterator; +import io.pravega.schemaregistry.contract.data.CodecType; import io.pravega.schemaregistry.contract.data.Compatibility; import io.pravega.schemaregistry.contract.data.EncodingId; import io.pravega.schemaregistry.contract.data.EncodingInfo; @@ -404,13 +405,13 @@ public boolean canReadUsing(String groupId, SchemaInfo schemaInfo) { } @Override - public List getCodecTypes(String groupId) { + public List getCodecTypes(String groupId) { return withRetry(() -> { Response response = groupProxy.getCodecTypesList(groupId); CodecTypesList list = response.readEntity(CodecTypesList.class); switch (Response.Status.fromStatusCode(response.getStatus())) { case OK: - return list.getCodecTypes(); + return list.getCodecTypes().stream().map(ModelHelper::decode).collect(Collectors.toList()); case NOT_FOUND: throw new ResourceNotFoundException("Group not found."); default: @@ -421,9 +422,9 @@ public List getCodecTypes(String groupId) { } @Override - public void addCodecType(String groupId, String codecType) { + public void addCodecType(String groupId, CodecType codecType) { withRetry(() -> { - Response response = groupProxy.addCodecType(groupId, codecType); + Response response = groupProxy.addCodecType(groupId, ModelHelper.encode(codecType)); switch (Response.Status.fromStatusCode(response.getStatus())) { case CREATED: diff --git a/client/src/test/java/io/pravega/schemaregistry/client/TestSchemaRegistryClient.java b/client/src/test/java/io/pravega/schemaregistry/client/TestSchemaRegistryClient.java index 52360606e..141446eca 100644 --- a/client/src/test/java/io/pravega/schemaregistry/client/TestSchemaRegistryClient.java +++ 
b/client/src/test/java/io/pravega/schemaregistry/client/TestSchemaRegistryClient.java @@ -11,6 +11,7 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; +import io.pravega.schemaregistry.contract.data.CodecType; import io.pravega.schemaregistry.contract.data.Compatibility; import io.pravega.schemaregistry.contract.data.EncodingId; import io.pravega.schemaregistry.contract.data.EncodingInfo; @@ -311,7 +312,7 @@ public void testGetEncodingInfo() { SerializationFormat serializationFormat = SerializationFormat.custom("custom"); ByteBuffer schemaData = ByteBuffer.wrap(new byte[0]); SchemaInfo schemaInfo = new SchemaInfo("schema1", serializationFormat, schemaData, ImmutableMap.of()); - String codecType = "gzip"; + CodecType codecType = new CodecType("gzip"); EncodingInfo encodingInfo = new EncodingInfo(versionInfo, schemaInfo, codecType); EncodingId encodingId = new EncodingId(5); doReturn(ModelHelper.encode(encodingInfo)).when(response).readEntity( @@ -338,25 +339,25 @@ public void testGetEncodingId() { doReturn(response).when(proxy).getEncodingId(anyString(), any()); doReturn(Response.Status.OK.getStatusCode()).when(response).getStatus(); - String codecType = "gzip"; + CodecType codecType = new CodecType("gzip"); VersionInfo versionInfo = new VersionInfo("schema2", 5, 5); io.pravega.schemaregistry.contract.generated.rest.model.EncodingId encodingId = ModelHelper.encode(new EncodingId(5)); doReturn(encodingId).when(response).readEntity( io.pravega.schemaregistry.contract.generated.rest.model.EncodingId.class); - EncodingId encodingId1 = client.getEncodingId("mygroup", versionInfo, codecType); + EncodingId encodingId1 = client.getEncodingId("mygroup", versionInfo, codecType.getName()); assertEquals(encodingId.getEncodingId().intValue(), encodingId1.getId()); // NotFound Exception doReturn(Response.Status.NOT_FOUND.getStatusCode()).when(response).getStatus(); AssertExtensions.assertThrows("An exception should have been thrown", - () -> client.getEncodingId("mygroup", versionInfo, codecType), e -> e instanceof ResourceNotFoundException); + () -> client.getEncodingId("mygroup", versionInfo, codecType.getName()), e -> e instanceof ResourceNotFoundException); // StringNotFound Exception doReturn(Response.Status.PRECONDITION_FAILED.getStatusCode()).when(response).getStatus(); AssertExtensions.assertThrows("An exception should have been thrown", - () -> client.getEncodingId("mygroup", versionInfo, codecType), e -> e instanceof CodecTypeNotRegisteredException); + () -> client.getEncodingId("mygroup", versionInfo, codecType.getName()), e -> e instanceof CodecTypeNotRegisteredException); // Runtime Exception doReturn(Response.Status.CONFLICT.getStatusCode()).when(response).getStatus(); AssertExtensions.assertThrows("An exception should have been thrown", - () -> client.getEncodingId("mygroup", versionInfo, codecType), e -> e instanceof InternalServerError); + () -> client.getEncodingId("mygroup", versionInfo, codecType.getName()), e -> e instanceof InternalServerError); } @Test @@ -567,16 +568,16 @@ public void testGetCodecTypes() { doReturn(response).when(proxy).getCodecTypesList(anyString()); doReturn(Response.Status.OK.getStatusCode()).when(response).getStatus(); - String codecType = "gzip"; - String codecType1 = "snappy"; + CodecType codecType = new CodecType("gzip"); + CodecType codecType1 = new CodecType("snappy"); CodecTypesList codecTypesList = new CodecTypesList(); - codecTypesList.addCodecTypesItem(codecType); - codecTypesList.addCodecTypesItem(codecType1); 
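For reference, a minimal sketch of the reworked codec-type client API that this test exercises; the group name is illustrative and client stands for a SchemaRegistryClient:

    // Register a codec type by name; its properties default to an empty map.
    CodecType gzip = new CodecType("gzip");
    client.addCodecType("mygroup", gzip);

    // Codec types are now returned as CodecType objects rather than plain strings.
    List<CodecType> registered = client.getCodecTypes("mygroup");
    registered.forEach(c -> System.out.println(c.getName()));
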
+ codecTypesList.addCodecTypesItem(ModelHelper.encode(codecType)); + codecTypesList.addCodecTypesItem(ModelHelper.encode(codecType1)); doReturn(codecTypesList).when(response).readEntity(CodecTypesList.class); - List codecTypesList1 = client.getCodecTypes("mygroup"); + List codecTypesList1 = client.getCodecTypes("mygroup"); assertEquals(2, codecTypesList1.size()); - assertEquals("gzip", codecTypesList1.get(0)); - assertEquals("snappy", codecTypesList1.get(1)); + assertEquals("gzip", codecTypesList1.get(0).getName()); + assertEquals("snappy", codecTypesList1.get(1).getName()); //NotFound Exception doReturn(Response.Status.NOT_FOUND.getStatusCode()).when(response).getStatus(); AssertExtensions.assertThrows("An exception should have been thrown", @@ -595,7 +596,7 @@ public void testAddCodecType() { doReturn(response).when(proxy).addCodecType(anyString(), any()); doReturn(Response.Status.CREATED.getStatusCode()).when(response).getStatus(); - String codecType = "gzip"; + CodecType codecType = new CodecType("gzip"); client.addCodecType("mygroup", codecType); assertEquals(Response.Status.CREATED.getStatusCode(), response.getStatus()); //NotFound Exception diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/data/CodecType.java b/contract/src/main/java/io/pravega/schemaregistry/contract/data/CodecType.java new file mode 100644 index 000000000..fce9841b3 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/data/CodecType.java @@ -0,0 +1,49 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.contract.data; + +import com.google.common.base.Preconditions; +import com.google.common.collect.ImmutableMap; +import io.pravega.common.ObjectBuilder; +import lombok.Builder; +import lombok.Data; + +/** + * Encapsulates properties of a codecType. + */ +@Data +@Builder +public class CodecType { + /** + * Name that identifies the codec type. Users could typically use the mime type name for the encoding. + */ + private final String name; + /** + * User defined key value strings that users can use to add any additional metadata to the codecType. + * This can be used to share additional information with the decoder about how to decode, for example, if codecType was + * for encryption, the additional information could include algorithm and other params required for decryption. + * This is opaque to the service and stored with the codecType when the codec is registered for a group and delivered + * with encoding information. 
+ */ + private final ImmutableMap properties; + + public CodecType(String name) { + this(name, ImmutableMap.of()); + } + + public CodecType(String name, ImmutableMap properties) { + Preconditions.checkArgument(name != null); + this.name = name; + this.properties = properties; + } + + public static class CodecTypeBuilder implements ObjectBuilder { + } +} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/data/EncodingInfo.java b/contract/src/main/java/io/pravega/schemaregistry/contract/data/EncodingInfo.java index f5e396ea2..9846e7f2e 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/data/EncodingInfo.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/data/EncodingInfo.java @@ -29,5 +29,5 @@ public class EncodingInfo { /** * Codec type which is used in encoding the data. */ - private final String codecType; + private final CodecType codecType; } diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/CodecType.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/CodecType.java new file mode 100644 index 000000000..362f9c0eb --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/CodecType.java @@ -0,0 +1,127 @@ +/* + * Pravega Schema Registry APIs + * REST APIs for Pravega Schema Registry. + * + * OpenAPI spec version: 0.0.1 + * + * + * NOTE: This class is auto generated by the swagger code generator program. + * https://github.com/swagger-api/swagger-codegen.git + * Do not edit the class manually. + */ + + +package io.pravega.schemaregistry.contract.generated.rest.model; + +import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonCreator; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import javax.validation.constraints.*; + +/** + * Codec Type. + */ +@ApiModel(description = "Codec Type.") + +public class CodecType { + @JsonProperty("name") + private String name = null; + + @JsonProperty("properties") + private Map properties = null; + + public CodecType name(String name) { + this.name = name; + return this; + } + + /** + * codecType name. + * @return name + **/ + @JsonProperty("name") + @ApiModelProperty(required = true, value = "codecType name.") + @NotNull + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public CodecType properties(Map properties) { + this.properties = properties; + return this; + } + + public CodecType putPropertiesItem(String key, String propertiesItem) { + if (this.properties == null) { + this.properties = new HashMap(); + } + this.properties.put(key, propertiesItem); + return this; + } + + /** + * User defined key value strings. Maximum size for the map, combined length of all keys and values should be less than or equal to 1 mb. + * @return properties + **/ + @JsonProperty("properties") + @ApiModelProperty(value = "User defined key value strings. 
Maximum size for the map, combined length of all keys and values should be less than or equal to 1 mb.") + public Map getProperties() { + return properties; + } + + public void setProperties(Map properties) { + this.properties = properties; + } + + + @Override + public boolean equals(java.lang.Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + CodecType codecType = (CodecType) o; + return Objects.equals(this.name, codecType.name) && + Objects.equals(this.properties, codecType.properties); + } + + @Override + public int hashCode() { + return Objects.hash(name, properties); + } + + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class CodecType {\n"); + + sb.append(" name: ").append(toIndentedString(name)).append("\n"); + sb.append(" properties: ").append(toIndentedString(properties)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces + * (except the first line). + */ + private String toIndentedString(java.lang.Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} + diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/CodecTypesList.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/CodecTypesList.java index 96c10bacc..ee87dc405 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/CodecTypesList.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/CodecTypesList.java @@ -16,6 +16,7 @@ import java.util.Objects; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonCreator; +import io.pravega.schemaregistry.contract.generated.rest.model.CodecType; import io.swagger.annotations.ApiModel; import io.swagger.annotations.ApiModelProperty; import java.util.ArrayList; @@ -29,16 +30,16 @@ public class CodecTypesList { @JsonProperty("codecTypes") - private List codecTypes = null; + private List codecTypes = null; - public CodecTypesList codecTypes(List codecTypes) { + public CodecTypesList codecTypes(List codecTypes) { this.codecTypes = codecTypes; return this; } - public CodecTypesList addCodecTypesItem(String codecTypesItem) { + public CodecTypesList addCodecTypesItem(CodecType codecTypesItem) { if (this.codecTypes == null) { - this.codecTypes = new ArrayList(); + this.codecTypes = new ArrayList(); } this.codecTypes.add(codecTypesItem); return this; @@ -50,11 +51,11 @@ public CodecTypesList addCodecTypesItem(String codecTypesItem) { **/ @JsonProperty("codecTypes") @ApiModelProperty(value = "List of codecTypes.") - public List getCodecTypes() { + public List getCodecTypes() { return codecTypes; } - public void setCodecTypes(List codecTypes) { + public void setCodecTypes(List codecTypes) { this.codecTypes = codecTypes; } diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/EncodingInfo.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/EncodingInfo.java index 1276ec038..4d3b1e5ec 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/EncodingInfo.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/EncodingInfo.java @@ -16,6 +16,7 @@ import java.util.Objects; import com.fasterxml.jackson.annotation.JsonProperty; 
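A brief sketch of how the regenerated REST model is populated after this change; schemaInfo and versionInfo stand for previously built generated-model instances, and the property key/value pair is illustrative:

    // Generated REST model: fluent setters, with optional user-defined properties.
    CodecType restCodecType = new CodecType()
            .name("gzip")
            .putPropertiesItem("level", "5");

    // EncodingInfo now carries a CodecType object instead of a bare string.
    EncodingInfo restEncodingInfo = new EncodingInfo()
            .schemaInfo(schemaInfo)
            .versionInfo(versionInfo)
            .codecType(restCodecType);
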
import com.fasterxml.jackson.annotation.JsonCreator; +import io.pravega.schemaregistry.contract.generated.rest.model.CodecType; import io.pravega.schemaregistry.contract.generated.rest.model.SchemaInfo; import io.pravega.schemaregistry.contract.generated.rest.model.VersionInfo; import io.swagger.annotations.ApiModel; @@ -35,7 +36,7 @@ public class EncodingInfo { private VersionInfo versionInfo = null; @JsonProperty("codecType") - private String codecType = null; + private CodecType codecType = null; public EncodingInfo schemaInfo(SchemaInfo schemaInfo) { this.schemaInfo = schemaInfo; @@ -77,7 +78,7 @@ public void setVersionInfo(VersionInfo versionInfo) { this.versionInfo = versionInfo; } - public EncodingInfo codecType(String codecType) { + public EncodingInfo codecType(CodecType codecType) { this.codecType = codecType; return this; } @@ -89,11 +90,11 @@ public EncodingInfo codecType(String codecType) { @JsonProperty("codecType") @ApiModelProperty(required = true, value = "Codec type.") @NotNull - public String getCodecType() { + public CodecType getCodecType() { return codecType; } - public void setCodecType(String codecType) { + public void setCodecType(CodecType codecType) { this.codecType = codecType; } diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/transform/ModelHelper.java b/contract/src/main/java/io/pravega/schemaregistry/contract/transform/ModelHelper.java index beb08164b..3d6892068 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/transform/ModelHelper.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/transform/ModelHelper.java @@ -17,6 +17,7 @@ import io.pravega.schemaregistry.contract.generated.rest.model.BackwardPolicy; import io.pravega.schemaregistry.contract.generated.rest.model.BackwardTill; import io.pravega.schemaregistry.contract.generated.rest.model.BackwardTransitive; +import io.pravega.schemaregistry.contract.generated.rest.model.CodecType; import io.pravega.schemaregistry.contract.generated.rest.model.Compatibility; import io.pravega.schemaregistry.contract.generated.rest.model.EncodingId; import io.pravega.schemaregistry.contract.generated.rest.model.EncodingInfo; @@ -197,7 +198,14 @@ public static io.pravega.schemaregistry.contract.data.EncodingInfo decode(Encodi Preconditions.checkArgument(encodingInfo.getSchemaInfo() != null, "SchemaInfo cannot be null"); Preconditions.checkArgument(encodingInfo.getCodecType() != null, "CodecType cannot be null"); return new io.pravega.schemaregistry.contract.data.EncodingInfo(decode(encodingInfo.getVersionInfo()), - decode(encodingInfo.getSchemaInfo()), encodingInfo.getCodecType()); + decode(encodingInfo.getSchemaInfo()), decode(encodingInfo.getCodecType())); + } + + public static io.pravega.schemaregistry.contract.data.CodecType decode(CodecType codecType) { + Preconditions.checkArgument(codecType != null, "CodecType cannot be null"); + Preconditions.checkArgument(codecType.getName() != null, "CodecType.name cannot be null"); + return codecType.getProperties() == null ? 
new io.pravega.schemaregistry.contract.data.CodecType(codecType.getName()) : + new io.pravega.schemaregistry.contract.data.CodecType(codecType.getName(), ImmutableMap.copyOf(codecType.getProperties())); } public static io.pravega.schemaregistry.contract.data.SchemaWithVersion decode(SchemaWithVersion schemaWithVersion) { @@ -336,11 +344,16 @@ public static EncodingId encode(io.pravega.schemaregistry.contract.data.Encoding } public static EncodingInfo encode(io.pravega.schemaregistry.contract.data.EncodingInfo encodingInfo) { - return new EncodingInfo().codecType(encodingInfo.getCodecType()) + return new EncodingInfo().codecType(encode(encodingInfo.getCodecType())) .versionInfo(encode(encodingInfo.getVersionInfo())) .schemaInfo(encode(encodingInfo.getSchemaInfo())); } + public static CodecType encode(io.pravega.schemaregistry.contract.data.CodecType codecType) { + return new CodecType().name(codecType.getName()) + .properties(codecType.getProperties()); + } + // endregion private static > T searchEnum(Class enumeration, String search) { diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/v1/ApiV1.java b/contract/src/main/java/io/pravega/schemaregistry/contract/v1/ApiV1.java index 7c4ead830..53bb3f53d 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/v1/ApiV1.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/v1/ApiV1.java @@ -12,6 +12,7 @@ import com.google.common.annotations.Beta; import io.pravega.schemaregistry.contract.generated.rest.model.AddedTo; import io.pravega.schemaregistry.contract.generated.rest.model.CanRead; +import io.pravega.schemaregistry.contract.generated.rest.model.CodecType; import io.pravega.schemaregistry.contract.generated.rest.model.CodecTypesList; import io.pravega.schemaregistry.contract.generated.rest.model.CreateGroupRequest; import io.pravega.schemaregistry.contract.generated.rest.model.EncodingId; @@ -65,7 +66,7 @@ public interface GroupsApi { @io.swagger.annotations.ApiResponse(code = 404, message = "Group not found", response = Void.class), @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while creating a Group", response = Void.class)}) Response addCodecType(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, - @ApiParam(value = "The codec type", required = true) String codecType); + @ApiParam(value = "The codec type", required = true) CodecType codecType); @POST @Path("/{groupName}/schemas") @@ -295,7 +296,7 @@ public interface GroupsApiAsync { @io.swagger.annotations.ApiResponse(code = 404, message = "Group not found", response = Void.class), @io.swagger.annotations.ApiResponse(code = 500, message = "Internal server error while registering codectype to a Group", response = Void.class)}) void addCodecType(@ApiParam(value = "Group name", required = true) @PathParam("groupName") String groupName, - @ApiParam(value = "Add codec type", required = true) String codecType, @Suspended AsyncResponse asyncResponse); + @ApiParam(value = "Add codec type", required = true) CodecType codecType, @Suspended AsyncResponse asyncResponse); @POST @Path("/{groupName}/schemas") diff --git a/contract/src/main/swagger/SchemaRegistry.yaml b/contract/src/main/swagger/SchemaRegistry.yaml index 65448ea59..c64849945 100644 --- a/contract/src/main/swagger/SchemaRegistry.yaml +++ b/contract/src/main/swagger/SchemaRegistry.yaml @@ -474,6 +474,7 @@ paths: $ref: "#/definitions/VersionInfo" codecType: type: string + description: name of Codec Type required: - 
versionInfo - codecType @@ -555,7 +556,7 @@ paths: description: The codecType required: true schema: - type: string + $ref: "#/definitions/CodecType" responses: 201: description: Successfully added codecType to group @@ -658,12 +659,12 @@ definitions: type: string format: binary properties: - description: User defined key value strings. + description: User defined key value strings. Maximum size for the map, combined length of all keys and values should be less than or equal to 1 mb. type: object additionalProperties: type: string minLength: 0 - maxLength: 40 + maxLength: 1048576 required: - type - serializationFormat @@ -731,7 +732,7 @@ definitions: $ref: "#/definitions/VersionInfo" codecType: description: Codec type. - type: string + $ref: "#/definitions/CodecType" required: - schemaInfo - versionInfo @@ -860,6 +861,22 @@ definitions: required: - name - versionInfo + CodecType: + type: object + description: Codec Type. + properties: + name: + type: string + description: codecType name. + properties: + description: User defined key value strings. Maximum size for the map, combined length of all keys and values should be less than or equal to 1 mb. + type: object + additionalProperties: + type: string + minLength: 0 + maxLength: 1048576 + required: + - name CodecTypesList: type: object description: Response object for listCodecTypes. @@ -868,7 +885,7 @@ definitions: type: array description: List of codecTypes. items: - type: string + $ref: "#/definitions/CodecType" Valid: type: object description: Response object for validateSchema api. @@ -883,7 +900,7 @@ definitions: description: Response object for canRead api. properties: compatible: - description: Whether given schema is compatible and can be used for reads. BackwardAndForward is checked against existing group schemas subject to group's configured compatibility policy. + description: Whether given schema is compatible and can be used for reads. Compatibility is checked against existing group schemas subject to group's configured compatibility policy. 
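For illustration, with the contract change above the addCodecType request body is now a structured CodecType instead of a bare string. A minimal sketch of building that body with the generated REST model (the codec name and the property value are invented for the example):

    // Hypothetical request body for the addCodecType operation defined above.
    CodecType body = new CodecType()
            .name("application/x-snappy-framed")
            .putPropertiesItem("note", "example metadata");   // optional user-defined key/value strings
    // ApiV1.GroupsApi#addCodecType(String groupName, CodecType codecType) now accepts this object
    // in place of the earlier String parameter.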
type: boolean required: - compatible diff --git a/contract/src/test/java/io/pravega/schemaregistry/contract/transform/ModelHelperTest.java b/contract/src/test/java/io/pravega/schemaregistry/contract/transform/ModelHelperTest.java index 0ca9f4803..ab9f92366 100644 --- a/contract/src/test/java/io/pravega/schemaregistry/contract/transform/ModelHelperTest.java +++ b/contract/src/test/java/io/pravega/schemaregistry/contract/transform/ModelHelperTest.java @@ -14,6 +14,7 @@ import io.pravega.schemaregistry.contract.generated.rest.model.BackwardAndForward; import io.pravega.schemaregistry.contract.generated.rest.model.BackwardPolicy; import io.pravega.schemaregistry.contract.generated.rest.model.BackwardTill; +import io.pravega.schemaregistry.contract.generated.rest.model.CodecType; import io.pravega.schemaregistry.contract.generated.rest.model.Compatibility; import io.pravega.schemaregistry.contract.generated.rest.model.EncodingId; import io.pravega.schemaregistry.contract.generated.rest.model.EncodingInfo; @@ -50,7 +51,7 @@ public void testDecode() { .backwardPolicy(new BackwardTill().name(BackwardTill.class.getSimpleName()).versionInfo(version))) .forwardPolicy(new ForwardPolicy().forwardPolicy(new ForwardTill().name(ForwardTill.class.getSimpleName()).versionInfo(version))) ); - String codecType = "custom"; + CodecType codecType = new CodecType().name("custom"); // decodes io.pravega.schemaregistry.contract.data.SerializationFormat serializationFormat = ModelHelper.decode(type); @@ -81,8 +82,9 @@ public void testDecode() { assertEquals(versionInfo.getType(), version.getType()); assertEquals(versionInfo.getVersion(), version.getVersion().intValue()); - io.pravega.schemaregistry.contract.data.EncodingInfo encodingInfo = ModelHelper.decode(new EncodingInfo().schemaInfo(schema).versionInfo(version).codecType(codecType)); - assertEquals(encodingInfo.getCodecType(), "custom"); + io.pravega.schemaregistry.contract.data.EncodingInfo encodingInfo = ModelHelper.decode( + new EncodingInfo().schemaInfo(schema).versionInfo(version).codecType(codecType)); + assertEquals(encodingInfo.getCodecType().getName(), "custom"); assertEquals(encodingInfo.getVersionInfo(), versionInfo); assertEquals(encodingInfo.getSchemaInfo(), schemaInfo); io.pravega.schemaregistry.contract.data.SchemaWithVersion schemaWithVersion = ModelHelper.decode(new SchemaWithVersion().schemaInfo(schema).versionInfo(version)); @@ -107,7 +109,7 @@ public void testEncode() { io.pravega.schemaregistry.contract.data.GroupProperties prop = io.pravega.schemaregistry.contract.data.GroupProperties .builder().serializationFormat(serializationFormat).compatibility(compatibility) .allowMultipleTypes(true).properties(ImmutableMap.of()).build(); - String codecType = "codecType"; + io.pravega.schemaregistry.contract.data.CodecType codecType = new io.pravega.schemaregistry.contract.data.CodecType("codecType"); // encode test VersionInfo version = ModelHelper.encode(versionInfo); @@ -126,8 +128,9 @@ public void testEncode() { EncodingId encodingId = ModelHelper.encode(new io.pravega.schemaregistry.contract.data.EncodingId(0)); assertEquals(encodingId.getEncodingId().intValue(), 0); - EncodingInfo encodingInfo = ModelHelper.encode(new io.pravega.schemaregistry.contract.data.EncodingInfo(versionInfo, schemaInfo, codecType)); - assertEquals(encodingInfo.getCodecType(), codecType); + EncodingInfo encodingInfo = ModelHelper.encode(new io.pravega.schemaregistry.contract.data.EncodingInfo( + versionInfo, schemaInfo, codecType)); + 
assertEquals(encodingInfo.getCodecType(), ModelHelper.encode(codecType)); assertEquals(encodingInfo.getVersionInfo(), version); assertEquals(encodingInfo.getSchemaInfo(), schema); From 5daaeb419a1c2cee774780dc0d8c4f497abd0617 Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Wed, 8 Jul 2020 01:08:26 -0700 Subject: [PATCH 49/70] temp Signed-off-by: Shivesh Ranjan --- .../contract/data/CodecType.java | 49 +++++++ .../contract/data/EncodingInfo.java | 2 +- .../generated/rest/model/CodecType.java | 127 ++++++++++++++++++ .../generated/rest/model/CodecTypesList.java | 13 +- .../contract/transform/ModelHelper.java | 17 ++- .../contract/transform/ModelHelperTest.java | 15 ++- .../pravega/schemaregistry/codec/Codec.java | 8 +- .../serializers/AbstractSerializer.java | 2 +- .../schemaregistry/serializers/Codecs.java | 29 ++-- .../serializers/SerializerConfig.java | 27 ++-- .../serializers/SerializerFactoryHelper.java | 3 +- .../schemaregistry/codec/CodecTest.java | 6 +- .../serializers/SerializerTest.java | 26 ++-- 13 files changed, 263 insertions(+), 61 deletions(-) create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/data/CodecType.java create mode 100644 contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/CodecType.java diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/data/CodecType.java b/contract/src/main/java/io/pravega/schemaregistry/contract/data/CodecType.java new file mode 100644 index 000000000..fce9841b3 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/data/CodecType.java @@ -0,0 +1,49 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.contract.data; + +import com.google.common.base.Preconditions; +import com.google.common.collect.ImmutableMap; +import io.pravega.common.ObjectBuilder; +import lombok.Builder; +import lombok.Data; + +/** + * Encapsulates properties of a codecType. + */ +@Data +@Builder +public class CodecType { + /** + * Name that identifies the codec type. Users could typically use the mime type name for the encoding. + */ + private final String name; + /** + * User defined key value strings that users can use to add any additional metadata to the codecType. + * This can be used to share additional information with the decoder about how to decode, for example, if codecType was + * for encryption, the additional information could include algorithm and other params required for decryption. + * This is opaque to the service and stored with the codecType when the codec is registered for a group and delivered + * with encoding information. 
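As a rough sketch of how this data-plane CodecType might be constructed, following the encryption example from the comment above (the property keys and values are invented, and the properties field is assumed to be an ImmutableMap of String to String):

    // Illustrative only: a codec type whose properties carry hints the decoder needs.
    ImmutableMap<String, String> decryptionHints = ImmutableMap.of(
            "algorithm", "AES/GCM/NoPadding",    // hypothetical values
            "keyId", "key-2020-07");
    CodecType encrypting = new CodecType("application/x-encrypted", decryptionHints);

    // The single-argument constructor defaults to an empty properties map.
    CodecType plain = new CodecType("application/x-none");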
+ */ + private final ImmutableMap properties; + + public CodecType(String name) { + this(name, ImmutableMap.of()); + } + + public CodecType(String name, ImmutableMap properties) { + Preconditions.checkArgument(name != null); + this.name = name; + this.properties = properties; + } + + public static class CodecTypeBuilder implements ObjectBuilder { + } +} diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/data/EncodingInfo.java b/contract/src/main/java/io/pravega/schemaregistry/contract/data/EncodingInfo.java index f5e396ea2..9846e7f2e 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/data/EncodingInfo.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/data/EncodingInfo.java @@ -29,5 +29,5 @@ public class EncodingInfo { /** * Codec type which is used in encoding the data. */ - private final String codecType; + private final CodecType codecType; } diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/CodecType.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/CodecType.java new file mode 100644 index 000000000..a727ba823 --- /dev/null +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/CodecType.java @@ -0,0 +1,127 @@ +/* + * Pravega Schema Registry APIs + * REST APIs for Pravega Schema Registry. + * + * OpenAPI spec version: 0.0.1 + * + * + * NOTE: This class is auto generated by the swagger code generator program. + * https://github.com/swagger-api/swagger-codegen.git + * Do not edit the class manually. + */ + + +package io.pravega.schemaregistry.contract.generated.rest.model; + +import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonCreator; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import javax.validation.constraints.*; + +/** + * Codec Type. + */ +@ApiModel(description = "Codec Type.") + +public class CodecType { + @JsonProperty("name") + private String name = null; + + @JsonProperty("properties") + private Map properties = null; + + public CodecType name(String name) { + this.name = name; + return this; + } + + /** + * codecType name. + * @return name + **/ + @JsonProperty("name") + @ApiModelProperty(required = true, value = "codecType name.") + @NotNull + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public CodecType properties(Map properties) { + this.properties = properties; + return this; + } + + public CodecType putPropertiesItem(String key, String propertiesItem) { + if (this.properties == null) { + this.properties = new HashMap(); + } + this.properties.put(key, propertiesItem); + return this; + } + + /** + * User defined key value strings. 
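The transform between this generated wire model and the data-plane CodecType goes through the ModelHelper encode/decode overloads added in this patch; a small round-trip sketch (the codec name is arbitrary):

    // Wire model -> data model -> wire model; a null properties map decodes to an empty ImmutableMap.
    CodecType wire = new CodecType().name("application/x-gzip");
    io.pravega.schemaregistry.contract.data.CodecType data = ModelHelper.decode(wire);
    CodecType back = ModelHelper.encode(data);   // carries the same name plus the (now empty) properties map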
+ * @return properties + **/ + @JsonProperty("properties") + @ApiModelProperty(value = "User defined key value strings.") + public Map getProperties() { + return properties; + } + + public void setProperties(Map properties) { + this.properties = properties; + } + + + @Override + public boolean equals(java.lang.Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + CodecType codecType = (CodecType) o; + return Objects.equals(this.name, codecType.name) && + Objects.equals(this.properties, codecType.properties); + } + + @Override + public int hashCode() { + return Objects.hash(name, properties); + } + + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class CodecType {\n"); + + sb.append(" name: ").append(toIndentedString(name)).append("\n"); + sb.append(" properties: ").append(toIndentedString(properties)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces + * (except the first line). + */ + private String toIndentedString(java.lang.Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} + diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/CodecTypesList.java b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/CodecTypesList.java index 96c10bacc..ee87dc405 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/CodecTypesList.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/generated/rest/model/CodecTypesList.java @@ -16,6 +16,7 @@ import java.util.Objects; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonCreator; +import io.pravega.schemaregistry.contract.generated.rest.model.CodecType; import io.swagger.annotations.ApiModel; import io.swagger.annotations.ApiModelProperty; import java.util.ArrayList; @@ -29,16 +30,16 @@ public class CodecTypesList { @JsonProperty("codecTypes") - private List codecTypes = null; + private List codecTypes = null; - public CodecTypesList codecTypes(List codecTypes) { + public CodecTypesList codecTypes(List codecTypes) { this.codecTypes = codecTypes; return this; } - public CodecTypesList addCodecTypesItem(String codecTypesItem) { + public CodecTypesList addCodecTypesItem(CodecType codecTypesItem) { if (this.codecTypes == null) { - this.codecTypes = new ArrayList(); + this.codecTypes = new ArrayList(); } this.codecTypes.add(codecTypesItem); return this; @@ -50,11 +51,11 @@ public CodecTypesList addCodecTypesItem(String codecTypesItem) { **/ @JsonProperty("codecTypes") @ApiModelProperty(value = "List of codecTypes.") - public List getCodecTypes() { + public List getCodecTypes() { return codecTypes; } - public void setCodecTypes(List codecTypes) { + public void setCodecTypes(List codecTypes) { this.codecTypes = codecTypes; } diff --git a/contract/src/main/java/io/pravega/schemaregistry/contract/transform/ModelHelper.java b/contract/src/main/java/io/pravega/schemaregistry/contract/transform/ModelHelper.java index beb08164b..3d6892068 100644 --- a/contract/src/main/java/io/pravega/schemaregistry/contract/transform/ModelHelper.java +++ b/contract/src/main/java/io/pravega/schemaregistry/contract/transform/ModelHelper.java @@ -17,6 +17,7 @@ import io.pravega.schemaregistry.contract.generated.rest.model.BackwardPolicy; import 
io.pravega.schemaregistry.contract.generated.rest.model.BackwardTill; import io.pravega.schemaregistry.contract.generated.rest.model.BackwardTransitive; +import io.pravega.schemaregistry.contract.generated.rest.model.CodecType; import io.pravega.schemaregistry.contract.generated.rest.model.Compatibility; import io.pravega.schemaregistry.contract.generated.rest.model.EncodingId; import io.pravega.schemaregistry.contract.generated.rest.model.EncodingInfo; @@ -197,7 +198,14 @@ public static io.pravega.schemaregistry.contract.data.EncodingInfo decode(Encodi Preconditions.checkArgument(encodingInfo.getSchemaInfo() != null, "SchemaInfo cannot be null"); Preconditions.checkArgument(encodingInfo.getCodecType() != null, "CodecType cannot be null"); return new io.pravega.schemaregistry.contract.data.EncodingInfo(decode(encodingInfo.getVersionInfo()), - decode(encodingInfo.getSchemaInfo()), encodingInfo.getCodecType()); + decode(encodingInfo.getSchemaInfo()), decode(encodingInfo.getCodecType())); + } + + public static io.pravega.schemaregistry.contract.data.CodecType decode(CodecType codecType) { + Preconditions.checkArgument(codecType != null, "CodecType cannot be null"); + Preconditions.checkArgument(codecType.getName() != null, "CodecType.name cannot be null"); + return codecType.getProperties() == null ? new io.pravega.schemaregistry.contract.data.CodecType(codecType.getName()) : + new io.pravega.schemaregistry.contract.data.CodecType(codecType.getName(), ImmutableMap.copyOf(codecType.getProperties())); } public static io.pravega.schemaregistry.contract.data.SchemaWithVersion decode(SchemaWithVersion schemaWithVersion) { @@ -336,11 +344,16 @@ public static EncodingId encode(io.pravega.schemaregistry.contract.data.Encoding } public static EncodingInfo encode(io.pravega.schemaregistry.contract.data.EncodingInfo encodingInfo) { - return new EncodingInfo().codecType(encodingInfo.getCodecType()) + return new EncodingInfo().codecType(encode(encodingInfo.getCodecType())) .versionInfo(encode(encodingInfo.getVersionInfo())) .schemaInfo(encode(encodingInfo.getSchemaInfo())); } + public static CodecType encode(io.pravega.schemaregistry.contract.data.CodecType codecType) { + return new CodecType().name(codecType.getName()) + .properties(codecType.getProperties()); + } + // endregion private static > T searchEnum(Class enumeration, String search) { diff --git a/contract/src/test/java/io/pravega/schemaregistry/contract/transform/ModelHelperTest.java b/contract/src/test/java/io/pravega/schemaregistry/contract/transform/ModelHelperTest.java index 0ca9f4803..ab9f92366 100644 --- a/contract/src/test/java/io/pravega/schemaregistry/contract/transform/ModelHelperTest.java +++ b/contract/src/test/java/io/pravega/schemaregistry/contract/transform/ModelHelperTest.java @@ -14,6 +14,7 @@ import io.pravega.schemaregistry.contract.generated.rest.model.BackwardAndForward; import io.pravega.schemaregistry.contract.generated.rest.model.BackwardPolicy; import io.pravega.schemaregistry.contract.generated.rest.model.BackwardTill; +import io.pravega.schemaregistry.contract.generated.rest.model.CodecType; import io.pravega.schemaregistry.contract.generated.rest.model.Compatibility; import io.pravega.schemaregistry.contract.generated.rest.model.EncodingId; import io.pravega.schemaregistry.contract.generated.rest.model.EncodingInfo; @@ -50,7 +51,7 @@ public void testDecode() { .backwardPolicy(new BackwardTill().name(BackwardTill.class.getSimpleName()).versionInfo(version))) .forwardPolicy(new ForwardPolicy().forwardPolicy(new 
ForwardTill().name(ForwardTill.class.getSimpleName()).versionInfo(version))) ); - String codecType = "custom"; + CodecType codecType = new CodecType().name("custom"); // decodes io.pravega.schemaregistry.contract.data.SerializationFormat serializationFormat = ModelHelper.decode(type); @@ -81,8 +82,9 @@ public void testDecode() { assertEquals(versionInfo.getType(), version.getType()); assertEquals(versionInfo.getVersion(), version.getVersion().intValue()); - io.pravega.schemaregistry.contract.data.EncodingInfo encodingInfo = ModelHelper.decode(new EncodingInfo().schemaInfo(schema).versionInfo(version).codecType(codecType)); - assertEquals(encodingInfo.getCodecType(), "custom"); + io.pravega.schemaregistry.contract.data.EncodingInfo encodingInfo = ModelHelper.decode( + new EncodingInfo().schemaInfo(schema).versionInfo(version).codecType(codecType)); + assertEquals(encodingInfo.getCodecType().getName(), "custom"); assertEquals(encodingInfo.getVersionInfo(), versionInfo); assertEquals(encodingInfo.getSchemaInfo(), schemaInfo); io.pravega.schemaregistry.contract.data.SchemaWithVersion schemaWithVersion = ModelHelper.decode(new SchemaWithVersion().schemaInfo(schema).versionInfo(version)); @@ -107,7 +109,7 @@ public void testEncode() { io.pravega.schemaregistry.contract.data.GroupProperties prop = io.pravega.schemaregistry.contract.data.GroupProperties .builder().serializationFormat(serializationFormat).compatibility(compatibility) .allowMultipleTypes(true).properties(ImmutableMap.of()).build(); - String codecType = "codecType"; + io.pravega.schemaregistry.contract.data.CodecType codecType = new io.pravega.schemaregistry.contract.data.CodecType("codecType"); // encode test VersionInfo version = ModelHelper.encode(versionInfo); @@ -126,8 +128,9 @@ public void testEncode() { EncodingId encodingId = ModelHelper.encode(new io.pravega.schemaregistry.contract.data.EncodingId(0)); assertEquals(encodingId.getEncodingId().intValue(), 0); - EncodingInfo encodingInfo = ModelHelper.encode(new io.pravega.schemaregistry.contract.data.EncodingInfo(versionInfo, schemaInfo, codecType)); - assertEquals(encodingInfo.getCodecType(), codecType); + EncodingInfo encodingInfo = ModelHelper.encode(new io.pravega.schemaregistry.contract.data.EncodingInfo( + versionInfo, schemaInfo, codecType)); + assertEquals(encodingInfo.getCodecType(), ModelHelper.encode(codecType)); assertEquals(encodingInfo.getVersionInfo(), version); assertEquals(encodingInfo.getSchemaInfo(), schema); diff --git a/serializers/src/main/java/io/pravega/schemaregistry/codec/Codec.java b/serializers/src/main/java/io/pravega/schemaregistry/codec/Codec.java index 6dbd2773b..0ff3c9b4a 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/codec/Codec.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/codec/Codec.java @@ -9,6 +9,7 @@ */ package io.pravega.schemaregistry.codec; +import io.pravega.schemaregistry.contract.data.CodecType; import io.pravega.schemaregistry.contract.data.EncodingInfo; import java.io.IOException; @@ -19,13 +20,14 @@ */ public interface Codec { /** - * String name identifying the Codec Type. This should be same as the codecType that is registered for the group - * in schema registry service. The serializers will use this codec to encode the data and deserializers will find + * Codec Type object that contains a string name identifying the Codec Type. + * This name should be same as the codecType that is registered for the group in schema registry service. 
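A minimal sketch of a custom Codec under this contract, with the encode/decode signatures assumed from the built-in codecs below and a made-up codec type name; a real implementation would transform the remaining bytes rather than pass them through:

    class PassThroughCodec implements Codec {
        private static final CodecType MY_TYPE = new CodecType("application/x-mycodec");   // hypothetical name

        @Override
        public CodecType getCodecType() {
            return MY_TYPE;   // must match the codec type registered for the group
        }

        @Override
        public ByteBuffer encode(ByteBuffer data) {
            return data;      // pass-through for illustration
        }

        @Override
        public ByteBuffer decode(ByteBuffer data) {
            return data;
        }
    }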
+ * The serializers will use this codec to encode the data and deserializers will find * the decoder for the encoded data from {@link EncodingInfo#codecType} * * @return Name of the codec. */ - String getCodecType(); + CodecType getCodecType(); /** * Implementation should encode the remaining bytes in the buffer and return a new ByteBuffer that includes diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractSerializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractSerializer.java index 27df460cc..6bb8b0477 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractSerializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractSerializer.java @@ -69,7 +69,7 @@ private void initialize() { version = client.getVersionForSchema(groupId, schemaInfo); } if (encodeHeader) { - encodingId.set(client.getEncodingId(groupId, version, codec.getCodecType())); + encodingId.set(client.getEncodingId(groupId, version, codec.getCodecType().getName())); } } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/Codecs.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/Codecs.java index a03ad2ccc..baf20bbe7 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/Codecs.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/Codecs.java @@ -10,6 +10,7 @@ package io.pravega.schemaregistry.serializers; import io.pravega.schemaregistry.codec.Codec; +import io.pravega.schemaregistry.contract.data.CodecType; import lombok.Getter; import org.xerial.snappy.Snappy; @@ -24,24 +25,26 @@ * Utility class for creating codecs for none, snappy or gzip. */ public enum Codecs { - None(Constants.NOOP, Constants.NONE), - GzipCompressor(Constants.GZIP_CODEC, Constants.APPLICATION_X_GZIP), - SnappyCompressor(Constants.SNAPPY_CODEC, Constants.APPLICATION_X_SNAPPY_FRAMED); + None(Constants.NOOP, Constants.NOOP.getCodecType()), + GzipCompressor(Constants.GZIP_CODEC, Constants.GZIP_CODEC.getCodecType()), + SnappyCompressor(Constants.SNAPPY_CODEC, Constants.SNAPPY_CODEC.getCodecType()); @Getter private final Codec codec; @Getter - private final String mimeType; + private final CodecType codecType; - Codecs(Codec codec, String mimeType) { + Codecs(Codec codec, CodecType codecType) { this.codec = codec; - this.mimeType = mimeType; + this.codecType = codecType; } private static class Noop implements Codec { + private static final CodecType CODEC_TYPE_NONE = new CodecType(Constants.NONE); + @Override - public String getCodecType() { - return Constants.NONE; + public CodecType getCodecType() { + return CODEC_TYPE_NONE; } @Override @@ -56,9 +59,10 @@ public ByteBuffer decode(ByteBuffer data) { } private static class GZipCodec implements Codec { + private static final CodecType CODEC_TYPE_GZIP = new CodecType(Constants.APPLICATION_X_GZIP); @Override - public String getCodecType() { - return Constants.APPLICATION_X_GZIP; + public CodecType getCodecType() { + return CODEC_TYPE_GZIP; } @Override @@ -90,9 +94,10 @@ public ByteBuffer decode(ByteBuffer data) throws IOException { } private static class SnappyCodec implements Codec { + private static final CodecType CODEC_TYPE_SNAPPY = new CodecType(Constants.APPLICATION_X_SNAPPY_FRAMED); @Override - public String getCodecType() { - return Constants.APPLICATION_X_SNAPPY_FRAMED; + public CodecType getCodecType() { + return CODEC_TYPE_SNAPPY; } @Override diff --git 
a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java index b3f9d8c15..779efc82d 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java @@ -15,6 +15,7 @@ import io.pravega.schemaregistry.client.SchemaRegistryClientConfig; import io.pravega.schemaregistry.codec.Codec; import io.pravega.schemaregistry.common.Either; +import io.pravega.schemaregistry.contract.data.CodecType; import io.pravega.schemaregistry.contract.data.Compatibility; import io.pravega.schemaregistry.contract.data.EncodingInfo; import io.pravega.schemaregistry.contract.data.GroupProperties; @@ -135,7 +136,7 @@ public static final class SerializerConfigBuilder { * @param decoder decoder function to use for decoding the data. * @return Builder. */ - public SerializerConfigBuilder addDecoder(String codecType, Function decoder) { + public SerializerConfigBuilder addDecoder(CodecType codecType, Function decoder) { this.decoder = new Decoder(codecType, decoder); return this; } @@ -208,9 +209,9 @@ public SerializerConfigBuilder registryConfig(SchemaRegistryClientConfig config) } static class Decoder { - private static final BiFunction DEFAULT = (x, y) -> { + private static final BiFunction DEFAULT = (x, y) -> { try { - switch (x) { + switch (x.getName()) { case Codecs.Constants.NONE: return Codecs.None.getCodec().decode(y); case Codecs.Constants.APPLICATION_X_GZIP: @@ -226,10 +227,10 @@ static class Decoder { }; @Getter(AccessLevel.PACKAGE) - private final Set codecTypes; - private final BiFunction decoder; + private final Set codecTypes; + private final BiFunction decoder; - private Decoder(String codecType, Function decoder) { + private Decoder(CodecType codecType, Function decoder) { this.decoder = (x, y) -> { if (x.equals(codecType)) { return decoder.apply(y); @@ -238,21 +239,21 @@ private Decoder(String codecType, Function decoder) { } }; codecTypes = new HashSet<>(); - this.codecTypes.add(Codecs.Constants.NONE); - this.codecTypes.add(Codecs.Constants.APPLICATION_X_GZIP); - this.codecTypes.add(Codecs.Constants.APPLICATION_X_SNAPPY_FRAMED); + this.codecTypes.add(Codecs.None.getCodecType()); + this.codecTypes.add(Codecs.GzipCompressor.getCodecType()); + this.codecTypes.add(Codecs.SnappyCompressor.getCodecType()); this.codecTypes.add(codecType); } private Decoder() { this.decoder = DEFAULT; codecTypes = new HashSet<>(); - this.codecTypes.add(Codecs.Constants.NONE); - this.codecTypes.add(Codecs.Constants.APPLICATION_X_GZIP); - this.codecTypes.add(Codecs.Constants.APPLICATION_X_SNAPPY_FRAMED); + this.codecTypes.add(Codecs.None.getCodecType()); + this.codecTypes.add(Codecs.GzipCompressor.getCodecType()); + this.codecTypes.add(Codecs.SnappyCompressor.getCodecType()); } - ByteBuffer decode(String codecType, ByteBuffer bytes) { + ByteBuffer decode(CodecType codecType, ByteBuffer bytes) { return decoder.apply(codecType, bytes); } } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactoryHelper.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactoryHelper.java index 5c6560982..2145b17ae 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactoryHelper.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactoryHelper.java @@ -11,6 +11,7 @@ import 
io.pravega.schemaregistry.client.SchemaRegistryClient; import io.pravega.schemaregistry.client.SchemaRegistryClientFactory; +import io.pravega.schemaregistry.contract.data.CodecType; import lombok.extern.slf4j.Slf4j; import java.util.List; @@ -52,7 +53,7 @@ private static void registerCodec(SchemaRegistryClient client, SerializerConfig private static void failOnCodecMismatch(SchemaRegistryClient client, SerializerConfig config) { if (config.isFailOnCodecMismatch()) { - List codecTypesInGroup = client.getCodecTypes(config.getGroupId()); + List codecTypesInGroup = client.getCodecTypes(config.getGroupId()); if (!config.getDecoder().getCodecTypes().containsAll(codecTypesInGroup)) { log.warn("Not all CodecTypes are supported by reader. Required codecTypes = {}", codecTypesInGroup); throw new RuntimeException(String.format("Need all codecTypes in %s", codecTypesInGroup.toString())); diff --git a/serializers/src/test/java/io/pravega/schemaregistry/codec/CodecTest.java b/serializers/src/test/java/io/pravega/schemaregistry/codec/CodecTest.java index e85664c61..d1c84e4ab 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/codec/CodecTest.java +++ b/serializers/src/test/java/io/pravega/schemaregistry/codec/CodecTest.java @@ -26,21 +26,21 @@ public class CodecTest { public void testCodec() throws IOException { byte[] testStringBytes = "this is a test string".getBytes(Charsets.UTF_8); Codec snappy = Codecs.SnappyCompressor.getCodec(); - assertEquals(snappy.getCodecType(), Codecs.SnappyCompressor.getMimeType()); + assertEquals(snappy.getCodecType(), Codecs.SnappyCompressor.getCodecType()); ByteBuffer encoded = snappy.encode(ByteBuffer.wrap(testStringBytes)); assertFalse(Arrays.equals(encoded.array(), testStringBytes)); ByteBuffer decoded = snappy.decode(encoded); assertTrue(Arrays.equals(decoded.array(), testStringBytes)); Codec gzip = Codecs.GzipCompressor.getCodec(); - assertEquals(gzip.getCodecType(), Codecs.GzipCompressor.getMimeType()); + assertEquals(gzip.getCodecType(), Codecs.GzipCompressor.getCodecType()); encoded = gzip.encode(ByteBuffer.wrap(testStringBytes)); assertFalse(Arrays.equals(encoded.array(), testStringBytes)); decoded = gzip.decode(encoded); assertTrue(Arrays.equals(decoded.array(), testStringBytes)); Codec none = Codecs.None.getCodec(); - assertEquals(none.getCodecType(), Codecs.None.getMimeType()); + assertEquals(none.getCodecType(), Codecs.None.getCodecType()); encoded = none.encode(ByteBuffer.wrap(testStringBytes)); assertTrue(Arrays.equals(encoded.array(), testStringBytes)); decoded = none.decode(encoded); diff --git a/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java b/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java index 6bb96e4c0..cfeb3c863 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java +++ b/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java @@ -79,15 +79,15 @@ public void testAvroSerializers() { doAnswer(x -> versionInfo2).when(client).getVersionForSchema(anyString(), eq(schema2.getSchemaInfo())); doAnswer(x -> new EncodingId(0)).when(client).getEncodingId(anyString(), eq(versionInfo1), any()); doAnswer(x -> new EncodingId(1)).when(client).getEncodingId(anyString(), eq(versionInfo2), any()); - doAnswer(x -> new EncodingInfo(versionInfo1, schema1.getSchemaInfo(), Codecs.None.getMimeType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(0))); - doAnswer(x -> new EncodingInfo(versionInfo2, 
schema2.getSchemaInfo(), Codecs.None.getMimeType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(1))); + doAnswer(x -> new EncodingInfo(versionInfo1, schema1.getSchemaInfo(), Codecs.None.getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(0))); + doAnswer(x -> new EncodingInfo(versionInfo2, schema2.getSchemaInfo(), Codecs.None.getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(1))); doAnswer(x -> true).when(client).canReadUsing(anyString(), any()); AvroSchema of = AvroSchema.of(SchemaDefinitions.ENUM); VersionInfo versionInfo3 = new VersionInfo(of.getSchema().getFullName(), 0, 2); doAnswer(x -> versionInfo3).when(client).getVersionForSchema(anyString(), eq(of.getSchemaInfo())); doAnswer(x -> new EncodingId(2)).when(client).getEncodingId(anyString(), eq(versionInfo3), any()); - doAnswer(x -> new EncodingInfo(versionInfo3, of.getSchemaInfo(), Codecs.None.getMimeType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(2))); + doAnswer(x -> new EncodingInfo(versionInfo3, of.getSchemaInfo(), Codecs.None.getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(2))); Serializer serializerStr = SerializerFactory.avroSerializer(config, of); GenericData.EnumSymbol enumSymbol = new GenericData.EnumSymbol(of.getSchema(), "a"); @@ -160,7 +160,7 @@ public void testAvroSerializersReflect() { .when(client).getGroupProperties(anyString()); doAnswer(x -> versionInfo1).when(client).getVersionForSchema(anyString(), eq(schema1.getSchemaInfo())); doAnswer(x -> new EncodingId(0)).when(client).getEncodingId(anyString(), eq(versionInfo1), any()); - doAnswer(x -> new EncodingInfo(versionInfo1, schema1.getSchemaInfo(), Codecs.None.getMimeType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(0))); + doAnswer(x -> new EncodingInfo(versionInfo1, schema1.getSchemaInfo(), Codecs.None.getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(0))); doAnswer(x -> true).when(client).canReadUsing(anyString(), any()); Serializer serializer = SerializerFactory.avroSerializer(config, schema1); @@ -189,8 +189,8 @@ public void testProtobufSerializers() throws IOException { doAnswer(x -> versionInfo2).when(client).getVersionForSchema(anyString(), eq(schema2.getSchemaInfo())); doAnswer(x -> new EncodingId(0)).when(client).getEncodingId(anyString(), eq(versionInfo1), any()); doAnswer(x -> new EncodingId(1)).when(client).getEncodingId(anyString(), eq(versionInfo2), any()); - doAnswer(x -> new EncodingInfo(versionInfo1, schema1.getSchemaInfo(), Codecs.None.getMimeType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(0))); - doAnswer(x -> new EncodingInfo(versionInfo2, schema2.getSchemaInfo(), Codecs.None.getMimeType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(1))); + doAnswer(x -> new EncodingInfo(versionInfo1, schema1.getSchemaInfo(), Codecs.None.getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(0))); + doAnswer(x -> new EncodingInfo(versionInfo2, schema2.getSchemaInfo(), Codecs.None.getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(1))); doAnswer(x -> true).when(client).canReadUsing(anyString(), any()); Serializer serializer = SerializerFactory.protobufSerializer(config, schema1); @@ -253,8 +253,8 @@ public void testJsonSerializers() throws JsonProcessingException { doAnswer(x -> versionInfo2).when(client).getVersionForSchema(anyString(), eq(schema2.getSchemaInfo())); doAnswer(x -> new 
EncodingId(0)).when(client).getEncodingId(anyString(), eq(versionInfo1), any()); doAnswer(x -> new EncodingId(1)).when(client).getEncodingId(anyString(), eq(versionInfo2), any()); - doAnswer(x -> new EncodingInfo(versionInfo1, schema1.getSchemaInfo(), Codecs.None.getMimeType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(0))); - doAnswer(x -> new EncodingInfo(versionInfo2, schema2.getSchemaInfo(), Codecs.None.getMimeType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(1))); + doAnswer(x -> new EncodingInfo(versionInfo1, schema1.getSchemaInfo(), Codecs.None.getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(0))); + doAnswer(x -> new EncodingInfo(versionInfo2, schema2.getSchemaInfo(), Codecs.None.getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(1))); doAnswer(x -> true).when(client).canReadUsing(anyString(), any()); Serializer serializer = SerializerFactory.jsonSerializer(config, schema1); @@ -282,7 +282,7 @@ public void testJsonSerializers() throws JsonProcessingException { VersionInfo versionInfo3 = new VersionInfo("myData", 0, 2); doAnswer(x -> versionInfo3).when(client).getVersionForSchema(anyString(), eq(myData.getSchemaInfo())); doAnswer(x -> new EncodingId(2)).when(client).getEncodingId(anyString(), eq(versionInfo3), any()); - doAnswer(x -> new EncodingInfo(versionInfo3, myData.getSchemaInfo(), Codecs.None.getMimeType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(2))); + doAnswer(x -> new EncodingInfo(versionInfo3, myData.getSchemaInfo(), Codecs.None.getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(2))); Serializer serializer2 = SerializerFactory.jsonSerializer(config, myData); Map jsonObject = new HashMap<>(); @@ -297,7 +297,7 @@ public void testJsonSerializers() throws JsonProcessingException { VersionInfo versionInfo4 = new VersionInfo("myData", 0, 3); doAnswer(x -> versionInfo4).when(client).getVersionForSchema(anyString(), eq(strSchema.getSchemaInfo())); doAnswer(x -> new EncodingId(3)).when(client).getEncodingId(anyString(), eq(versionInfo4), any()); - doAnswer(x -> new EncodingInfo(versionInfo4, strSchema.getSchemaInfo(), Codecs.None.getMimeType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(3))); + doAnswer(x -> new EncodingInfo(versionInfo4, strSchema.getSchemaInfo(), Codecs.None.getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(3))); Serializer serializer3 = SerializerFactory.jsonSerializer(config, strSchema); Serializer deserializer3 = SerializerFactory.jsonDeserializer(config, strSchema); @@ -367,9 +367,9 @@ public void testMultiformatDeserializers() throws IOException { doAnswer(x -> new EncodingId(0)).when(client).getEncodingId(anyString(), eq(versionInfo1), any()); doAnswer(x -> new EncodingId(1)).when(client).getEncodingId(anyString(), eq(versionInfo2), any()); doAnswer(x -> new EncodingId(2)).when(client).getEncodingId(anyString(), eq(versionInfo3), any()); - doAnswer(x -> new EncodingInfo(versionInfo1, schema1.getSchemaInfo(), Codecs.None.getMimeType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(0))); - doAnswer(x -> new EncodingInfo(versionInfo2, schema2.getSchemaInfo(), Codecs.None.getMimeType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(1))); - doAnswer(x -> new EncodingInfo(versionInfo3, schema3.getSchemaInfo(), Codecs.None.getMimeType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(2))); + doAnswer(x -> new EncodingInfo(versionInfo1, 
schema1.getSchemaInfo(), Codecs.None.getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(0))); + doAnswer(x -> new EncodingInfo(versionInfo2, schema2.getSchemaInfo(), Codecs.None.getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(1))); + doAnswer(x -> new EncodingInfo(versionInfo3, schema3.getSchemaInfo(), Codecs.None.getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(2))); doAnswer(x -> true).when(client).canReadUsing(anyString(), any()); Serializer avroSerializer = SerializerFactory.avroSerializer(config, schema1); From 23e64708b688890473f629f08feda809a571fd79 Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Wed, 8 Jul 2020 03:36:15 -0700 Subject: [PATCH 50/70] rename variable Signed-off-by: Shivesh Ranjan --- .../serializers/AvroSerializerFactory.java | 12 ++++----- .../serializers/CustomSerializerFactory.java | 4 +-- .../serializers/JsonSerializerFactory.java | 22 ++++++++-------- .../MultiFormatSerializerFactory.java | 26 +++++++++---------- .../ProtobufSerializerFactory.java | 22 ++++++++-------- .../serializers/SerializerConfig.java | 11 +++----- .../serializers/SerializerTest.java | 4 +-- 7 files changed, 49 insertions(+), 52 deletions(-) diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroSerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroSerializerFactory.java index 324ba7cf6..a44942845 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroSerializerFactory.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroSerializerFactory.java @@ -31,7 +31,7 @@ class AvroSerializerFactory { static Serializer serializer(SerializerConfig config, AvroSchema schema) { Preconditions.checkNotNull(config); Preconditions.checkNotNull(schema); - Preconditions.checkArgument(config.isTagWithEncodingId(), "Events should be tagged with encoding ids."); + Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids."); SchemaRegistryClient schemaRegistryClient = initForSerializer(config); String groupId = config.getGroupId(); return new AvroSerializer<>(groupId, schemaRegistryClient, schema, config.getCodec(), config.isRegisterSchema()); @@ -40,7 +40,7 @@ static Serializer serializer(SerializerConfig config, AvroSchema schem static Serializer deserializer(SerializerConfig config, AvroSchema schema) { Preconditions.checkNotNull(config); Preconditions.checkNotNull(schema); - Preconditions.checkArgument(config.isTagWithEncodingId(), "Events should be tagged with encoding ids."); + Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids."); SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); String groupId = config.getGroupId(); @@ -51,7 +51,7 @@ static Serializer deserializer(SerializerConfig config, AvroSchema sch static Serializer genericDeserializer(SerializerConfig config, @Nullable AvroSchema schema) { Preconditions.checkNotNull(config); - Preconditions.checkArgument(config.isTagWithEncodingId(), "Events should be tagged with encoding ids."); + Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids."); String groupId = config.getGroupId(); SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); @@ -62,7 +62,7 @@ static Serializer 
genericDeserializer(SerializerConfig config, @Nullable static Serializer multiTypeSerializer(SerializerConfig config, Map, AvroSchema> schemas) { Preconditions.checkNotNull(config); Preconditions.checkNotNull(schemas); - Preconditions.checkArgument(config.isTagWithEncodingId(), "Events should be tagged with encoding ids."); + Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids."); String groupId = config.getGroupId(); SchemaRegistryClient schemaRegistryClient = initForSerializer(config); @@ -77,7 +77,7 @@ static Serializer multiTypeDeserializer( SerializerConfig config, Map, AvroSchema> schemas) { Preconditions.checkNotNull(config); Preconditions.checkNotNull(schemas); - Preconditions.checkArgument(config.isTagWithEncodingId(), "Events should be tagged with encoding ids."); + Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids."); String groupId = config.getGroupId(); SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); @@ -95,7 +95,7 @@ static Serializer> typedOrGenericDeserializer( SerializerConfig config, Map, AvroSchema> schemas) { Preconditions.checkNotNull(config); Preconditions.checkNotNull(schemas); - Preconditions.checkArgument(config.isTagWithEncodingId(), "Events should be tagged with encoding ids."); + Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids."); String groupId = config.getGroupId(); SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/CustomSerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/CustomSerializerFactory.java index d4fb73731..05a2f64bb 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/CustomSerializerFactory.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/CustomSerializerFactory.java @@ -35,7 +35,7 @@ static Serializer serializer(SerializerConfig config, Schema schema, C String groupId = config.getGroupId(); SchemaRegistryClient schemaRegistryClient = initForSerializer(config); return new AbstractSerializer(groupId, schemaRegistryClient, - schema, config.getCodec(), config.isRegisterSchema(), config.isTagWithEncodingId()) { + schema, config.getCodec(), config.isRegisterSchema(), config.isWriteEncodingHeader()) { @Override protected void serialize(T var, SchemaInfo schema, OutputStream outputStream) { serializer.serialize(var, schema, outputStream); @@ -54,7 +54,7 @@ static Serializer deserializer(SerializerConfig config, @Nullable Schema< EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); return new AbstractDeserializer(groupId, schemaRegistryClient, schema, false, - config.getDecoder(), encodingCache, config.isTagWithEncodingId()) { + config.getDecoder(), encodingCache, config.isWriteEncodingHeader()) { @Override protected T deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) { return deserializer.deserialize(inputStream, writerSchema, readerSchema); diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializerFactory.java index 009678a09..5f6061ddb 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializerFactory.java +++ 
b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializerFactory.java @@ -34,7 +34,7 @@ static Serializer serializer(SerializerConfig config, JSONSchema schem String groupId = config.getGroupId(); SchemaRegistryClient schemaRegistryClient = initForSerializer(config); return new JsonSerializer<>(groupId, schemaRegistryClient, schema, config.getCodec(), - config.isRegisterSchema(), config.isTagWithEncodingId()); + config.isRegisterSchema(), config.isWriteEncodingHeader()); } static Serializer deserializer(SerializerConfig config, JSONSchema schema) { @@ -47,7 +47,7 @@ static Serializer deserializer(SerializerConfig config, JSONSchema sch // schema can be null in which case deserialization will happen into dynamic message return new JsonDeserlizer<>(groupId, schemaRegistryClient, schema, config.getDecoder(), encodingCache, - config.isTagWithEncodingId()); + config.isWriteEncodingHeader()); } static Serializer> genericDeserializer(SerializerConfig config) { @@ -59,7 +59,7 @@ static Serializer> genericDeserializer(SerializerConfig con EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); return new JsonGenericDeserializer(groupId, schemaRegistryClient, config.getDecoder(), - encodingCache, config.isTagWithEncodingId()); + encodingCache, config.isWriteEncodingHeader()); } static Serializer jsonStringDeserializer(SerializerConfig config) { @@ -70,20 +70,20 @@ static Serializer jsonStringDeserializer(SerializerConfig config) { EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - return new JsonStringDeserializer(groupId, schemaRegistryClient, config.getDecoder(), encodingCache, config.isTagWithEncodingId()); + return new JsonStringDeserializer(groupId, schemaRegistryClient, config.getDecoder(), encodingCache, config.isWriteEncodingHeader()); } static Serializer multiTypeSerializer( SerializerConfig config, Map, JSONSchema> schemas) { Preconditions.checkNotNull(config); Preconditions.checkNotNull(schemas); - Preconditions.checkArgument(config.isTagWithEncodingId(), "Events should be tagged with encoding ids."); + Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids."); String groupId = config.getGroupId(); SchemaRegistryClient schemaRegistryClient = initForSerializer(config); Map, AbstractSerializer> serializerMap = schemas .entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, x -> new JsonSerializer<>(groupId, schemaRegistryClient, x.getValue(), config.getCodec(), - config.isRegisterSchema(), config.isTagWithEncodingId()))); + config.isRegisterSchema(), config.isWriteEncodingHeader()))); return new MultiplexedSerializer<>(serializerMap); } @@ -91,7 +91,7 @@ static Serializer multiTypeDeserializer( SerializerConfig config, Map, JSONSchema> schemas) { Preconditions.checkNotNull(config); Preconditions.checkNotNull(schemas); - Preconditions.checkArgument(config.isTagWithEncodingId(), "Events should be tagged with encoding ids."); + Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids."); String groupId = config.getGroupId(); SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); @@ -100,7 +100,7 @@ static Serializer multiTypeDeserializer( Map> deserializerMap = schemas .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), x -> new JsonDeserlizer<>(groupId, schemaRegistryClient, x, config.getDecoder(), - encodingCache, config.isTagWithEncodingId()))); + encodingCache, 
config.isWriteEncodingHeader()))); return new MultiplexedDeserializer<>(groupId, schemaRegistryClient, deserializerMap, config.getDecoder(), encodingCache); } @@ -109,7 +109,7 @@ static Serializer>> typedOrGenericDeserialize SerializerConfig config, Map, JSONSchema> schemas) { Preconditions.checkNotNull(config); Preconditions.checkNotNull(schemas); - Preconditions.checkArgument(config.isTagWithEncodingId(), "Events should be tagged with encoding ids."); + Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids."); String groupId = config.getGroupId(); SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); @@ -118,9 +118,9 @@ static Serializer>> typedOrGenericDeserialize Map> deserializerMap = schemas .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), x -> new JsonDeserlizer<>(groupId, schemaRegistryClient, x, config.getDecoder(), encodingCache, - config.isTagWithEncodingId()))); + config.isWriteEncodingHeader()))); JsonGenericDeserializer genericDeserializer = new JsonGenericDeserializer(groupId, schemaRegistryClient, config.getDecoder(), - encodingCache, config.isTagWithEncodingId()); + encodingCache, config.isWriteEncodingHeader()); return new MultiplexedAndGenericDeserializer<>(groupId, schemaRegistryClient, deserializerMap, genericDeserializer, config.getDecoder(), encodingCache); diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatSerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatSerializerFactory.java index d8a993c25..06ae68a56 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatSerializerFactory.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatSerializerFactory.java @@ -43,13 +43,13 @@ class MultiFormatSerializerFactory { // region multi format static Serializer> serializer(SerializerConfig config) { Preconditions.checkNotNull(config); - Preconditions.checkArgument(config.isTagWithEncodingId(), "Events should be tagged with encoding ids."); + Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids."); return serializerInternal(config, Collections.emptyMap()); } static Serializer> deserializerWithSchema(SerializerConfig config) { Preconditions.checkNotNull(config); - Preconditions.checkArgument(config.isTagWithEncodingId(), "Events should be tagged with encoding ids."); + Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids."); return deserializerInternal(config, Collections.emptyMap(), NO_TRANSFORM); } @@ -73,7 +73,7 @@ static Serializer deserializeAsT(SerializerConfig config, BiFunction transform) { Preconditions.checkNotNull(config); Preconditions.checkNotNull(transform); - Preconditions.checkArgument(config.isTagWithEncodingId(), "Events should be tagged with encoding ids."); + Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids."); return deserializeAsTInternal(config, Collections.emptyMap(), transform); } // endregion @@ -81,7 +81,7 @@ static Serializer deserializeAsT(SerializerConfig config, private static Serializer> serializerInternal(SerializerConfig config, Map> customSerializers) { Preconditions.checkNotNull(config); - Preconditions.checkArgument(config.isTagWithEncodingId(), "Events should be tagged with encoding ids."); + 
Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids."); SchemaRegistryClient schemaRegistryClient = initForSerializer(config); String groupId = config.getGroupId(); @@ -99,9 +99,9 @@ private static Serializer deserializeAsTInternal(SerializerConfig config, EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); AbstractDeserializer json = new JsonGenericDeserializer(config.getGroupId(), schemaRegistryClient, - config.getDecoder(), encodingCache, config.isTagWithEncodingId()); + config.getDecoder(), encodingCache, config.isWriteEncodingHeader()); AbstractDeserializer protobuf = new ProtobufGenericDeserlizer(groupId, schemaRegistryClient, null, config.getDecoder(), - encodingCache, config.isTagWithEncodingId()); + encodingCache, config.isWriteEncodingHeader()); AbstractDeserializer avro = new AvroGenericDeserlizer(groupId, schemaRegistryClient, null, config.getDecoder(), encodingCache); @@ -112,7 +112,7 @@ private static Serializer deserializeAsTInternal(SerializerConfig config, deserializers.forEach((key, value) -> { map.put(key, new AbstractDeserializer(groupId, schemaRegistryClient, null, false, - config.getDecoder(), encodingCache, config.isTagWithEncodingId()) { + config.getDecoder(), encodingCache, config.isWriteEncodingHeader()) { @Override protected Object deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) { return value.deserialize(inputStream, writerSchema, readerSchema); @@ -131,9 +131,9 @@ private static Serializer> deserializerInternal(SerializerConf EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); AbstractDeserializer json = new JsonGenericDeserializer(config.getGroupId(), schemaRegistryClient, - config.getDecoder(), encodingCache, config.isTagWithEncodingId()); + config.getDecoder(), encodingCache, config.isWriteEncodingHeader()); AbstractDeserializer protobuf = new ProtobufGenericDeserlizer(groupId, schemaRegistryClient, null, config.getDecoder(), - encodingCache, config.isTagWithEncodingId()); + encodingCache, config.isWriteEncodingHeader()); AbstractDeserializer avro = new AvroGenericDeserlizer(groupId, schemaRegistryClient, null, config.getDecoder(), encodingCache); @@ -144,7 +144,7 @@ private static Serializer> deserializerInternal(SerializerConf deserializers.forEach((key, value) -> { map.put(key, new AbstractDeserializer(groupId, schemaRegistryClient, null, false, - config.getDecoder(), encodingCache, config.isTagWithEncodingId()) { + config.getDecoder(), encodingCache, config.isWriteEncodingHeader()) { @Override protected Object deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) { return value.deserialize(inputStream, writerSchema, readerSchema); @@ -166,11 +166,11 @@ private static AbstractSerializer getPravegaSerializer( AvroSchema.from(schemaInfo), config.getCodec(), config.isRegisterSchema()); case Protobuf: ProtobufSerializer m = new ProtobufSerializer<>(groupId, schemaRegistryClient, - ProtobufSchema.from(schemaInfo), config.getCodec(), config.isRegisterSchema(), config.isTagWithEncodingId()); + ProtobufSchema.from(schemaInfo), config.getCodec(), config.isRegisterSchema(), config.isWriteEncodingHeader()); return (AbstractSerializer) m; case Json: return new JsonSerializer<>(groupId, schemaRegistryClient, JSONSchema.from(schemaInfo), - config.getCodec(), config.isRegisterSchema(), config.isTagWithEncodingId()); + config.getCodec(), config.isRegisterSchema(), 
config.isWriteEncodingHeader()); case Custom: return getCustomSerializer(config, customSerializers, schemaRegistryClient, groupId, schemaInfo); default: @@ -184,7 +184,7 @@ private static AbstractSerializer getCustomSerializer( if (customSerializers.containsKey(schemaInfo.getSerializationFormat())) { CustomSerializer serializer = customSerializers.get(schemaInfo.getSerializationFormat()); return new AbstractSerializer(groupId, schemaRegistryClient, - () -> schemaInfo, config.getCodec(), config.isRegisterSchema(), config.isTagWithEncodingId()) { + () -> schemaInfo, config.getCodec(), config.isRegisterSchema(), config.isWriteEncodingHeader()) { @Override protected void serialize(Object var, SchemaInfo schema, OutputStream outputStream) { serializer.serialize(var, schema, outputStream); diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufSerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufSerializerFactory.java index fa568fd95..9f08b5df2 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufSerializerFactory.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufSerializerFactory.java @@ -38,7 +38,7 @@ static Serializer serializer(SerializerConfig config, String groupId = config.getGroupId(); SchemaRegistryClient schemaRegistryClient = initForSerializer(config); return new ProtobufSerializer<>(groupId, schemaRegistryClient, schema, config.getCodec(), - config.isRegisterSchema(), config.isTagWithEncodingId()); + config.isRegisterSchema(), config.isWriteEncodingHeader()); } static Serializer deserializer(SerializerConfig config, @@ -52,12 +52,12 @@ static Serializer deserializer(SerializerConfi // schema can be null in which case deserialization will happen into dynamic message return new ProtobufDeserlizer<>(groupId, schemaRegistryClient, schema, config.getDecoder(), encodingCache, - config.isTagWithEncodingId()); + config.isWriteEncodingHeader()); } static Serializer genericDeserializer(SerializerConfig config, @Nullable ProtobufSchema schema) { Preconditions.checkNotNull(config); - Preconditions.checkArgument(schema != null || config.isTagWithEncodingId(), + Preconditions.checkArgument(schema != null || config.isWriteEncodingHeader(), "Either read schema should be supplied or events should be tagged with encoding ids."); SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); @@ -65,21 +65,21 @@ static Serializer genericDeserializer(SerializerConfig config, @ EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); return new ProtobufGenericDeserlizer(groupId, schemaRegistryClient, schema, config.getDecoder(), encodingCache, - config.isTagWithEncodingId()); + config.isWriteEncodingHeader()); } static Serializer multiTypeSerializer( SerializerConfig config, Map, ProtobufSchema> schemas) { Preconditions.checkNotNull(config); Preconditions.checkNotNull(schemas); - Preconditions.checkArgument(config.isTagWithEncodingId(), "Events should be tagged with encoding ids."); + Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids."); String groupId = config.getGroupId(); SchemaRegistryClient schemaRegistryClient = initForSerializer(config); Map, AbstractSerializer> serializerMap = schemas .entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, x -> new ProtobufSerializer<>(groupId, schemaRegistryClient, x.getValue(), config.getCodec(), - config.isRegisterSchema(), 
config.isTagWithEncodingId()))); + config.isRegisterSchema(), config.isWriteEncodingHeader()))); return new MultiplexedSerializer<>(serializerMap); } @@ -87,7 +87,7 @@ static Serializer multiTypeDeserializer( SerializerConfig config, Map, ProtobufSchema> schemas) { Preconditions.checkNotNull(config); Preconditions.checkNotNull(schemas); - Preconditions.checkArgument(config.isTagWithEncodingId(), "Events should be tagged with encoding ids."); + Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids."); String groupId = config.getGroupId(); SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); @@ -96,7 +96,7 @@ static Serializer multiTypeDeserializer( Map> deserializerMap = schemas .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), x -> new ProtobufDeserlizer<>(groupId, schemaRegistryClient, x, config.getDecoder(), encodingCache, - config.isTagWithEncodingId()))); + config.isWriteEncodingHeader()))); return new MultiplexedDeserializer<>(groupId, schemaRegistryClient, deserializerMap, config.getDecoder(), encodingCache); } @@ -104,7 +104,7 @@ static Serializer> type SerializerConfig config, Map, ProtobufSchema> schemas) { Preconditions.checkNotNull(config); Preconditions.checkNotNull(schemas); - Preconditions.checkArgument(config.isTagWithEncodingId(), "Events should be tagged with encoding ids."); + Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids."); String groupId = config.getGroupId(); SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); @@ -113,9 +113,9 @@ static Serializer> type Map> deserializerMap = schemas .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), x -> new ProtobufDeserlizer<>(groupId, schemaRegistryClient, x, config.getDecoder(), encodingCache, - config.isTagWithEncodingId()))); + config.isWriteEncodingHeader()))); ProtobufGenericDeserlizer genericDeserializer = new ProtobufGenericDeserlizer(groupId, schemaRegistryClient, null, - config.getDecoder(), encodingCache, config.isTagWithEncodingId()); + config.getDecoder(), encodingCache, config.isWriteEncodingHeader()); return new MultiplexedAndGenericDeserializer<>(groupId, schemaRegistryClient, deserializerMap, genericDeserializer, config.getDecoder(), encodingCache); } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java index 779efc82d..a6d0bbc05 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java @@ -74,13 +74,11 @@ public class SerializerConfig { * Tells the deserializer that if supplied decoder codecTypes do not match group codecTypes then fail and exit upfront. */ private final boolean failOnCodecMismatch; - /** * Flag to tell the serializer/deserializer if the group should be created automatically. * It is recommended to register keep this flag as false in production systems and create groups and add schemas */ private final boolean createGroup; - /** * Flag to tell the serializer/deserializer if the encoding id should be added as a header with each event. * By default this is set to true. 
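// A minimal usage sketch (not part of this patch) of the renamed writeEncodingHeader
// builder option; it mirrors the pattern used in SerializerTest below, where the header
// is disabled for raw events. "client" is assumed to be an existing SchemaRegistryClient
// and the group id is a placeholder.
SerializerConfig headerless = SerializerConfig.builder()
        .registryClient(client)
        .groupId("groupId")
        .writeEncodingHeader(false) // defaults to true; disable only if every writer and reader of the stream does
        .build();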
If users choose to not add the header, they should do so in all their writer and @@ -91,8 +89,7 @@ public class SerializerConfig { * If streams can have multiple types of events, this cannot be false. * If streams can multiple formats of events, this cannot be false. */ - private final boolean tagWithEncodingId; - + private final boolean writeEncodingHeader; /** * Group properties to use for creating the group if createGroup is set to true. */ @@ -100,8 +97,7 @@ public class SerializerConfig { private SerializerConfig(String groupId, Either registryConfigOrClient, boolean registerSchema, boolean registerCodec, Codec codec, Decoder decoder, boolean failOnCodecMismatch, - boolean createGroup, boolean tagWithEncodingId, GroupProperties groupProperties) { - this.tagWithEncodingId = tagWithEncodingId; + boolean createGroup, boolean writeEncodingHeader, boolean validateObject, GroupProperties groupProperties) { Preconditions.checkArgument(!Strings.isNullOrEmpty(groupId), "Group id needs to be supplied"); Preconditions.checkArgument(registryConfigOrClient != null, "Either registry client or config needs to be supplied"); this.groupId = groupId; @@ -112,6 +108,7 @@ private SerializerConfig(String groupId, Either registryConfigOrClient = null; private GroupProperties groupProperties = GroupProperties.builder().build(); diff --git a/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java b/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java index cfeb3c863..95134ba2e 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java +++ b/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java @@ -408,7 +408,7 @@ public void testMultiformatDeserializers() throws IOException { public void testNoEncodingProto() throws IOException { SchemaRegistryClient client = mock(SchemaRegistryClient.class); SerializerConfig config = SerializerConfig.builder().registryClient(client).groupId("groupId") - .tagWithEncodingId(false).build(); + .writeEncodingHeader(false).build(); Path path = Paths.get("src/test/resources/proto/protobufTest.pb"); byte[] schemaBytes = Files.readAllBytes(path); DescriptorProtos.FileDescriptorSet descriptorSet = DescriptorProtos.FileDescriptorSet.parseFrom(schemaBytes); @@ -449,7 +449,7 @@ public void testNoEncodingProto() throws IOException { public void testNoEncodingJson() throws IOException { SchemaRegistryClient client = mock(SchemaRegistryClient.class); SerializerConfig config = SerializerConfig.builder().registryClient(client).groupId("groupId") - .tagWithEncodingId(false).build(); + .writeEncodingHeader(false).build(); JSONSchema schema1 = JSONSchema.of(DerivedUser1.class); VersionInfo versionInfo1 = new VersionInfo("name", 0, 0); From 4925759ebfaf5fc81f27ed7fc0b3e964b516f958 Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Wed, 8 Jul 2020 03:42:05 -0700 Subject: [PATCH 51/70] remove validate object Signed-off-by: Shivesh Ranjan --- .../io/pravega/schemaregistry/serializers/SerializerConfig.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java index a6d0bbc05..cf6fb382c 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java @@ -97,7 +97,7 @@ 
public class SerializerConfig { private SerializerConfig(String groupId, Either registryConfigOrClient, boolean registerSchema, boolean registerCodec, Codec codec, Decoder decoder, boolean failOnCodecMismatch, - boolean createGroup, boolean writeEncodingHeader, boolean validateObject, GroupProperties groupProperties) { + boolean createGroup, boolean writeEncodingHeader, GroupProperties groupProperties) { Preconditions.checkArgument(!Strings.isNullOrEmpty(groupId), "Group id needs to be supplied"); Preconditions.checkArgument(registryConfigOrClient != null, "Either registry client or config needs to be supplied"); this.groupId = groupId; From 1dafddbdb3f9962e8db6779040779e590090d281 Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Wed, 8 Jul 2020 06:17:57 -0700 Subject: [PATCH 52/70] ProtobufSchema Signed-off-by: Shivesh Ranjan --- .../schemas/ProtobufSchema.java | 122 +++++++++++++----- 1 file changed, 91 insertions(+), 31 deletions(-) diff --git a/serializers/src/main/java/io/pravega/schemaregistry/schemas/ProtobufSchema.java b/serializers/src/main/java/io/pravega/schemaregistry/schemas/ProtobufSchema.java index e057f3e23..56b3bead5 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/schemas/ProtobufSchema.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/schemas/ProtobufSchema.java @@ -10,7 +10,6 @@ package io.pravega.schemaregistry.schemas; import com.google.common.collect.ImmutableMap; -import com.google.protobuf.DescriptorProtos; import com.google.protobuf.DynamicMessage; import com.google.protobuf.GeneratedMessageV3; import com.google.protobuf.InvalidProtocolBufferException; @@ -24,9 +23,11 @@ import java.lang.reflect.InvocationTargetException; import java.nio.ByteBuffer; +import static com.google.protobuf.DescriptorProtos.*; + /** * Container class for protobuf schema. - * Protobuf schemas are represented using {@link com.google.protobuf.DescriptorProtos.FileDescriptorSet}. + * Protobuf schemas are represented using {@link FileDescriptorSet}. * * @param Type of element. */ @@ -35,17 +36,17 @@ public class ProtobufSchema implements Schema { @Getter private final Parser parser; @Getter - private final DescriptorProtos.FileDescriptorSet descriptorProto; + private final FileDescriptorSet descriptorProto; private final SchemaInfo schemaInfo; - private ProtobufSchema(String name, Parser parser, DescriptorProtos.FileDescriptorSet fileDescriptorSet) { + private ProtobufSchema(String name, Parser parser, FileDescriptorSet fileDescriptorSet) { this.parser = parser; this.descriptorProto = fileDescriptorSet; this.schemaInfo = new SchemaInfo(name, SerializationFormat.Protobuf, getSchemaBytes(), ImmutableMap.of()); } - private ProtobufSchema(DescriptorProtos.FileDescriptorSet fileDescriptorSet, SchemaInfo schemaInfo) { + private ProtobufSchema(FileDescriptorSet fileDescriptorSet, SchemaInfo schemaInfo) { this.parser = null; this.descriptorProto = fileDescriptorSet; this.schemaInfo = schemaInfo; @@ -61,28 +62,36 @@ public SchemaInfo getSchemaInfo() { } /** - * Method to generate protobuf schema from the supplied protobuf generated class and {@link DescriptorProtos.FileDescriptorSet}. + * Method to generate protobuf schema from the supplied protobuf generated class. + * If the description of protobuf object is contained in a single .proto file, then this method creates the + * {@link FileDescriptorSet} from the generated class. + * + * @param tClass Class for code generated protobuf message. 
+ * @param Type of protobuf message + * @return {@link ProtobufSchema} with generic type T that captures protobuf schema and parser. + */ + public static ProtobufSchema of(Class tClass) { + Extractor extractor = new Extractor<>(tClass).invoke(); + + return new ProtobufSchema(extractor.getFullName(), extractor.getParser(), + extractor.getFileDescriptorSet()); + } + + /** + * Method to generate protobuf schema from the supplied protobuf generated class and {@link FileDescriptorSet}. * * @param tClass Class for code generated protobuf message. * @param fileDescriptorSet file descriptor set representing a protobuf schema. * @param Type of protobuf message * @return {@link ProtobufSchema} with generic type T that captures protobuf schema and parser. */ - @SuppressWarnings("unchecked") - public static ProtobufSchema of(Class tClass, - DescriptorProtos.FileDescriptorSet fileDescriptorSet) { - T defaultInstance; - try { - defaultInstance = (T) tClass.getMethod("getDefaultInstance").invoke(null); - } catch (IllegalAccessException | InvocationTargetException | NoSuchMethodException e) { - throw new IllegalArgumentException(e); - } - Parser tParser = (Parser) defaultInstance.getParserForType(); - return new ProtobufSchema<>(defaultInstance.getDescriptorForType().getFullName(), tParser, fileDescriptorSet); + public static ProtobufSchema of(Class tClass, FileDescriptorSet fileDescriptorSet) { + Extractor extractor = new Extractor<>(tClass).invoke(); + return new ProtobufSchema(extractor.getFullName(), extractor.getParser(), fileDescriptorSet); } /** - * Method to generate protobuf schema of generic type {@link DynamicMessage} using the {@link DescriptorProtos.FileDescriptorSet}. + * Method to generate protobuf schema of generic type {@link DynamicMessage} using the {@link FileDescriptorSet}. * It is for representing protobuf schemas to be used for generic deserialization of protobuf serialized payload into * {@link DynamicMessage}. * Note: this does not have a protobuf parser and can only be used during deserialization. @@ -91,13 +100,13 @@ public static ProtobufSchema of(Class tClas * @param fileDescriptorSet file descriptor set representing a protobuf schema. * @return {@link ProtobufSchema} with generic type {@link DynamicMessage} that captures protobuf schema. */ - public static ProtobufSchema of(String name, DescriptorProtos.FileDescriptorSet fileDescriptorSet) { + public static ProtobufSchema of(String name, FileDescriptorSet fileDescriptorSet) { return new ProtobufSchema<>(name, null, fileDescriptorSet); } /** - * Method to generate protobuf schema from the supplied protobuf generated class and {@link DescriptorProtos.FileDescriptorSet}. - * It is same as {@link #of(Class, DescriptorProtos.FileDescriptorSet)} except that it returns a Protobuf schema + * Method to generate protobuf schema from the supplied protobuf generated class and {@link FileDescriptorSet}. + * It is same as {@link #of(Class, FileDescriptorSet)} except that it returns a Protobuf schema * typed {@link GeneratedMessageV3}. * It is useful in multiplexed deserializer to pass all objects to deserialize into as base {@link GeneratedMessageV3} objects. 
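// A usage sketch of the new of(Class) overload added above; the generated message type
// ProtobufTest.Message1 is the same one exercised in TestSchemas later in this patch,
// so only the variable name is illustrative.
ProtobufSchema<ProtobufTest.Message1> message1Schema = ProtobufSchema.of(ProtobufTest.Message1.class);
// The returned schema carries both the parser and a FileDescriptorSet built from the
// generated class's descriptor, so no separately generated descriptor file is needed here.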
* @@ -108,16 +117,29 @@ public static ProtobufSchema of(String name, DescriptorProtos.Fi */ @SuppressWarnings("unchecked") public static ProtobufSchema ofGeneratedMessageV3( - Class tClass, DescriptorProtos.FileDescriptorSet fileDescriptorSet) { - T defaultInstance; - try { - defaultInstance = (T) tClass.getMethod("getDefaultInstance").invoke(null); - } catch (IllegalAccessException | InvocationTargetException | NoSuchMethodException e) { - throw new IllegalArgumentException(e); - } - Parser tParser = (Parser) defaultInstance.getParserForType(); + Class tClass, FileDescriptorSet fileDescriptorSet) { + Extractor extractor = new Extractor<>(tClass).invoke(); - return new ProtobufSchema<>(defaultInstance.getDescriptorForType().getFullName(), tParser, fileDescriptorSet); + return new ProtobufSchema<>(extractor.getFullName(), (Parser) extractor.getParser(), fileDescriptorSet); + } + + /** + * Method to generate protobuf schema from the supplied protobuf generated class. It creates the {@link FileDescriptorSet} + * from the generated class. + * This method is same as {@link #of(Class, FileDescriptorSet)} except that it returns a Protobuf schema + * typed {@link GeneratedMessageV3}. + * It is useful in multiplexed deserializer to pass all objects to deserialize into as base {@link GeneratedMessageV3} objects. + * + * @param tClass Class for code generated protobuf message. + * @param Type of protobuf message + * @return {@link ProtobufSchema} with generic type {@link GeneratedMessageV3} that captures protobuf schema and parser of type T. + */ + @SuppressWarnings("unchecked") + public static ProtobufSchema ofGeneratedMessageV3(Class tClass) { + Extractor extractor = new Extractor<>(tClass).invoke(); + + return new ProtobufSchema<>(extractor.getFullName(), + (Parser) extractor.getParser(), extractor.getFileDescriptorSet()); } /** @@ -128,12 +150,50 @@ public static ProtobufSchema */ public static ProtobufSchema from(SchemaInfo schemaInfo) { try { - DescriptorProtos.FileDescriptorSet fileDescriptorSet = DescriptorProtos.FileDescriptorSet.parseFrom(schemaInfo.getSchemaData()); + FileDescriptorSet fileDescriptorSet = FileDescriptorSet.parseFrom(schemaInfo.getSchemaData()); return new ProtobufSchema<>(fileDescriptorSet, schemaInfo); } catch (InvalidProtocolBufferException ex) { throw new IllegalArgumentException(ex); } } + + private static class Extractor { + private Class tClass; + private T defaultInstance; + private Parser tParser; + + Extractor(Class tClass) { + this.tClass = tClass; + } + + T getDefaultInstance() { + return defaultInstance; + } + + Parser getParser() { + return tParser; + } + + String getFullName() { + return defaultInstance.getDescriptorForType().getFullName(); + } + + FileDescriptorSet getFileDescriptorSet() { + return FileDescriptorSet + .newBuilder().addFile(defaultInstance.getDescriptorForType().getFile().toProto()).build(); + } + + @SuppressWarnings("unchecked") + Extractor invoke() { + try { + defaultInstance = (T) tClass.getMethod("getDefaultInstance").invoke(null); + } catch (IllegalAccessException | InvocationTargetException | NoSuchMethodException e) { + throw new IllegalArgumentException(e); + } + tParser = (Parser) defaultInstance.getParserForType(); + return this; + } + } } From 53b8f9c326a540f2b511cc4ec32d3a1b3221b2f5 Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Thu, 9 Jul 2020 22:10:10 -0700 Subject: [PATCH 53/70] minor refactoring Signed-off-by: Shivesh Ranjan --- .../serializers/AbstractDeserializer.java | 6 +- .../serializers/AbstractSerializer.java | 4 
+- .../serializers/BaseDeserializer.java | 21 ++++++ .../serializers/BaseSerializer.java | 21 ++++++ .../serializers/SerializerConfig.java | 71 ++++++++++++------- .../serializers/SerializerFactory.java | 2 +- .../schemaregistry/schemas/TestSchemas.java | 15 ++++ 7 files changed, 108 insertions(+), 32 deletions(-) create mode 100644 serializers/src/main/java/io/pravega/schemaregistry/serializers/BaseDeserializer.java create mode 100644 serializers/src/main/java/io/pravega/schemaregistry/serializers/BaseSerializer.java diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractDeserializer.java index aa13f8b88..b3f9871dc 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractDeserializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractDeserializer.java @@ -12,7 +12,6 @@ import io.pravega.schemaregistry.client.SchemaRegistryClient; import io.pravega.schemaregistry.contract.data.EncodingId; import io.pravega.schemaregistry.contract.data.EncodingInfo; -import io.pravega.schemaregistry.contract.data.GroupProperties; import io.pravega.schemaregistry.contract.data.SchemaInfo; import io.pravega.schemaregistry.schemas.Schema; import lombok.SneakyThrows; @@ -26,8 +25,7 @@ import java.nio.ByteBuffer; @Slf4j -abstract class AbstractDeserializer extends FailingSerializer { - private static final byte PROTOCOL = 0x0; +abstract class AbstractDeserializer extends BaseDeserializer { private static final int HEADER_SIZE = 1 + Integer.BYTES; private final String groupId; @@ -60,8 +58,6 @@ protected AbstractDeserializer(String groupId, @Synchronized private void initialize() { - GroupProperties groupProperties = client.getGroupProperties(groupId); - if (schemaInfo != null) { log.info("Validate caller supplied schema."); if (!client.canReadUsing(groupId, schemaInfo)) { diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractSerializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractSerializer.java index 6bb8b0477..570f07f26 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractSerializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractSerializer.java @@ -25,8 +25,8 @@ import java.nio.ByteBuffer; import java.util.concurrent.atomic.AtomicReference; -abstract class AbstractSerializer extends FailingSerializer { - private static final byte PROTOCOL = 0x0; +abstract class AbstractSerializer extends BaseSerializer { + private static final byte PROTOCOL = 0x1; private final String groupId; diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/BaseDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/BaseDeserializer.java new file mode 100644 index 000000000..07538d1f2 --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/BaseDeserializer.java @@ -0,0 +1,21 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import io.pravega.client.stream.Serializer; + +import java.nio.ByteBuffer; + +abstract class BaseDeserializer implements Serializer { + @Override + public final ByteBuffer serialize(T value) { + throw new IllegalStateException(); + } +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/BaseSerializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/BaseSerializer.java new file mode 100644 index 000000000..7d77bda36 --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/BaseSerializer.java @@ -0,0 +1,21 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.serializers; + +import io.pravega.client.stream.Serializer; + +import java.nio.ByteBuffer; + +abstract class BaseSerializer implements Serializer { + @Override + public final T deserialize(ByteBuffer serializedValue) { + throw new IllegalStateException(); + } +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java index cf6fb382c..c562ca553 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java @@ -24,6 +24,7 @@ import lombok.Builder; import lombok.Data; import lombok.Getter; +import lombok.NonNull; import java.io.IOException; import java.nio.ByteBuffer; @@ -41,12 +42,20 @@ public class SerializerConfig { /** * Name of the group. */ + @NonNull private final String groupId; /** * Either the registry client or the {@link SchemaRegistryClientConfig} that can be used for creating a new registry client. * Exactly one of the two option has to be supplied. */ - private final Either registryConfigOrClient; + @Getter(AccessLevel.NONE) + private final SchemaRegistryClientConfig registryConfig; + /** + * Either the registry client or the {@link SchemaRegistryClientConfig} that can be used for creating a new registry client. + * Exactly one of the two option has to be supplied. + */ + @Getter(AccessLevel.NONE) + private final SchemaRegistryClient registryClient; /** * Flag to tell the serializer if the schema should be automatically registered before using it in {@link io.pravega.client.stream.EventStreamWriter}. * It is recommended to register keep this flag as false in production systems and manage schema evolution explicitly and @@ -78,7 +87,8 @@ public class SerializerConfig { * Flag to tell the serializer/deserializer if the group should be created automatically. * It is recommended to register keep this flag as false in production systems and create groups and add schemas */ - private final boolean createGroup; + @Getter(AccessLevel.NONE) + private final GroupProperties createGroup; /** * Flag to tell the serializer/deserializer if the encoding id should be added as a header with each event. * By default this is set to true. 
If users choose to not add the header, they should do so in all their writer and @@ -90,18 +100,15 @@ public class SerializerConfig { * If streams can multiple formats of events, this cannot be false. */ private final boolean writeEncodingHeader; - /** - * Group properties to use for creating the group if createGroup is set to true. - */ - private final GroupProperties groupProperties; - private SerializerConfig(String groupId, Either registryConfigOrClient, + private SerializerConfig(String groupId, SchemaRegistryClientConfig config, SchemaRegistryClient client, boolean registerSchema, boolean registerCodec, Codec codec, Decoder decoder, boolean failOnCodecMismatch, - boolean createGroup, boolean writeEncodingHeader, GroupProperties groupProperties) { + GroupProperties createGroup, boolean writeEncodingHeader) { Preconditions.checkArgument(!Strings.isNullOrEmpty(groupId), "Group id needs to be supplied"); - Preconditions.checkArgument(registryConfigOrClient != null, "Either registry client or config needs to be supplied"); + Preconditions.checkArgument(client != null || config != null, "Either registry client or config needs to be supplied"); this.groupId = groupId; - this.registryConfigOrClient = registryConfigOrClient; + this.registryClient = client; + this.registryConfig = config; this.registerSchema = registerSchema; this.registerCodec = registerCodec; this.codec = codec; @@ -109,7 +116,22 @@ private SerializerConfig(String groupId, Either getRegistryConfigOrClient() { + if (registryClient == null) { + return Either.left(registryConfig); + } else { + return Either.right(registryClient); + } + } + + public boolean isCreateGroup() { + return createGroup != null; + } + + GroupProperties getGroupProperties() { + return createGroup; } public static final class SerializerConfigBuilder { @@ -119,13 +141,11 @@ public static final class SerializerConfigBuilder { private boolean registerSchema = false; private boolean registerCodec = false; - private boolean createGroup = false; private boolean failOnCodecMismatch = true; private boolean writeEncodingHeader = true; - private Either registryConfigOrClient = null; - - private GroupProperties groupProperties = GroupProperties.builder().build(); - + private SchemaRegistryClientConfig registryConfig = null; + private SchemaRegistryClient registryClient = null; + /** * Add codec type to corresponding decoder function which will be used to decode data encoded using encoding type codecType. * @@ -173,36 +193,39 @@ public SerializerConfigBuilder createGroup(SerializationFormat serializationForm * @return Builder */ public SerializerConfigBuilder createGroup(SerializationFormat serializationFormat, Compatibility policy, boolean allowMultipleTypes) { - this.createGroup = true; - this.groupProperties = new GroupProperties(serializationFormat, policy, allowMultipleTypes); + this.createGroup = new GroupProperties(serializationFormat, policy, allowMultipleTypes); return this; } /** - * Schema Registry client. Either this or config should be supplied. Whichever is supplied later overrides - * the other. + * Schema Registry client. Either of client or config should be supplied. 
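// An illustrative sketch (not from the patch) of the mutual exclusion enforced in the
// builder methods below: registryClient(...) and registryConfig(...) may no longer both
// be supplied. "client" stands for an already constructed SchemaRegistryClient.
SerializerConfig.SerializerConfigBuilder builder = SerializerConfig.builder()
        .groupId("groupId")
        .registryClient(client);
// builder.registryConfig(someConfig); // would now fail checkState with "Cannot specify both client and config"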
* * @param client Schema Registry client * @return Builder */ public SerializerConfigBuilder registryClient(SchemaRegistryClient client) { Preconditions.checkArgument(client != null); - this.registryConfigOrClient = Either.right(client); + Preconditions.checkState(registryConfig == null, "Cannot specify both client and config"); + this.registryClient = client; return this; } /** - * Schema Registry client config. Either this or client should be supplied. Whichever is supplied later overrides - * the other. + * Schema Registry client config which is used to initialize the schema registry client. + * Either config or client should be supplied. * * @param config Schema Registry client configuration. * @return Builder */ public SerializerConfigBuilder registryConfig(SchemaRegistryClientConfig config) { Preconditions.checkArgument(config != null); - this.registryConfigOrClient = Either.left(config); + Preconditions.checkState(registryClient == null, "Cannot specify both client and config"); + this.registryConfig = config; return this; } + + // writeEncoding header vs codec vs decoder + // if codec is supplied } static class Decoder { diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java index dddeadd02..8d9fc49c6 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java @@ -34,7 +34,7 @@ @Slf4j public class SerializerFactory { - public static final String PRAVEGA_EVENT_HEADER = "pravegaEventHeader"; + public static final String PRAVEGA_EVENT_HEADER = "PravegaEventHeader"; // region avro /** diff --git a/serializers/src/test/java/io/pravega/schemaregistry/schemas/TestSchemas.java b/serializers/src/test/java/io/pravega/schemaregistry/schemas/TestSchemas.java index a325d8ed7..247e40928 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/schemas/TestSchemas.java +++ b/serializers/src/test/java/io/pravega/schemaregistry/schemas/TestSchemas.java @@ -58,6 +58,21 @@ public void testAvroSchema() { @Test public void testProtobufSchema() throws IOException { + ProtobufSchema sm1 = ProtobufSchema.of(ProtobufTest.Message1.class); + assertNotNull(sm1.getParser()); + assertNotNull(sm1.getDescriptorProto()); + assertEquals(sm1.getSchemaInfo().getSerializationFormat(), SerializationFormat.Protobuf); + + ProtobufSchema bm1 = ProtobufSchema.ofGeneratedMessageV3(ProtobufTest.Message1.class); + assertNotNull(bm1.getParser()); + assertNotNull(bm1.getDescriptorProto()); + assertEquals(bm1.getSchemaInfo().getSerializationFormat(), SerializationFormat.Protobuf); + + ProtobufSchema bm2 = ProtobufSchema.ofGeneratedMessageV3(ProtobufTest.Message2.class); + assertNotNull(bm2.getParser()); + assertNotNull(bm2.getDescriptorProto()); + assertEquals(bm2.getSchemaInfo().getSerializationFormat(), SerializationFormat.Protobuf); + Path path = Paths.get("src/test/resources/proto/protobufTest.pb"); byte[] schemaBytes = Files.readAllBytes(path); DescriptorProtos.FileDescriptorSet descriptorSet = DescriptorProtos.FileDescriptorSet.parseFrom(schemaBytes); From a744198df7a02d6e1add4e1875ee4ecba168d3bd Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Fri, 10 Jul 2020 06:46:45 -0700 Subject: [PATCH 54/70] PR comments Signed-off-by: Shivesh Ranjan --- gradle.properties | 3 +- .../schemaregistry/GroupIdGenerator.java | 2 +- 
.../pravega/schemaregistry/codec/Codec.java | 2 +- .../schemaregistry/schemas/JSONSchema.java | 43 +++++++++++++++---- .../schemas/ProtobufSchema.java | 18 +++----- .../serializers/AbstractDeserializer.java | 6 ++- .../serializers/AbstractSerializer.java | 38 ++++++++-------- .../schemaregistry/serializers/Codecs.java | 30 +++++-------- ...nDeserlizer.java => JsonDeserializer.java} | 8 ++-- .../serializers/JsonSerializerFactory.java | 6 +-- 10 files changed, 87 insertions(+), 69 deletions(-) rename serializers/src/main/java/io/pravega/schemaregistry/serializers/{JsonDeserlizer.java => JsonDeserializer.java} (85%) diff --git a/gradle.properties b/gradle.properties index 7b0778c49..c34dae725 100644 --- a/gradle.properties +++ b/gradle.properties @@ -26,7 +26,7 @@ gradleLombokPluginVersion=3.2.0 gradleSshPluginVersion=2.9.0 guavaVersion=28.1-jre javaxServletApiVersion=4.0.0 -jacksonVersion=2.10.3 +jacksonVersion=2.11.1 javaxwsrsApiVersion=2.1 jaxbVersion=2.3.0 javaxAnnotationVersion=1.3.2 @@ -46,6 +46,7 @@ avroVersion=1.9.1 avroProtobufVersion=1.7.7 snappyVersion=1.1.7.3 pravegaVersion=0.7.0 +pravegaKeyCloakVersion=0.7.0 # Version and base tags can be overridden at build time schemaregistryVersion=0.0.1-SNAPSHOT diff --git a/serializers/src/main/java/io/pravega/schemaregistry/GroupIdGenerator.java b/serializers/src/main/java/io/pravega/schemaregistry/GroupIdGenerator.java index 03c9eb8bc..6792667a3 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/GroupIdGenerator.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/GroupIdGenerator.java @@ -24,7 +24,7 @@ public static String getGroupId(Scheme scheme, String... args) { switch (scheme) { case QualifiedStreamName: Preconditions.checkNotNull(args); - Preconditions.checkArgument(args.length == 2); + Preconditions.checkArgument(args.length == 2, "Both scope and stream name should be supplied."); return NameUtils.getScopedStreamName(args[0], args[1]); default: throw new IllegalArgumentException("Unknown Group id generation schema."); diff --git a/serializers/src/main/java/io/pravega/schemaregistry/codec/Codec.java b/serializers/src/main/java/io/pravega/schemaregistry/codec/Codec.java index 0ff3c9b4a..b2787ed71 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/codec/Codec.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/codec/Codec.java @@ -23,7 +23,7 @@ public interface Codec { * Codec Type object that contains a string name identifying the Codec Type. * This name should be same as the codecType that is registered for the group in schema registry service. * The serializers will use this codec to encode the data and deserializers will find - * the decoder for the encoded data from {@link EncodingInfo#codecType} + * the decoder for the encoded data from {@link EncodingInfo#getCodecType()} * * @return Name of the codec. 
*/ diff --git a/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java b/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java index 9b2d849ae..12c166491 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java @@ -14,6 +14,8 @@ import com.fasterxml.jackson.module.jsonSchema.JsonSchema; import com.fasterxml.jackson.module.jsonSchema.JsonSchemaGenerator; import com.google.common.base.Charsets; +import com.google.common.base.Preconditions; +import com.google.common.base.Strings; import com.google.common.collect.ImmutableMap; import io.pravega.schemaregistry.contract.data.SchemaInfo; import io.pravega.schemaregistry.contract.data.SerializationFormat; @@ -46,11 +48,8 @@ private JSONSchema(JsonSchema schema, String name, String schemaString, Class } private JSONSchema(JsonSchema schema, String name, String schemaString, Class base, Class derived) { - String type = name != null ? name : schema.getId(); - // Add empty name if the name is not supplied and cannot be extracted from the json schema id. - type = type != null ? type : ""; this.schemaString = schemaString; - this.schemaInfo = new SchemaInfo(type, SerializationFormat.Json, getSchemaBytes(), ImmutableMap.of()); + this.schemaInfo = new SchemaInfo(name, SerializationFormat.Json, getSchemaBytes(), ImmutableMap.of()); this.base = base; this.tClass = derived; this.schema = schema; @@ -73,6 +72,7 @@ private JSONSchema(SchemaInfo schemaInfo, JsonSchema schema, String schemaString * @return {@link JSONSchema} with generic type T that extracts and captures the json schema. */ public static JSONSchema of(Class tClass) { + Preconditions.checkNotNull(tClass); try { JsonSchemaGenerator schemaGen = new JsonSchemaGenerator(OBJECT_MAPPER); JsonSchema schema = schemaGen.generateSchema(tClass); @@ -80,23 +80,45 @@ public static JSONSchema of(Class tClass) { return new JSONSchema<>(schema, tClass.getName(), schemaString, tClass); } catch (JsonProcessingException e) { - throw new IllegalArgumentException(e); + throw new IllegalArgumentException("Unable to get json schema from the class", e); } } /** * Method to create a typed JSONSchema of type {@link Object} from the given schema. + * This method can be used to pass Json schema string which can be used to represent primitive data types. * - * @param type type of object identified by {@link SchemaInfo#type}. + * @param type type of object identified by {@link SchemaInfo#getType()}. + * @param schema Schema to use. + * @return Returns an JSONSchema with {@link Object} type. + */ + public static JSONSchema of(String type, JsonSchema schema) { + Preconditions.checkNotNull(type); + Preconditions.checkNotNull(schema); + try { + String schemaString = OBJECT_MAPPER.writeValueAsString(schema); + + return new JSONSchema<>(schema, type, schemaString, Object.class); + } catch (JsonProcessingException e) { + throw new IllegalArgumentException("Unable to get json schema string from the JsonSchema object", e); + } + } + + /** + * Method to create a typed JSONSchema of type {@link Object} from the given schema string. + * + * @param type type of object identified by {@link SchemaInfo#getType()}. * @param schemaString Schema string to use. * @return Returns an JSONSchema with {@link Object} type. 
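// A hedged sketch of the of(type, schemaString) factory documented just above (its body
// follows below), useful for hand-written or primitive JSON schemas; the type name and
// schema string here are placeholders.
String userSchemaString = "{\"type\": \"object\", \"properties\": {\"name\": {\"type\": \"string\"}}}";
JSONSchema<Object> userSchema = JSONSchema.of("User", userSchemaString);
// A null type or an empty schema string now fails fast via the added Preconditions checks.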
*/ public static JSONSchema of(String type, String schemaString) { + Preconditions.checkNotNull(type, "Type cannot be null."); + Preconditions.checkArgument(!Strings.isNullOrEmpty(schemaString), "Schema String cannot be null or empty."); try { JsonSchema schema = OBJECT_MAPPER.readValue(schemaString, JsonSchema.class); return new JSONSchema<>(schema, type, schemaString, Object.class); } catch (JsonProcessingException e) { - throw new IllegalArgumentException(e); + throw new IllegalArgumentException("Unable to parse schema string", e); } } @@ -111,6 +133,8 @@ public static JSONSchema of(String type, String schemaString) { * @return Returns an AvroSchema with {@link SpecificRecordBase} type. */ public static JSONSchema ofBaseType(Class tDerived, Class tBase) { + Preconditions.checkNotNull(tDerived); + Preconditions.checkNotNull(tBase); try { JsonSchemaGenerator schemaGen = new JsonSchemaGenerator(OBJECT_MAPPER); JsonSchema schema = schemaGen.generateSchema(tDerived); @@ -118,7 +142,7 @@ public static JSONSchema ofBaseType(Class tDerived, Class return new JSONSchema<>(schema, tDerived.getName(), schemaString, tBase, tDerived); } catch (JsonProcessingException e) { - throw new IllegalArgumentException(e); + throw new IllegalArgumentException("Unable to get json schema from the class", e); } } @@ -129,13 +153,14 @@ public static JSONSchema ofBaseType(Class tDerived, Class * @return Returns an JSONSchema with {@link Object} type. */ public static JSONSchema from(SchemaInfo schemaInfo) { + Preconditions.checkNotNull(schemaInfo); try { String schemaString = new String(schemaInfo.getSchemaData().array(), Charsets.UTF_8); JsonSchema schema = OBJECT_MAPPER.readValue(schemaString, JsonSchema.class); return new JSONSchema<>(schemaInfo, schema, schemaString, Object.class); } catch (JsonProcessingException e) { - throw new IllegalArgumentException(e); + throw new IllegalArgumentException("Unable to get json schema from schema info", e); } } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/schemas/ProtobufSchema.java b/serializers/src/main/java/io/pravega/schemaregistry/schemas/ProtobufSchema.java index 56b3bead5..7c1e0a052 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/schemas/ProtobufSchema.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/schemas/ProtobufSchema.java @@ -17,6 +17,7 @@ import com.google.protobuf.Parser; import io.pravega.schemaregistry.contract.data.SchemaInfo; import io.pravega.schemaregistry.contract.data.SerializationFormat; +import lombok.AccessLevel; import lombok.Data; import lombok.Getter; @@ -154,27 +155,22 @@ public static ProtobufSchema from(SchemaInfo schemaInfo) { return new ProtobufSchema<>(fileDescriptorSet, schemaInfo); } catch (InvalidProtocolBufferException ex) { - throw new IllegalArgumentException(ex); + throw new IllegalArgumentException("Unable to get protobuf schema from schemainfo", ex); } } private static class Extractor { + @Getter(AccessLevel.PRIVATE) private Class tClass; + @Getter(AccessLevel.PRIVATE) private T defaultInstance; - private Parser tParser; + @Getter(AccessLevel.PRIVATE) + private Parser parser; Extractor(Class tClass) { this.tClass = tClass; } - T getDefaultInstance() { - return defaultInstance; - } - - Parser getParser() { - return tParser; - } - String getFullName() { return defaultInstance.getDescriptorForType().getFullName(); } @@ -191,7 +187,7 @@ Extractor invoke() { } catch (IllegalAccessException | InvocationTargetException | NoSuchMethodException e) { throw new IllegalArgumentException(e); } 
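// A hedged sketch (not from the patch) of rebuilding a protobuf schema from registry
// metadata with the from(...) factory above; "registeredInfo" stands for a SchemaInfo
// fetched from the registry, and the DynamicMessage typing is assumed from the
// generic-deserialization path this class documents.
ProtobufSchema<DynamicMessage> generic = ProtobufSchema.from(registeredInfo);
// A malformed descriptor payload now surfaces as IllegalArgumentException with the
// clearer "Unable to get protobuf schema from schemainfo" message.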
- tParser = (Parser) defaultInstance.getParserForType(); + parser = (Parser) defaultInstance.getParserForType(); return this; } } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractDeserializer.java index b3f9871dc..60ad99ab4 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractDeserializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractDeserializer.java @@ -9,13 +9,13 @@ */ package io.pravega.schemaregistry.serializers; +import com.google.common.base.Preconditions; import io.pravega.schemaregistry.client.SchemaRegistryClient; import io.pravega.schemaregistry.contract.data.EncodingId; import io.pravega.schemaregistry.contract.data.EncodingInfo; import io.pravega.schemaregistry.contract.data.SchemaInfo; import io.pravega.schemaregistry.schemas.Schema; import lombok.SneakyThrows; -import lombok.Synchronized; import lombok.extern.slf4j.Slf4j; import javax.annotation.Nullable; @@ -45,6 +45,9 @@ protected AbstractDeserializer(String groupId, SerializerConfig.Decoder decoder, EncodingCache encodingCache, boolean encodeHeader) { + Preconditions.checkNotNull(groupId); + Preconditions.checkNotNull(client); + Preconditions.checkNotNull(encodingCache); this.groupId = groupId; this.client = client; this.encodingCache = encodingCache; @@ -56,7 +59,6 @@ protected AbstractDeserializer(String groupId, initialize(); } - @Synchronized private void initialize() { if (schemaInfo != null) { log.info("Validate caller supplied schema."); diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractSerializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractSerializer.java index 570f07f26..d9c92c64c 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractSerializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractSerializer.java @@ -10,6 +10,7 @@ package io.pravega.schemaregistry.serializers; import com.google.common.base.Preconditions; +import io.pravega.common.util.BitConverter; import io.pravega.schemaregistry.client.SchemaRegistryClient; import io.pravega.schemaregistry.codec.Codec; import io.pravega.schemaregistry.contract.data.EncodingId; @@ -27,6 +28,7 @@ abstract class AbstractSerializer extends BaseSerializer { private static final byte PROTOCOL = 0x1; + private static final int HEADER_LENGTH = Byte.BYTES + Integer.BYTES; private final String groupId; @@ -77,30 +79,32 @@ private void initialize() { @Override public ByteBuffer serialize(T obj) { ByteArrayOutputStream dataStream = new ByteArrayOutputStream(); - - // if schema is not null, pass the schema to the serializer implementation - if (schemaInfo != null) { - serialize(obj, schemaInfo, dataStream); - } else { - serialize(obj, null, dataStream); + if (this.encodeHeader) { + dataStream.write(PROTOCOL); + BitConverter.writeInt(dataStream, encodingId.get().getId()); } - + + serialize(obj, schemaInfo, dataStream); dataStream.flush(); - byte[] serialized = dataStream.toByteArray(); ByteBuffer byteBuffer; if (this.encodeHeader) { - Preconditions.checkNotNull(schemaInfo); - ByteBuffer encoded = codec.encode(ByteBuffer.wrap(serialized)); - int bufferSize = 5 + encoded.remaining(); - byteBuffer = ByteBuffer.allocate(bufferSize); - byteBuffer.put(PROTOCOL); - byteBuffer.putInt(encodingId.get().getId()); - byteBuffer.put(encoded); - byteBuffer.rewind(); + 
ByteBuffer wrap = ByteBuffer.wrap(serialized, HEADER_LENGTH, serialized.length - HEADER_LENGTH); + ByteBuffer encoded = codec.encode(wrap); + if (codec.equals(Codecs.None.getCodec())) { + // If no encoding is performed. we can directly use the original serialized byte array. + byteBuffer = ByteBuffer.wrap(serialized); + } else { + int bufferSize = HEADER_LENGTH + encoded.remaining(); + byteBuffer = ByteBuffer.allocate(bufferSize); + // copy the header from serialized array into encoded output array + byteBuffer.put(serialized, 0, HEADER_LENGTH); + byteBuffer.put(encoded); + byteBuffer.rewind(); + } } else { - byteBuffer = ByteBuffer.wrap(serialized); + byteBuffer = ByteBuffer.wrap(dataStream.toByteArray()); } return byteBuffer; diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/Codecs.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/Codecs.java index baf20bbe7..f00cfe825 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/Codecs.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/Codecs.java @@ -9,12 +9,13 @@ */ package io.pravega.schemaregistry.serializers; +import com.fasterxml.jackson.databind.util.ByteBufferBackedInputStream; import io.pravega.schemaregistry.codec.Codec; import io.pravega.schemaregistry.contract.data.CodecType; import lombok.Getter; +import org.apache.commons.io.IOUtils; import org.xerial.snappy.Snappy; -import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.nio.ByteBuffer; @@ -67,29 +68,18 @@ public CodecType getCodecType() { @Override public ByteBuffer encode(ByteBuffer data) throws IOException { - ByteArrayOutputStream bos = new ByteArrayOutputStream(data.remaining()); - GZIPOutputStream gzipOS = new GZIPOutputStream(bos); - gzipOS.write(data.array(), data.arrayOffset() + data.position(), data.remaining()); - gzipOS.close(); - byte[] compressed = bos.toByteArray(); - return ByteBuffer.wrap(compressed); + try (ByteArrayOutputStream bos = new ByteArrayOutputStream(data.remaining())) { + GZIPOutputStream gzipOS = new GZIPOutputStream(bos); + gzipOS.write(data.array(), data.arrayOffset() + data.position(), data.remaining()); + gzipOS.close(); + return ByteBuffer.wrap(bos.toByteArray()); + } } @Override public ByteBuffer decode(ByteBuffer data) throws IOException { - byte[] array = new byte[data.remaining()]; - data.get(array); - - ByteArrayInputStream bis = new ByteArrayInputStream(array); - ByteArrayOutputStream bos = new ByteArrayOutputStream(); - GZIPInputStream gzipIS = new GZIPInputStream(bis); - byte[] buffer = new byte[1024]; - int len; - while ((len = gzipIS.read(buffer)) != -1) { - bos.write(buffer, 0, len); - } - byte[] uncompressed = bos.toByteArray(); - return ByteBuffer.wrap(uncompressed); + ByteBufferBackedInputStream bis = new ByteBufferBackedInputStream(data); + return ByteBuffer.wrap(IOUtils.toByteArray(new GZIPInputStream(bis))); } } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonDeserlizer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonDeserializer.java similarity index 85% rename from serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonDeserlizer.java rename to serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonDeserializer.java index 6df7b03d1..98eaa587d 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonDeserlizer.java +++ 
b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonDeserializer.java @@ -21,13 +21,13 @@ import static com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility; -class JsonDeserlizer extends AbstractDeserializer { +class JsonDeserializer extends AbstractDeserializer { private final JSONSchema jsonSchema; private final ObjectMapper objectMapper; - JsonDeserlizer(String groupId, SchemaRegistryClient client, - JSONSchema schema, - SerializerConfig.Decoder decoder, EncodingCache encodingCache, boolean encodeHeader) { + JsonDeserializer(String groupId, SchemaRegistryClient client, + JSONSchema schema, + SerializerConfig.Decoder decoder, EncodingCache encodingCache, boolean encodeHeader) { super(groupId, client, schema, true, decoder, encodingCache, encodeHeader); Preconditions.checkNotNull(schema); this.jsonSchema = schema; diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializerFactory.java index 5f6061ddb..e1b086ebb 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializerFactory.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializerFactory.java @@ -46,7 +46,7 @@ static Serializer deserializer(SerializerConfig config, JSONSchema sch EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); // schema can be null in which case deserialization will happen into dynamic message - return new JsonDeserlizer<>(groupId, schemaRegistryClient, schema, config.getDecoder(), encodingCache, + return new JsonDeserializer<>(groupId, schemaRegistryClient, schema, config.getDecoder(), encodingCache, config.isWriteEncodingHeader()); } @@ -99,7 +99,7 @@ static Serializer multiTypeDeserializer( Map> deserializerMap = schemas .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), - x -> new JsonDeserlizer<>(groupId, schemaRegistryClient, x, config.getDecoder(), + x -> new JsonDeserializer<>(groupId, schemaRegistryClient, x, config.getDecoder(), encodingCache, config.isWriteEncodingHeader()))); return new MultiplexedDeserializer<>(groupId, schemaRegistryClient, deserializerMap, config.getDecoder(), encodingCache); @@ -117,7 +117,7 @@ static Serializer>> typedOrGenericDeserialize Map> deserializerMap = schemas .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), - x -> new JsonDeserlizer<>(groupId, schemaRegistryClient, x, config.getDecoder(), encodingCache, + x -> new JsonDeserializer<>(groupId, schemaRegistryClient, x, config.getDecoder(), encodingCache, config.isWriteEncodingHeader()))); JsonGenericDeserializer genericDeserializer = new JsonGenericDeserializer(groupId, schemaRegistryClient, config.getDecoder(), encodingCache, config.isWriteEncodingHeader()); From c6a02c8bd48abf7baa3e46c24367428f08f963ef Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Fri, 10 Jul 2020 08:19:50 -0700 Subject: [PATCH 55/70] fix Signed-off-by: Shivesh Ranjan --- .../schemaregistry/serializers/AbstractSerializer.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractSerializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractSerializer.java index d9c92c64c..1dd622edf 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractSerializer.java +++ 
b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractSerializer.java @@ -90,12 +90,12 @@ public ByteBuffer serialize(T obj) { ByteBuffer byteBuffer; if (this.encodeHeader) { - ByteBuffer wrap = ByteBuffer.wrap(serialized, HEADER_LENGTH, serialized.length - HEADER_LENGTH); - ByteBuffer encoded = codec.encode(wrap); if (codec.equals(Codecs.None.getCodec())) { // If no encoding is performed. we can directly use the original serialized byte array. byteBuffer = ByteBuffer.wrap(serialized); } else { + ByteBuffer wrap = ByteBuffer.wrap(serialized, HEADER_LENGTH, serialized.length - HEADER_LENGTH); + ByteBuffer encoded = codec.encode(wrap); int bufferSize = HEADER_LENGTH + encoded.remaining(); byteBuffer = ByteBuffer.allocate(bufferSize); // copy the header from serialized array into encoded output array @@ -104,7 +104,7 @@ public ByteBuffer serialize(T obj) { byteBuffer.rewind(); } } else { - byteBuffer = ByteBuffer.wrap(dataStream.toByteArray()); + byteBuffer = ByteBuffer.wrap(serialized); } return byteBuffer; From bf0dfb4ef8866b06ab1014c645ebace1ceb47cb0 Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Sat, 11 Jul 2020 19:52:14 -0700 Subject: [PATCH 56/70] PR comments Signed-off-by: Shivesh Ranjan --- .../schemaregistry/GroupIdGenerator.java | 37 ------ .../pravega/schemaregistry/codec/Codec.java | 39 ++---- .../{serializers => codec}/Codecs.java | 40 ++++--- .../pravega/schemaregistry/codec/Decoder.java | 31 +++++ .../pravega/schemaregistry/codec/Encoder.java | 37 ++++++ .../schemas/ProtobufSchema.java | 3 +- .../serializers/AbstractDeserializer.java | 13 +- .../serializers/AbstractSerializer.java | 46 ++++--- ...oDeserlizer.java => AvroDeserializer.java} | 8 +- ...izer.java => AvroGenericDeserializer.java} | 6 +- .../serializers/AvroSerializer.java | 6 +- .../serializers/AvroSerializerFactory.java | 20 ++-- .../serializers/CustomSerializerFactory.java | 4 +- .../serializers/FailingSerializer.java | 26 ---- .../serializers/JsonDeserializer.java | 4 +- .../serializers/JsonGenericDeserializer.java | 4 +- .../serializers/JsonSerializer.java | 6 +- .../serializers/JsonSerializerFactory.java | 20 ++-- .../serializers/JsonStringDeserializer.java | 4 +- .../MultiFormatSerializerFactory.java | 28 ++--- .../MultiFormatWithSchemaDeserializer.java | 4 +- .../MultipleFormatDeserializer.java | 4 +- .../MultiplexedAndGenericDeserializer.java | 4 +- .../serializers/MultiplexedDeserializer.java | 4 +- ...erlizer.java => ProtobufDeserializer.java} | 8 +- .../ProtobufGenericDeserlizer.java | 2 +- .../serializers/ProtobufSerializer.java | 6 +- .../ProtobufSerializerFactory.java | 18 +-- .../serializers/SerializerConfig.java | 112 ++++++++---------- .../serializers/SerializerFactoryHelper.java | 8 +- .../pravega/schemaregistry/GroupIdTest.java | 27 ----- .../schemaregistry/codec/CodecTest.java | 14 +-- .../schemaregistry/serializers/CacheTest.java | 1 + .../serializers/SerializerTest.java | 27 +++-- 34 files changed, 293 insertions(+), 328 deletions(-) delete mode 100644 serializers/src/main/java/io/pravega/schemaregistry/GroupIdGenerator.java rename serializers/src/main/java/io/pravega/schemaregistry/{serializers => codec}/Codecs.java (81%) create mode 100644 serializers/src/main/java/io/pravega/schemaregistry/codec/Decoder.java create mode 100644 serializers/src/main/java/io/pravega/schemaregistry/codec/Encoder.java rename serializers/src/main/java/io/pravega/schemaregistry/serializers/{AvroDeserlizer.java => AvroDeserializer.java} (90%) rename 
serializers/src/main/java/io/pravega/schemaregistry/serializers/{AvroGenericDeserlizer.java => AvroGenericDeserializer.java} (86%) delete mode 100644 serializers/src/main/java/io/pravega/schemaregistry/serializers/FailingSerializer.java rename serializers/src/main/java/io/pravega/schemaregistry/serializers/{ProtobufDeserlizer.java => ProtobufDeserializer.java} (79%) delete mode 100644 serializers/src/test/java/io/pravega/schemaregistry/GroupIdTest.java diff --git a/serializers/src/main/java/io/pravega/schemaregistry/GroupIdGenerator.java b/serializers/src/main/java/io/pravega/schemaregistry/GroupIdGenerator.java deleted file mode 100644 index 6792667a3..000000000 --- a/serializers/src/main/java/io/pravega/schemaregistry/GroupIdGenerator.java +++ /dev/null @@ -1,37 +0,0 @@ -/** - * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - */ -package io.pravega.schemaregistry; - -import com.google.common.base.Preconditions; -import io.pravega.shared.NameUtils; - -/** - * Defines strategies for generating groupId for stream. - * Currently there is only one naming strategy that uses streams fully qualified scoped stream name. - */ -public class GroupIdGenerator { - private GroupIdGenerator() { - } - - public static String getGroupId(Scheme scheme, String... args) { - switch (scheme) { - case QualifiedStreamName: - Preconditions.checkNotNull(args); - Preconditions.checkArgument(args.length == 2, "Both scope and stream name should be supplied."); - return NameUtils.getScopedStreamName(args[0], args[1]); - default: - throw new IllegalArgumentException("Unknown Group id generation schema."); - } - } - - public enum Scheme { - QualifiedStreamName, - } -} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/codec/Codec.java b/serializers/src/main/java/io/pravega/schemaregistry/codec/Codec.java index b2787ed71..b2e1f26ff 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/codec/Codec.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/codec/Codec.java @@ -12,40 +12,19 @@ import io.pravega.schemaregistry.contract.data.CodecType; import io.pravega.schemaregistry.contract.data.EncodingInfo; -import java.io.IOException; -import java.nio.ByteBuffer; - /** - * Codec interface that defines methods to encode and decoder data for a given codec type. + * Codec interface extends {@link Encoder} and {@link Decoder} interfaces that defines methods to encode and decode + * data. Encoder interface takes a codec type and encoding function. Decoder interface defines a decoding function. */ -public interface Codec { +public interface Codec extends Encoder, Decoder { /** - * Codec Type object that contains a string name identifying the Codec Type. - * This name should be same as the codecType that is registered for the group in schema registry service. - * The serializers will use this codec to encode the data and deserializers will find - * the decoder for the encoded data from {@link EncodingInfo#getCodecType()} + * Name identifying the Codec Type. + * This name should be same as the {@link CodecType#getName()} that is registered for the group in schema registry + * service. + * The deserializers will find the decoder for the encoded data from {@link EncodingInfo#getCodecType()} by matching + * the name. * * @return Name of the codec. 
*/ - CodecType getCodecType(); - - /** - * Implementation should encode the remaining bytes in the buffer and return a new ByteBuffer that includes - * the encoded data at its current position. - * - * @param data ByteBuffer to encode. - * @return encoded ByteBuffer with position set to the start of encoded data. - * @throws IOException IOException can be thrown while reading from or writing to byte buffers. - */ - ByteBuffer encode(ByteBuffer data) throws IOException; - - /** - * Implementation should decode the remaining bytes in the buffer and return a new ByteBuffer that includes - * the decoded data at its current position. - * - * @param data encoded ByteBuffer to decode. - * @return decoded ByteBuffer with position set to the start of decoded data. - * @throws IOException can be thrown while reading from or writing to byte buffers. - */ - ByteBuffer decode(ByteBuffer data) throws IOException; + String getName(); } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/Codecs.java b/serializers/src/main/java/io/pravega/schemaregistry/codec/Codecs.java similarity index 81% rename from serializers/src/main/java/io/pravega/schemaregistry/serializers/Codecs.java rename to serializers/src/main/java/io/pravega/schemaregistry/codec/Codecs.java index f00cfe825..46a756893 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/Codecs.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/codec/Codecs.java @@ -7,10 +7,9 @@ * * http://www.apache.org/licenses/LICENSE-2.0 */ -package io.pravega.schemaregistry.serializers; +package io.pravega.schemaregistry.codec; import com.fasterxml.jackson.databind.util.ByteBufferBackedInputStream; -import io.pravega.schemaregistry.codec.Codec; import io.pravega.schemaregistry.contract.data.CodecType; import lombok.Getter; import org.apache.commons.io.IOUtils; @@ -19,6 +18,7 @@ import java.io.ByteArrayOutputStream; import java.io.IOException; import java.nio.ByteBuffer; +import java.util.Map; import java.util.zip.GZIPInputStream; import java.util.zip.GZIPOutputStream; @@ -26,23 +26,25 @@ * Utility class for creating codecs for none, snappy or gzip. 
*/ public enum Codecs { - None(Constants.NOOP, Constants.NOOP.getCodecType()), - GzipCompressor(Constants.GZIP_CODEC, Constants.GZIP_CODEC.getCodecType()), - SnappyCompressor(Constants.SNAPPY_CODEC, Constants.SNAPPY_CODEC.getCodecType()); + None(Constants.NOOP), + GzipCompressor(Constants.GZIP_CODEC), + SnappyCompressor(Constants.SNAPPY_CODEC); @Getter private final Codec codec; - @Getter - private final CodecType codecType; - Codecs(Codec codec, CodecType codecType) { + Codecs(Codec codec) { this.codec = codec; - this.codecType = codecType; } private static class Noop implements Codec { private static final CodecType CODEC_TYPE_NONE = new CodecType(Constants.NONE); + @Override + public String getName() { + return CODEC_TYPE_NONE.getName(); + } + @Override public CodecType getCodecType() { return CODEC_TYPE_NONE; @@ -54,18 +56,23 @@ public ByteBuffer encode(ByteBuffer data) { } @Override - public ByteBuffer decode(ByteBuffer data) { + public ByteBuffer decode(ByteBuffer data, Map codecProperties) { return data; } } private static class GZipCodec implements Codec { private static final CodecType CODEC_TYPE_GZIP = new CodecType(Constants.APPLICATION_X_GZIP); + @Override + public String getName() { + return CODEC_TYPE_GZIP.getName(); + } + @Override public CodecType getCodecType() { return CODEC_TYPE_GZIP; } - + @Override public ByteBuffer encode(ByteBuffer data) throws IOException { try (ByteArrayOutputStream bos = new ByteArrayOutputStream(data.remaining())) { @@ -77,7 +84,7 @@ public ByteBuffer encode(ByteBuffer data) throws IOException { } @Override - public ByteBuffer decode(ByteBuffer data) throws IOException { + public ByteBuffer decode(ByteBuffer data, Map codecProperties) throws IOException { ByteBufferBackedInputStream bis = new ByteBufferBackedInputStream(data); return ByteBuffer.wrap(IOUtils.toByteArray(new GZIPInputStream(bis))); } @@ -85,11 +92,16 @@ public ByteBuffer decode(ByteBuffer data) throws IOException { private static class SnappyCodec implements Codec { private static final CodecType CODEC_TYPE_SNAPPY = new CodecType(Constants.APPLICATION_X_SNAPPY_FRAMED); + @Override + public String getName() { + return CODEC_TYPE_SNAPPY.getName(); + } + @Override public CodecType getCodecType() { return CODEC_TYPE_SNAPPY; } - + @Override public ByteBuffer encode(ByteBuffer data) throws IOException { int capacity = Snappy.maxCompressedLength(data.remaining()); @@ -102,7 +114,7 @@ public ByteBuffer encode(ByteBuffer data) throws IOException { } @Override - public ByteBuffer decode(ByteBuffer data) throws IOException { + public ByteBuffer decode(ByteBuffer data, Map codecProperties) throws IOException { ByteBuffer decoded = ByteBuffer.allocate(Snappy.uncompressedLength(data.array(), data.arrayOffset() + data.position(), data.remaining())); Snappy.uncompress(data.array(), data.arrayOffset() + data.position(), diff --git a/serializers/src/main/java/io/pravega/schemaregistry/codec/Decoder.java b/serializers/src/main/java/io/pravega/schemaregistry/codec/Decoder.java new file mode 100644 index 000000000..5c0d7f3a8 --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/codec/Decoder.java @@ -0,0 +1,31 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.codec; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.Map; + +/** + * Decoder interface that defines method to decode data. + */ +@FunctionalInterface +public interface Decoder { + /** + * Implementation should decode the remaining bytes in the buffer and return a new ByteBuffer that includes + * the decoded data at its current position. + * + * @param data encoded ByteBuffer to decode. + * @param codecProperties codec properties. + * @return decoded ByteBuffer with position set to the start of decoded data. + * @throws IOException can be thrown while reading from or writing to byte buffers. + */ + ByteBuffer decode(ByteBuffer data, Map codecProperties) throws IOException; +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/codec/Encoder.java b/serializers/src/main/java/io/pravega/schemaregistry/codec/Encoder.java new file mode 100644 index 000000000..ec5e78d19 --- /dev/null +++ b/serializers/src/main/java/io/pravega/schemaregistry/codec/Encoder.java @@ -0,0 +1,37 @@ +/** + * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +package io.pravega.schemaregistry.codec; + +import io.pravega.schemaregistry.contract.data.CodecType; + +import java.io.IOException; +import java.nio.ByteBuffer; + +/** + * Defines method to encode data. + */ +public interface Encoder { + /** + * Codec type for the encoder. + * + * @return Codec Type for the encoder. + */ + CodecType getCodecType(); + + /** + * Implementation should encode the remaining bytes in the buffer and return a new ByteBuffer that includes + * the encoded data at its current position. + * + * @param data ByteBuffer to encode. + * @return encoded ByteBuffer with position set to the start of encoded data. + * @throws IOException IOException can be thrown while reading from or writing to byte buffers. + */ + ByteBuffer encode(ByteBuffer data) throws IOException; +} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/schemas/ProtobufSchema.java b/serializers/src/main/java/io/pravega/schemaregistry/schemas/ProtobufSchema.java index 7c1e0a052..4555052d2 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/schemas/ProtobufSchema.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/schemas/ProtobufSchema.java @@ -127,7 +127,7 @@ public static ProtobufSchema /** * Method to generate protobuf schema from the supplied protobuf generated class. It creates the {@link FileDescriptorSet} * from the generated class. - * This method is same as {@link #of(Class, FileDescriptorSet)} except that it returns a Protobuf schema + * This method is same as {@link #of(Class)} except that it returns a Protobuf schema * typed {@link GeneratedMessageV3}. * It is useful in multiplexed deserializer to pass all objects to deserialize into as base {@link GeneratedMessageV3} objects. 
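A minimal sketch of a user-defined codec under the Encoder/Decoder split introduced above, where Codec now extends both interfaces and exposes getName(). Everything here is illustrative: the class, the "custom-xor" name and the XOR transform are placeholders, and the decode signature assumes the codecProperties map is a Map<String, String>, which the stripped generics in this diff do not show.

    import io.pravega.schemaregistry.codec.Codec;
    import io.pravega.schemaregistry.contract.data.CodecType;

    import java.nio.ByteBuffer;
    import java.util.Map;

    // Hypothetical codec for illustration; the name and transform are placeholders.
    class XorCodec implements Codec {
        private static final CodecType CODEC_TYPE = new CodecType("custom-xor");

        @Override
        public String getName() {
            // Must match the codec type name registered for the group in the registry service.
            return CODEC_TYPE.getName();
        }

        @Override
        public CodecType getCodecType() {
            return CODEC_TYPE;
        }

        @Override
        public ByteBuffer encode(ByteBuffer data) {
            // Encode the remaining bytes and return a buffer positioned at the encoded data.
            ByteBuffer out = ByteBuffer.allocate(data.remaining());
            while (data.hasRemaining()) {
                out.put((byte) (data.get() ^ 0x5A));
            }
            out.flip();
            return out;
        }

        @Override
        public ByteBuffer decode(ByteBuffer data, Map<String, String> codecProperties) {
            // XOR is symmetric, so decode reuses the same transform; properties are ignored here.
            return encode(data);
        }
    }

Because Codec extends both Encoder and Decoder, one such instance can act as the writer-side encoder and be registered as a reader-side decoder under the same codec name.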
* @@ -176,6 +176,7 @@ String getFullName() { } FileDescriptorSet getFileDescriptorSet() { + // TODO: verify that the file proto has descriptors for all message types return FileDescriptorSet .newBuilder().addFile(defaultInstance.getDescriptorForType().getFile().toProto()).build(); } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractDeserializer.java index 60ad99ab4..67807d4ae 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractDeserializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractDeserializer.java @@ -34,7 +34,7 @@ abstract class AbstractDeserializer extends BaseDeserializer { // If headers are not encoded, then this will be the latest schema from the registry private final SchemaInfo schemaInfo; private final boolean encodeHeader; - private final SerializerConfig.Decoder decoder; + private final SerializerConfig.Decoders decoders; private final boolean skipHeaders; private final EncodingCache encodingCache; @@ -42,7 +42,7 @@ protected AbstractDeserializer(String groupId, SchemaRegistryClient client, @Nullable Schema schema, boolean skipHeaders, - SerializerConfig.Decoder decoder, + SerializerConfig.Decoders decoders, EncodingCache encodingCache, boolean encodeHeader) { Preconditions.checkNotNull(groupId); @@ -54,7 +54,7 @@ protected AbstractDeserializer(String groupId, this.schemaInfo = schema == null ? null : schema.getSchemaInfo(); this.encodeHeader = encodeHeader; this.skipHeaders = skipHeaders; - this.decoder = decoder; + this.decoders = decoders; initialize(); } @@ -63,7 +63,7 @@ private void initialize() { if (schemaInfo != null) { log.info("Validate caller supplied schema."); if (!client.canReadUsing(groupId, schemaInfo)) { - throw new IllegalArgumentException("Cannot read using schema" + schemaInfo.getType()); + throw new IllegalArgumentException("Cannot read using schema" + schemaInfo.getType() + " as it is considered incompatible with current policy."); } } else { if (!this.encodeHeader) { @@ -75,6 +75,9 @@ private void initialize() { @SneakyThrows(IOException.class) @Override public T deserialize(ByteBuffer data) { + if (!data.hasArray()) { + return null; + } int start = data.arrayOffset() + data.position(); if (this.encodeHeader) { SchemaInfo writerSchema = null; @@ -87,7 +90,7 @@ public T deserialize(ByteBuffer data) { EncodingId encodingId = new EncodingId(data.getInt()); EncodingInfo encodingInfo = encodingCache.getGroupEncodingInfo(encodingId); writerSchema = encodingInfo.getSchemaInfo(); - decoded = decoder.decode(encodingInfo.getCodecType(), data); + decoded = decoders.decode(encodingInfo.getCodecType(), data); } ByteArrayInputStream bais = new ByteArrayInputStream(decoded.array(), diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractSerializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractSerializer.java index 1dd622edf..2d98faace 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractSerializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractSerializer.java @@ -10,9 +10,11 @@ package io.pravega.schemaregistry.serializers; import com.google.common.base.Preconditions; +import io.pravega.common.io.EnhancedByteArrayOutputStream; import io.pravega.common.util.BitConverter; import io.pravega.schemaregistry.client.SchemaRegistryClient; -import 
io.pravega.schemaregistry.codec.Codec; +import io.pravega.schemaregistry.codec.Codecs; +import io.pravega.schemaregistry.codec.Encoder; import io.pravega.schemaregistry.contract.data.EncodingId; import io.pravega.schemaregistry.contract.data.SchemaInfo; import io.pravega.schemaregistry.contract.data.VersionInfo; @@ -20,7 +22,6 @@ import lombok.Getter; import lombok.SneakyThrows; -import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.OutputStream; import java.nio.ByteBuffer; @@ -37,18 +38,18 @@ abstract class AbstractSerializer extends BaseSerializer { private final boolean encodeHeader; private final SchemaRegistryClient client; @Getter - private final Codec codec; + private final Encoder encoder; private final boolean registerSchema; protected AbstractSerializer(String groupId, SchemaRegistryClient client, Schema schema, - Codec codec, + Encoder encoder, boolean registerSchema, boolean encodeHeader) { Preconditions.checkNotNull(groupId); Preconditions.checkNotNull(client); - Preconditions.checkNotNull(codec); + Preconditions.checkNotNull(encoder); Preconditions.checkNotNull(schema); this.groupId = groupId; @@ -56,7 +57,7 @@ protected AbstractSerializer(String groupId, this.schemaInfo = schema.getSchemaInfo(); this.registerSchema = registerSchema; this.encodingId = new AtomicReference<>(); - this.codec = codec; + this.encoder = encoder; this.encodeHeader = encodeHeader; initialize(); } @@ -71,40 +72,35 @@ private void initialize() { version = client.getVersionForSchema(groupId, schemaInfo); } if (encodeHeader) { - encodingId.set(client.getEncodingId(groupId, version, codec.getCodecType().getName())); + encodingId.set(client.getEncodingId(groupId, version, encoder.getCodecType().getName())); } } @SneakyThrows(IOException.class) @Override public ByteBuffer serialize(T obj) { - ByteArrayOutputStream dataStream = new ByteArrayOutputStream(); + EnhancedByteArrayOutputStream dataStream = new EnhancedByteArrayOutputStream(); if (this.encodeHeader) { dataStream.write(PROTOCOL); BitConverter.writeInt(dataStream, encodingId.get().getId()); } serialize(obj, schemaInfo, dataStream); - dataStream.flush(); - byte[] serialized = dataStream.toByteArray(); ByteBuffer byteBuffer; - if (this.encodeHeader) { - if (codec.equals(Codecs.None.getCodec())) { - // If no encoding is performed. we can directly use the original serialized byte array. 
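With the encoding header enabled, the buffer built above starts with the PROTOCOL byte and the 4-byte encoding id, and everything after that header is the serialized payload, codec-encoded unless the encoder is Codecs.None. The sketch below only illustrates that layout; it assumes the header is 5 bytes (HEADER_LENGTH), which matches the two header writes shown here but is not spelled out in this hunk.

    import java.nio.ByteBuffer;

    // Illustrative reader-side view of the buffer produced by AbstractSerializer.serialize
    // (assumes a 5-byte header: 1 protocol byte followed by a 4-byte big-endian encoding id).
    final class EncodedEventLayout {
        static void inspect(ByteBuffer event) {
            byte protocol = event.get();         // protocol marker written via dataStream.write(PROTOCOL)
            int encodingId = event.getInt();     // id used to look up EncodingInfo (schema + codec type)
            ByteBuffer payload = event.slice();  // remaining bytes: possibly codec-encoded serialized object
            System.out.printf("protocol=%d encodingId=%d payloadBytes=%d%n",
                    protocol, encodingId, payload.remaining());
        }
    }

The AbstractDeserializer changes earlier in this patch consume the header the same way before handing the remaining bytes to the decoder that matches the codec type from the encoding info.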
- byteBuffer = ByteBuffer.wrap(serialized); - } else { - ByteBuffer wrap = ByteBuffer.wrap(serialized, HEADER_LENGTH, serialized.length - HEADER_LENGTH); - ByteBuffer encoded = codec.encode(wrap); - int bufferSize = HEADER_LENGTH + encoded.remaining(); - byteBuffer = ByteBuffer.allocate(bufferSize); - // copy the header from serialized array into encoded output array - byteBuffer.put(serialized, 0, HEADER_LENGTH); - byteBuffer.put(encoded); - byteBuffer.rewind(); - } + byte[] serialized = dataStream.getData().array(); + if (!encoder.equals(Codecs.None.getCodec())) { + ByteBuffer wrap = ByteBuffer.wrap(serialized, HEADER_LENGTH, + dataStream.getData().getLength() - HEADER_LENGTH); + ByteBuffer encoded = encoder.encode(wrap); + int bufferSize = HEADER_LENGTH + encoded.remaining(); + byteBuffer = ByteBuffer.allocate(bufferSize); + // copy the header from serialized array into encoded output array + byteBuffer.put(serialized, 0, HEADER_LENGTH); + byteBuffer.put(encoded); + byteBuffer.rewind(); } else { - byteBuffer = ByteBuffer.wrap(serialized); + byteBuffer = ByteBuffer.wrap(serialized, 0, dataStream.getData().getLength()); } return byteBuffer; diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroDeserlizer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroDeserializer.java similarity index 90% rename from serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroDeserlizer.java rename to serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroDeserializer.java index 4ab30e002..6cab88051 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroDeserlizer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroDeserializer.java @@ -25,13 +25,13 @@ import java.io.InputStream; import java.util.concurrent.ConcurrentHashMap; -class AvroDeserlizer extends AbstractDeserializer { +class AvroDeserializer extends AbstractDeserializer { private final AvroSchema avroSchema; private final ConcurrentHashMap knownSchemas; - AvroDeserlizer(String groupId, SchemaRegistryClient client, - AvroSchema schema, - SerializerConfig.Decoder decoder, EncodingCache encodingCache) { + AvroDeserializer(String groupId, SchemaRegistryClient client, + AvroSchema schema, + SerializerConfig.Decoders decoder, EncodingCache encodingCache) { super(groupId, client, schema, false, decoder, encodingCache, true); Preconditions.checkNotNull(schema); this.avroSchema = schema; diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroGenericDeserlizer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroGenericDeserializer.java similarity index 86% rename from serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroGenericDeserlizer.java rename to serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroGenericDeserializer.java index 8f86de555..77af89edf 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroGenericDeserlizer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroGenericDeserializer.java @@ -23,11 +23,11 @@ import java.io.InputStream; import java.util.concurrent.ConcurrentHashMap; -class AvroGenericDeserlizer extends AbstractDeserializer { +class AvroGenericDeserializer extends AbstractDeserializer { private final ConcurrentHashMap knownSchemas; - AvroGenericDeserlizer(String groupId, SchemaRegistryClient client, @Nullable AvroSchema schema, - SerializerConfig.Decoder decoder, 
EncodingCache encodingCache) { + AvroGenericDeserializer(String groupId, SchemaRegistryClient client, @Nullable AvroSchema schema, + SerializerConfig.Decoders decoder, EncodingCache encodingCache) { super(groupId, client, schema, false, decoder, encodingCache, true); this.knownSchemas = new ConcurrentHashMap<>(); } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroSerializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroSerializer.java index 90550ee01..99231a11c 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroSerializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroSerializer.java @@ -10,7 +10,7 @@ package io.pravega.schemaregistry.serializers; import io.pravega.schemaregistry.client.SchemaRegistryClient; -import io.pravega.schemaregistry.codec.Codec; +import io.pravega.schemaregistry.codec.Encoder; import io.pravega.schemaregistry.contract.data.SchemaInfo; import io.pravega.schemaregistry.schemas.AvroSchema; import org.apache.avro.Schema; @@ -28,8 +28,8 @@ class AvroSerializer extends AbstractSerializer { private final AvroSchema avroSchema; AvroSerializer(String groupId, SchemaRegistryClient client, AvroSchema schema, - Codec codec, boolean registerSchema) { - super(groupId, client, schema, codec, registerSchema, true); + Encoder encoder, boolean registerSchema) { + super(groupId, client, schema, encoder, registerSchema, true); this.avroSchema = schema; } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroSerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroSerializerFactory.java index a44942845..5e9cb8170 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroSerializerFactory.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroSerializerFactory.java @@ -34,7 +34,7 @@ static Serializer serializer(SerializerConfig config, AvroSchema schem Preconditions.checkArgument(config.isWriteEncodingHeader(), "Events should be tagged with encoding ids."); SchemaRegistryClient schemaRegistryClient = initForSerializer(config); String groupId = config.getGroupId(); - return new AvroSerializer<>(groupId, schemaRegistryClient, schema, config.getCodec(), config.isRegisterSchema()); + return new AvroSerializer<>(groupId, schemaRegistryClient, schema, config.getEncoder(), config.isRegisterSchema()); } static Serializer deserializer(SerializerConfig config, AvroSchema schema) { @@ -46,7 +46,7 @@ static Serializer deserializer(SerializerConfig config, AvroSchema sch EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - return new AvroDeserlizer<>(groupId, schemaRegistryClient, schema, config.getDecoder(), encodingCache); + return new AvroDeserializer<>(groupId, schemaRegistryClient, schema, config.getDecoders(), encodingCache); } static Serializer genericDeserializer(SerializerConfig config, @Nullable AvroSchema schema) { @@ -56,7 +56,7 @@ static Serializer genericDeserializer(SerializerConfig config, @Nullable SchemaRegistryClient schemaRegistryClient = initForDeserializer(config); EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - return new AvroGenericDeserlizer(groupId, schemaRegistryClient, schema, config.getDecoder(), encodingCache); + return new AvroGenericDeserializer(groupId, schemaRegistryClient, schema, config.getDecoders(), encodingCache); } static Serializer multiTypeSerializer(SerializerConfig 
config, Map, AvroSchema> schemas) { @@ -68,7 +68,7 @@ static Serializer multiTypeSerializer(SerializerConfig config, Map, AbstractSerializer> serializerMap = schemas .entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, - x -> new AvroSerializer<>(groupId, schemaRegistryClient, x.getValue(), config.getCodec(), + x -> new AvroSerializer<>(groupId, schemaRegistryClient, x.getValue(), config.getEncoder(), config.isRegisterSchema()))); return new MultiplexedSerializer<>(serializerMap); } @@ -86,8 +86,8 @@ static Serializer multiTypeDeserializer( Map> deserializerMap = schemas .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), - x -> new AvroDeserlizer<>(groupId, schemaRegistryClient, x, config.getDecoder(), encodingCache))); - return new MultiplexedDeserializer<>(groupId, schemaRegistryClient, deserializerMap, config.getDecoder(), + x -> new AvroDeserializer<>(groupId, schemaRegistryClient, x, config.getDecoders(), encodingCache))); + return new MultiplexedDeserializer<>(groupId, schemaRegistryClient, deserializerMap, config.getDecoders(), encodingCache); } @@ -104,10 +104,10 @@ static Serializer> typedOrGenericDeserializer( Map> deserializerMap = schemas .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), - x -> new AvroDeserlizer<>(groupId, schemaRegistryClient, x, config.getDecoder(), encodingCache))); - AbstractDeserializer genericDeserializer = new AvroGenericDeserlizer(groupId, schemaRegistryClient, - null, config.getDecoder(), encodingCache); + x -> new AvroDeserializer<>(groupId, schemaRegistryClient, x, config.getDecoders(), encodingCache))); + AbstractDeserializer genericDeserializer = new AvroGenericDeserializer(groupId, schemaRegistryClient, + null, config.getDecoders(), encodingCache); return new MultiplexedAndGenericDeserializer<>(groupId, schemaRegistryClient, deserializerMap, genericDeserializer, - config.getDecoder(), encodingCache); + config.getDecoders(), encodingCache); } } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/CustomSerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/CustomSerializerFactory.java index 05a2f64bb..380a067af 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/CustomSerializerFactory.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/CustomSerializerFactory.java @@ -35,7 +35,7 @@ static Serializer serializer(SerializerConfig config, Schema schema, C String groupId = config.getGroupId(); SchemaRegistryClient schemaRegistryClient = initForSerializer(config); return new AbstractSerializer(groupId, schemaRegistryClient, - schema, config.getCodec(), config.isRegisterSchema(), config.isWriteEncodingHeader()) { + schema, config.getEncoder(), config.isRegisterSchema(), config.isWriteEncodingHeader()) { @Override protected void serialize(T var, SchemaInfo schema, OutputStream outputStream) { serializer.serialize(var, schema, outputStream); @@ -54,7 +54,7 @@ static Serializer deserializer(SerializerConfig config, @Nullable Schema< EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); return new AbstractDeserializer(groupId, schemaRegistryClient, schema, false, - config.getDecoder(), encodingCache, config.isWriteEncodingHeader()) { + config.getDecoders(), encodingCache, config.isWriteEncodingHeader()) { @Override protected T deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) { return deserializer.deserialize(inputStream, 
writerSchema, readerSchema); diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/FailingSerializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/FailingSerializer.java deleted file mode 100644 index e624eab46..000000000 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/FailingSerializer.java +++ /dev/null @@ -1,26 +0,0 @@ -/** - * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - */ -package io.pravega.schemaregistry.serializers; - -import io.pravega.client.stream.Serializer; - -import java.nio.ByteBuffer; - -class FailingSerializer implements Serializer { - @Override - public ByteBuffer serialize(T value) { - throw new IllegalStateException(); - } - - @Override - public T deserialize(ByteBuffer serializedValue) { - throw new IllegalStateException(); - } -} diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonDeserializer.java index 98eaa587d..62e794b21 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonDeserializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonDeserializer.java @@ -27,8 +27,8 @@ class JsonDeserializer extends AbstractDeserializer { JsonDeserializer(String groupId, SchemaRegistryClient client, JSONSchema schema, - SerializerConfig.Decoder decoder, EncodingCache encodingCache, boolean encodeHeader) { - super(groupId, client, schema, true, decoder, encodingCache, encodeHeader); + SerializerConfig.Decoders decoders, EncodingCache encodingCache, boolean encodeHeader) { + super(groupId, client, schema, true, decoders, encodingCache, encodeHeader); Preconditions.checkNotNull(schema); this.jsonSchema = schema; this.objectMapper = new ObjectMapper(); diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericDeserializer.java index 0b1b9deaa..d1d955af7 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericDeserializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericDeserializer.java @@ -23,8 +23,8 @@ class JsonGenericDeserializer extends AbstractDeserializer> private final ObjectMapper objectMapper; JsonGenericDeserializer(String groupId, SchemaRegistryClient client, - SerializerConfig.Decoder decoder, EncodingCache encodingCache, boolean encodeHeader) { - super(groupId, client, null, false, decoder, encodingCache, encodeHeader); + SerializerConfig.Decoders decoders, EncodingCache encodingCache, boolean encodeHeader) { + super(groupId, client, null, false, decoders, encodingCache, encodeHeader); this.objectMapper = new ObjectMapper(); objectMapper.setVisibility(PropertyAccessor.ALL, JsonAutoDetect.Visibility.ANY); objectMapper.setVisibility(PropertyAccessor.FIELD, JsonAutoDetect.Visibility.ANY); diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializer.java index 8b963d9a1..50f4d2ec8 100644 --- 
a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializer.java @@ -13,7 +13,7 @@ import com.fasterxml.jackson.annotation.PropertyAccessor; import com.fasterxml.jackson.databind.ObjectMapper; import io.pravega.schemaregistry.client.SchemaRegistryClient; -import io.pravega.schemaregistry.codec.Codec; +import io.pravega.schemaregistry.codec.Encoder; import io.pravega.schemaregistry.contract.data.SchemaInfo; import io.pravega.schemaregistry.schemas.JSONSchema; @@ -23,8 +23,8 @@ class JsonSerializer extends AbstractSerializer { private final ObjectMapper objectMapper; JsonSerializer(String groupId, SchemaRegistryClient client, JSONSchema schema, - Codec codec, boolean registerSchema, boolean encodeHeader) { - super(groupId, client, schema, codec, registerSchema, encodeHeader); + Encoder encoder, boolean registerSchema, boolean encodeHeader) { + super(groupId, client, schema, encoder, registerSchema, encodeHeader); objectMapper = new ObjectMapper(); objectMapper.setVisibility(PropertyAccessor.ALL, JsonAutoDetect.Visibility.ANY); objectMapper.setVisibility(PropertyAccessor.FIELD, JsonAutoDetect.Visibility.ANY); diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializerFactory.java index e1b086ebb..76dc8c983 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializerFactory.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializerFactory.java @@ -33,7 +33,7 @@ static Serializer serializer(SerializerConfig config, JSONSchema schem Preconditions.checkNotNull(schema); String groupId = config.getGroupId(); SchemaRegistryClient schemaRegistryClient = initForSerializer(config); - return new JsonSerializer<>(groupId, schemaRegistryClient, schema, config.getCodec(), + return new JsonSerializer<>(groupId, schemaRegistryClient, schema, config.getEncoder(), config.isRegisterSchema(), config.isWriteEncodingHeader()); } @@ -46,7 +46,7 @@ static Serializer deserializer(SerializerConfig config, JSONSchema sch EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); // schema can be null in which case deserialization will happen into dynamic message - return new JsonDeserializer<>(groupId, schemaRegistryClient, schema, config.getDecoder(), encodingCache, + return new JsonDeserializer<>(groupId, schemaRegistryClient, schema, config.getDecoders(), encodingCache, config.isWriteEncodingHeader()); } @@ -58,7 +58,7 @@ static Serializer> genericDeserializer(SerializerConfig con EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - return new JsonGenericDeserializer(groupId, schemaRegistryClient, config.getDecoder(), + return new JsonGenericDeserializer(groupId, schemaRegistryClient, config.getDecoders(), encodingCache, config.isWriteEncodingHeader()); } @@ -70,7 +70,7 @@ static Serializer jsonStringDeserializer(SerializerConfig config) { EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - return new JsonStringDeserializer(groupId, schemaRegistryClient, config.getDecoder(), encodingCache, config.isWriteEncodingHeader()); + return new JsonStringDeserializer(groupId, schemaRegistryClient, config.getDecoders(), encodingCache, config.isWriteEncodingHeader()); } static Serializer multiTypeSerializer( @@ -82,7 +82,7 @@ static Serializer 
multiTypeSerializer( SchemaRegistryClient schemaRegistryClient = initForSerializer(config); Map, AbstractSerializer> serializerMap = schemas .entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, - x -> new JsonSerializer<>(groupId, schemaRegistryClient, x.getValue(), config.getCodec(), + x -> new JsonSerializer<>(groupId, schemaRegistryClient, x.getValue(), config.getEncoder(), config.isRegisterSchema(), config.isWriteEncodingHeader()))); return new MultiplexedSerializer<>(serializerMap); } @@ -99,10 +99,10 @@ static Serializer multiTypeDeserializer( Map> deserializerMap = schemas .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), - x -> new JsonDeserializer<>(groupId, schemaRegistryClient, x, config.getDecoder(), + x -> new JsonDeserializer<>(groupId, schemaRegistryClient, x, config.getDecoders(), encodingCache, config.isWriteEncodingHeader()))); return new MultiplexedDeserializer<>(groupId, schemaRegistryClient, - deserializerMap, config.getDecoder(), encodingCache); + deserializerMap, config.getDecoders(), encodingCache); } static Serializer>> typedOrGenericDeserializer( @@ -117,12 +117,12 @@ static Serializer>> typedOrGenericDeserialize Map> deserializerMap = schemas .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), - x -> new JsonDeserializer<>(groupId, schemaRegistryClient, x, config.getDecoder(), encodingCache, + x -> new JsonDeserializer<>(groupId, schemaRegistryClient, x, config.getDecoders(), encodingCache, config.isWriteEncodingHeader()))); - JsonGenericDeserializer genericDeserializer = new JsonGenericDeserializer(groupId, schemaRegistryClient, config.getDecoder(), + JsonGenericDeserializer genericDeserializer = new JsonGenericDeserializer(groupId, schemaRegistryClient, config.getDecoders(), encodingCache, config.isWriteEncodingHeader()); return new MultiplexedAndGenericDeserializer<>(groupId, schemaRegistryClient, - deserializerMap, genericDeserializer, config.getDecoder(), encodingCache); + deserializerMap, genericDeserializer, config.getDecoders(), encodingCache); } } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonStringDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonStringDeserializer.java index fcc1bfef7..5add11d9b 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonStringDeserializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonStringDeserializer.java @@ -22,8 +22,8 @@ class JsonStringDeserializer extends AbstractDeserializer { private final ObjectMapper objectMapper; JsonStringDeserializer(String groupId, SchemaRegistryClient client, - SerializerConfig.Decoder decoder, EncodingCache encodingCache, boolean encodeHeader) { - super(groupId, client, null, false, decoder, encodingCache, encodeHeader); + SerializerConfig.Decoders decoders, EncodingCache encodingCache, boolean encodeHeader) { + super(groupId, client, null, false, decoders, encodingCache, encodeHeader); this.objectMapper = new ObjectMapper(); objectMapper.setVisibility(PropertyAccessor.ALL, JsonAutoDetect.Visibility.ANY); objectMapper.setVisibility(PropertyAccessor.FIELD, JsonAutoDetect.Visibility.ANY); diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatSerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatSerializerFactory.java index 06ae68a56..8b1766923 100644 --- 
a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatSerializerFactory.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatSerializerFactory.java @@ -99,10 +99,10 @@ private static Serializer deserializeAsTInternal(SerializerConfig config, EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); AbstractDeserializer json = new JsonGenericDeserializer(config.getGroupId(), schemaRegistryClient, - config.getDecoder(), encodingCache, config.isWriteEncodingHeader()); - AbstractDeserializer protobuf = new ProtobufGenericDeserlizer(groupId, schemaRegistryClient, null, config.getDecoder(), + config.getDecoders(), encodingCache, config.isWriteEncodingHeader()); + AbstractDeserializer protobuf = new ProtobufGenericDeserlizer(groupId, schemaRegistryClient, null, config.getDecoders(), encodingCache, config.isWriteEncodingHeader()); - AbstractDeserializer avro = new AvroGenericDeserlizer(groupId, schemaRegistryClient, null, config.getDecoder(), + AbstractDeserializer avro = new AvroGenericDeserializer(groupId, schemaRegistryClient, null, config.getDecoders(), encodingCache); Map map = new HashMap<>(); @@ -112,7 +112,7 @@ private static Serializer deserializeAsTInternal(SerializerConfig config, deserializers.forEach((key, value) -> { map.put(key, new AbstractDeserializer(groupId, schemaRegistryClient, null, false, - config.getDecoder(), encodingCache, config.isWriteEncodingHeader()) { + config.getDecoders(), encodingCache, config.isWriteEncodingHeader()) { @Override protected Object deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) { return value.deserialize(inputStream, writerSchema, readerSchema); @@ -120,7 +120,7 @@ protected Object deserialize(InputStream inputStream, SchemaInfo writerSchema, S }); }); - return new MultipleFormatDeserializer<>(groupId, schemaRegistryClient, map, config.getDecoder(), + return new MultipleFormatDeserializer<>(groupId, schemaRegistryClient, map, config.getDecoders(), encodingCache, transform); } @@ -131,10 +131,10 @@ private static Serializer> deserializerInternal(SerializerConf EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); AbstractDeserializer json = new JsonGenericDeserializer(config.getGroupId(), schemaRegistryClient, - config.getDecoder(), encodingCache, config.isWriteEncodingHeader()); - AbstractDeserializer protobuf = new ProtobufGenericDeserlizer(groupId, schemaRegistryClient, null, config.getDecoder(), + config.getDecoders(), encodingCache, config.isWriteEncodingHeader()); + AbstractDeserializer protobuf = new ProtobufGenericDeserlizer(groupId, schemaRegistryClient, null, config.getDecoders(), encodingCache, config.isWriteEncodingHeader()); - AbstractDeserializer avro = new AvroGenericDeserlizer(groupId, schemaRegistryClient, null, config.getDecoder(), + AbstractDeserializer avro = new AvroGenericDeserializer(groupId, schemaRegistryClient, null, config.getDecoders(), encodingCache); Map map = new HashMap<>(); @@ -144,7 +144,7 @@ private static Serializer> deserializerInternal(SerializerConf deserializers.forEach((key, value) -> { map.put(key, new AbstractDeserializer(groupId, schemaRegistryClient, null, false, - config.getDecoder(), encodingCache, config.isWriteEncodingHeader()) { + config.getDecoders(), encodingCache, config.isWriteEncodingHeader()) { @Override protected Object deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) { return value.deserialize(inputStream, 
writerSchema, readerSchema); @@ -152,7 +152,7 @@ protected Object deserialize(InputStream inputStream, SchemaInfo writerSchema, S }); }); - return new MultiFormatWithSchemaDeserializer<>(groupId, schemaRegistryClient, map, config.getDecoder(), + return new MultiFormatWithSchemaDeserializer<>(groupId, schemaRegistryClient, map, config.getDecoders(), encodingCache, transform); } @@ -163,14 +163,14 @@ private static AbstractSerializer getPravegaSerializer( switch (schemaInfo.getSerializationFormat()) { case Avro: return new AvroSerializer<>(groupId, schemaRegistryClient, - AvroSchema.from(schemaInfo), config.getCodec(), config.isRegisterSchema()); + AvroSchema.from(schemaInfo), config.getEncoder(), config.isRegisterSchema()); case Protobuf: ProtobufSerializer m = new ProtobufSerializer<>(groupId, schemaRegistryClient, - ProtobufSchema.from(schemaInfo), config.getCodec(), config.isRegisterSchema(), config.isWriteEncodingHeader()); + ProtobufSchema.from(schemaInfo), config.getEncoder(), config.isRegisterSchema(), config.isWriteEncodingHeader()); return (AbstractSerializer) m; case Json: return new JsonSerializer<>(groupId, schemaRegistryClient, JSONSchema.from(schemaInfo), - config.getCodec(), config.isRegisterSchema(), config.isWriteEncodingHeader()); + config.getEncoder(), config.isRegisterSchema(), config.isWriteEncodingHeader()); case Custom: return getCustomSerializer(config, customSerializers, schemaRegistryClient, groupId, schemaInfo); default: @@ -184,7 +184,7 @@ private static AbstractSerializer getCustomSerializer( if (customSerializers.containsKey(schemaInfo.getSerializationFormat())) { CustomSerializer serializer = customSerializers.get(schemaInfo.getSerializationFormat()); return new AbstractSerializer(groupId, schemaRegistryClient, - () -> schemaInfo, config.getCodec(), config.isRegisterSchema(), config.isWriteEncodingHeader()) { + () -> schemaInfo, config.getEncoder(), config.isRegisterSchema(), config.isWriteEncodingHeader()) { @Override protected void serialize(Object var, SchemaInfo schema, OutputStream outputStream) { serializer.serialize(var, schema, outputStream); diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatWithSchemaDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatWithSchemaDeserializer.java index e2b30f1a4..93e2e7f4d 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatWithSchemaDeserializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatWithSchemaDeserializer.java @@ -25,9 +25,9 @@ class MultiFormatWithSchemaDeserializer extends AbstractDeserializer genericDeserializers, - SerializerConfig.Decoder decoder, + SerializerConfig.Decoders decoders, EncodingCache encodingCache, BiFunction transform) { - super(groupId, client, null, false, decoder, encodingCache, true); + super(groupId, client, null, false, decoders, encodingCache, true); this.genericDeserializers = genericDeserializers; this.transform = transform; } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatDeserializer.java index 99ca02174..b47e5c9a4 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatDeserializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultipleFormatDeserializer.java @@ -25,9 +25,9 @@ class MultipleFormatDeserializer extends 
AbstractDeserializer { MultipleFormatDeserializer(String groupId, SchemaRegistryClient client, Map genericDeserializers, - SerializerConfig.Decoder decoder, + SerializerConfig.Decoders decoders, EncodingCache encodingCache, BiFunction transform) { - super(groupId, client, null, false, decoder, encodingCache, true); + super(groupId, client, null, false, decoders, encodingCache, true); this.genericDeserializers = genericDeserializers; this.transform = transform; } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedAndGenericDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedAndGenericDeserializer.java index 779df458f..1baf69f21 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedAndGenericDeserializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedAndGenericDeserializer.java @@ -25,9 +25,9 @@ class MultiplexedAndGenericDeserializer extends AbstractDeserializer> deserializers, AbstractDeserializer genericDeserializer, - SerializerConfig.Decoder decoder, + SerializerConfig.Decoders decoders, EncodingCache encodingCache) { - super(groupId, client, null, false, decoder, encodingCache, true); + super(groupId, client, null, false, decoders, encodingCache, true); this.deserializers = deserializers; this.genericDeserializer = genericDeserializer; } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedDeserializer.java index ee3c7250e..d10c5af21 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedDeserializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiplexedDeserializer.java @@ -29,9 +29,9 @@ class MultiplexedDeserializer extends AbstractDeserializer { MultiplexedDeserializer(String groupId, SchemaRegistryClient client, Map> deserializers, - SerializerConfig.Decoder decoder, + SerializerConfig.Decoders decoders, EncodingCache encodingCache) { - super(groupId, client, null, false, decoder, encodingCache, true); + super(groupId, client, null, false, decoders, encodingCache, true); this.deserializers = deserializers; } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufDeserlizer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufDeserializer.java similarity index 79% rename from serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufDeserlizer.java rename to serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufDeserializer.java index 857e9099c..f0756bb2b 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufDeserlizer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufDeserializer.java @@ -18,11 +18,11 @@ import java.io.InputStream; -public class ProtobufDeserlizer extends AbstractDeserializer { +public class ProtobufDeserializer extends AbstractDeserializer { private final ProtobufSchema protobufSchema; - ProtobufDeserlizer(String groupId, SchemaRegistryClient client, - ProtobufSchema schema, SerializerConfig.Decoder decoder, - EncodingCache encodingCache, boolean encodeHeader) { + ProtobufDeserializer(String groupId, SchemaRegistryClient client, + ProtobufSchema schema, SerializerConfig.Decoders decoder, + EncodingCache encodingCache, boolean encodeHeader) { super(groupId, client, schema, 
true, decoder, encodingCache, encodeHeader); Preconditions.checkNotNull(schema); this.protobufSchema = schema; diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufGenericDeserlizer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufGenericDeserlizer.java index 428e01bab..ac70046f2 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufGenericDeserlizer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufGenericDeserlizer.java @@ -29,7 +29,7 @@ public class ProtobufGenericDeserlizer extends AbstractDeserializer knownSchemas; ProtobufGenericDeserlizer(String groupId, SchemaRegistryClient client, @Nullable ProtobufSchema schema, - SerializerConfig.Decoder decoder, EncodingCache encodingCache, boolean encodeHeader) { + SerializerConfig.Decoders decoder, EncodingCache encodingCache, boolean encodeHeader) { super(groupId, client, schema, false, decoder, encodingCache, encodeHeader); Preconditions.checkArgument(isEncodeHeader() || schema != null); knownSchemas = new ConcurrentHashMap<>(); diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufSerializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufSerializer.java index 880e76433..13954eca4 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufSerializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufSerializer.java @@ -11,7 +11,7 @@ import com.google.protobuf.Message; import io.pravega.schemaregistry.client.SchemaRegistryClient; -import io.pravega.schemaregistry.codec.Codec; +import io.pravega.schemaregistry.codec.Encoder; import io.pravega.schemaregistry.contract.data.SchemaInfo; import io.pravega.schemaregistry.schemas.ProtobufSchema; @@ -20,8 +20,8 @@ class ProtobufSerializer extends AbstractSerializer { ProtobufSerializer(String groupId, SchemaRegistryClient client, ProtobufSchema schema, - Codec codec, boolean registerSchema, boolean encodeHeader) { - super(groupId, client, schema, codec, registerSchema, encodeHeader); + Encoder encoder, boolean registerSchema, boolean encodeHeader) { + super(groupId, client, schema, encoder, registerSchema, encodeHeader); } @Override diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufSerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufSerializerFactory.java index 9f08b5df2..40ca25185 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufSerializerFactory.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufSerializerFactory.java @@ -37,7 +37,7 @@ static Serializer serializer(SerializerConfig config, Preconditions.checkNotNull(schema); String groupId = config.getGroupId(); SchemaRegistryClient schemaRegistryClient = initForSerializer(config); - return new ProtobufSerializer<>(groupId, schemaRegistryClient, schema, config.getCodec(), + return new ProtobufSerializer<>(groupId, schemaRegistryClient, schema, config.getEncoder(), config.isRegisterSchema(), config.isWriteEncodingHeader()); } @@ -51,7 +51,7 @@ static Serializer deserializer(SerializerConfi EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); // schema can be null in which case deserialization will happen into dynamic message - return new ProtobufDeserlizer<>(groupId, schemaRegistryClient, schema, config.getDecoder(), encodingCache, + 
return new ProtobufDeserializer<>(groupId, schemaRegistryClient, schema, config.getDecoders(), encodingCache, config.isWriteEncodingHeader()); } @@ -64,7 +64,7 @@ static Serializer genericDeserializer(SerializerConfig config, @ String groupId = config.getGroupId(); EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - return new ProtobufGenericDeserlizer(groupId, schemaRegistryClient, schema, config.getDecoder(), encodingCache, + return new ProtobufGenericDeserlizer(groupId, schemaRegistryClient, schema, config.getDecoders(), encodingCache, config.isWriteEncodingHeader()); } @@ -78,7 +78,7 @@ static Serializer multiTypeSerializer( Map, AbstractSerializer> serializerMap = schemas .entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, - x -> new ProtobufSerializer<>(groupId, schemaRegistryClient, x.getValue(), config.getCodec(), + x -> new ProtobufSerializer<>(groupId, schemaRegistryClient, x.getValue(), config.getEncoder(), config.isRegisterSchema(), config.isWriteEncodingHeader()))); return new MultiplexedSerializer<>(serializerMap); } @@ -95,9 +95,9 @@ static Serializer multiTypeDeserializer( Map> deserializerMap = schemas .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), - x -> new ProtobufDeserlizer<>(groupId, schemaRegistryClient, x, config.getDecoder(), encodingCache, + x -> new ProtobufDeserializer<>(groupId, schemaRegistryClient, x, config.getDecoders(), encodingCache, config.isWriteEncodingHeader()))); - return new MultiplexedDeserializer<>(groupId, schemaRegistryClient, deserializerMap, config.getDecoder(), encodingCache); + return new MultiplexedDeserializer<>(groupId, schemaRegistryClient, deserializerMap, config.getDecoders(), encodingCache); } static Serializer> typedOrGenericDeserializer( @@ -112,11 +112,11 @@ static Serializer> type Map> deserializerMap = schemas .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), - x -> new ProtobufDeserlizer<>(groupId, schemaRegistryClient, x, config.getDecoder(), encodingCache, + x -> new ProtobufDeserializer<>(groupId, schemaRegistryClient, x, config.getDecoders(), encodingCache, config.isWriteEncodingHeader()))); ProtobufGenericDeserlizer genericDeserializer = new ProtobufGenericDeserlizer(groupId, schemaRegistryClient, null, - config.getDecoder(), encodingCache, config.isWriteEncodingHeader()); + config.getDecoders(), encodingCache, config.isWriteEncodingHeader()); return new MultiplexedAndGenericDeserializer<>(groupId, schemaRegistryClient, deserializerMap, genericDeserializer, - config.getDecoder(), encodingCache); + config.getDecoders(), encodingCache); } } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java index c562ca553..b5af52ede 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java @@ -14,6 +14,9 @@ import io.pravega.schemaregistry.client.SchemaRegistryClient; import io.pravega.schemaregistry.client.SchemaRegistryClientConfig; import io.pravega.schemaregistry.codec.Codec; +import io.pravega.schemaregistry.codec.Codecs; +import io.pravega.schemaregistry.codec.Decoder; +import io.pravega.schemaregistry.codec.Encoder; import io.pravega.schemaregistry.common.Either; import io.pravega.schemaregistry.contract.data.CodecType; import 
io.pravega.schemaregistry.contract.data.Compatibility; @@ -28,10 +31,9 @@ import java.io.IOException; import java.nio.ByteBuffer; -import java.util.HashSet; +import java.util.Map; import java.util.Set; -import java.util.function.BiFunction; -import java.util.function.Function; +import java.util.concurrent.ConcurrentHashMap; /** * Serializer Config class that is passed to {@link SerializerFactory} for creating serializer. @@ -72,13 +74,15 @@ public class SerializerConfig { /** * Codec to use for encoding events after serializing them. */ - private final Codec codec; + private final Encoder encoder; /** * Function that should be applied on serialized data read from stream. This is invoked after reading the codecType * from {@link EncodingInfo} and using the codec type read from it. * It should return the decoded data back to the deserializer. + * Use {@link SerializerConfigBuilder#decoder(String, Decoder)} to add decoders. + * Any number of decoders can be added. */ - private final Decoder decoder; + private final Decoders decoders; /** * Tells the deserializer that if supplied decoder codecTypes do not match group codecTypes then fail and exit upfront. */ @@ -102,7 +106,7 @@ public class SerializerConfig { private final boolean writeEncodingHeader; private SerializerConfig(String groupId, SchemaRegistryClientConfig config, SchemaRegistryClient client, - boolean registerSchema, boolean registerCodec, Codec codec, Decoder decoder, boolean failOnCodecMismatch, + boolean registerSchema, boolean registerCodec, Codec encoder, Decoders decoders, boolean failOnCodecMismatch, GroupProperties createGroup, boolean writeEncodingHeader) { Preconditions.checkArgument(!Strings.isNullOrEmpty(groupId), "Group id needs to be supplied"); Preconditions.checkArgument(client != null || config != null, "Either registry client or config needs to be supplied"); @@ -111,8 +115,8 @@ private SerializerConfig(String groupId, SchemaRegistryClientConfig config, Sche this.registryConfig = config; this.registerSchema = registerSchema; this.registerCodec = registerCodec; - this.codec = codec; - this.decoder = decoder; + this.encoder = encoder; + this.decoders = decoders; this.failOnCodecMismatch = failOnCodecMismatch; this.createGroup = createGroup; this.writeEncodingHeader = writeEncodingHeader; @@ -135,9 +139,9 @@ GroupProperties getGroupProperties() { } public static final class SerializerConfigBuilder { - private Codec codec = Codecs.None.getCodec(); + private Codec encoder = Codecs.None.getCodec(); - private Decoder decoder = new Decoder(); + private Decoders decoders = new Decoders(); private boolean registerSchema = false; private boolean registerCodec = false; @@ -147,14 +151,25 @@ public static final class SerializerConfigBuilder { private SchemaRegistryClient registryClient = null; /** - * Add codec type to corresponding decoder function which will be used to decode data encoded using encoding type codecType. - * - * @param codecType codec type used for encoding. - * @param decoder decoder function to use for decoding the data. + * Add a decoder for decoding data encoded with the {@link Codec#getCodecType()}. + * + * @param name Name of codec from {@link CodecType#getName()}. + * @param decoder decoder implementation to use for decoding data encoded with the {@link Codec#getCodecType()}. * @return Builder. 
*/ - public SerializerConfigBuilder addDecoder(CodecType codecType, Function decoder) { - this.decoder = new Decoder(codecType, decoder); + public SerializerConfigBuilder decoder(String name, Decoder decoder) { + this.decoders.add(name, decoder); + return this; + } + + /** + * Add multiple decoders. + * + * @param decoders map of codec name to decoder for the codec. + * @return Builder. + */ + public SerializerConfigBuilder decoders(Map decoders) { + this.decoders.addAll(decoders); return this; } @@ -223,58 +238,35 @@ public SerializerConfigBuilder registryConfig(SchemaRegistryClientConfig config) this.registryConfig = config; return this; } - - // writeEncoding header vs codec vs decoder - // if codec is supplied } - static class Decoder { - private static final BiFunction DEFAULT = (x, y) -> { - try { - switch (x.getName()) { - case Codecs.Constants.NONE: - return Codecs.None.getCodec().decode(y); - case Codecs.Constants.APPLICATION_X_GZIP: - return Codecs.GzipCompressor.getCodec().decode(y); - case Codecs.Constants.APPLICATION_X_SNAPPY_FRAMED: - return Codecs.SnappyCompressor.getCodec().decode(y); - default: - throw new IllegalArgumentException("Unknown codec"); - } - } catch (IOException ex) { - throw new RuntimeException(ex); - } - }; + static class Decoders { + private final ConcurrentHashMap decoders; - @Getter(AccessLevel.PACKAGE) - private final Set codecTypes; - private final BiFunction decoder; + Decoders() { + this.decoders = new ConcurrentHashMap<>(); + this.decoders.put(Codecs.None.getCodec().getName(), Codecs.None.getCodec()); + this.decoders.put(Codecs.GzipCompressor.getCodec().getName(), Codecs.GzipCompressor.getCodec()); + this.decoders.put(Codecs.SnappyCompressor.getCodec().getName(), Codecs.SnappyCompressor.getCodec()); + } + + private void add(String codecName, Decoder decoder) { + Preconditions.checkNotNull(codecName); + Preconditions.checkNotNull(decoder); + decoders.put(codecName, decoder); + } - private Decoder(CodecType codecType, Function decoder) { - this.decoder = (x, y) -> { - if (x.equals(codecType)) { - return decoder.apply(y); - } else { - return DEFAULT.apply(x, y); - } - }; - codecTypes = new HashSet<>(); - this.codecTypes.add(Codecs.None.getCodecType()); - this.codecTypes.add(Codecs.GzipCompressor.getCodecType()); - this.codecTypes.add(Codecs.SnappyCompressor.getCodecType()); - this.codecTypes.add(codecType); + private void addAll(Map decoders) { + Preconditions.checkNotNull(decoders); + this.decoders.putAll(decoders); } - private Decoder() { - this.decoder = DEFAULT; - codecTypes = new HashSet<>(); - this.codecTypes.add(Codecs.None.getCodecType()); - this.codecTypes.add(Codecs.GzipCompressor.getCodecType()); - this.codecTypes.add(Codecs.SnappyCompressor.getCodecType()); + ByteBuffer decode(CodecType codecType, ByteBuffer bytes) throws IOException { + return decoders.get(codecType.getName()).decode(bytes, codecType.getProperties()); } - ByteBuffer decode(CodecType codecType, ByteBuffer bytes) { - return decoder.apply(codecType, bytes); + Set getDecoderNames() { + return decoders.keySet(); } } } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactoryHelper.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactoryHelper.java index 2145b17ae..ce0e4d0f5 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactoryHelper.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactoryHelper.java @@ -15,6 +15,7 @@ import 
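
A brief illustrative sketch of the new Decoders builder API introduced above, not part of the patch itself: it assumes the usual lombok-style SerializerConfig.builder(), groupId(...) and build() methods (not shown in this hunk), and that Decoder is a single-method interface taking the encoded ByteBuffer plus the codec properties map, as the Decoders.decode call above suggests; the codec name and the pass-through lambda are purely hypothetical.

    // Illustrative sketch (not part of the patch): registering a custom decoder.
    import io.pravega.schemaregistry.client.SchemaRegistryClientConfig;
    import io.pravega.schemaregistry.serializers.SerializerConfig;

    class DecodersUsageSketch {
        static SerializerConfig buildConfig(SchemaRegistryClientConfig registryConfig) {
            return SerializerConfig.builder()
                    .groupId("my-group")                        // assumed lombok-generated builder field
                    .registryConfig(registryConfig)             // builder method shown in this file
                    // "application/x-custom" is a hypothetical codec name; the lambda is a
                    // pass-through decoder that returns the buffer unchanged, assuming Decoder
                    // is a functional interface (ByteBuffer, Map<String, String>) -> ByteBuffer.
                    .decoder("application/x-custom", (data, codecProperties) -> data)
                    .build();
        }
    }
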
lombok.extern.slf4j.Slf4j; import java.util.List; +import java.util.stream.Collectors; @Slf4j class SerializerFactoryHelper { @@ -47,14 +48,15 @@ private static void createGroup(SchemaRegistryClient client, SerializerConfig co private static void registerCodec(SchemaRegistryClient client, SerializerConfig config) { if (config.isRegisterCodec()) { - client.addCodecType(config.getGroupId(), config.getCodec().getCodecType()); + client.addCodecType(config.getGroupId(), config.getEncoder().getCodecType()); } } private static void failOnCodecMismatch(SchemaRegistryClient client, SerializerConfig config) { if (config.isFailOnCodecMismatch()) { - List codecTypesInGroup = client.getCodecTypes(config.getGroupId()); - if (!config.getDecoder().getCodecTypes().containsAll(codecTypesInGroup)) { + List codecTypesInGroup = client.getCodecTypes(config.getGroupId()).stream() + .map(CodecType::getName).collect(Collectors.toList()); + if (!config.getDecoders().getDecoderNames().containsAll(codecTypesInGroup)) { log.warn("Not all CodecTypes are supported by reader. Required codecTypes = {}", codecTypesInGroup); throw new RuntimeException(String.format("Need all codecTypes in %s", codecTypesInGroup.toString())); } diff --git a/serializers/src/test/java/io/pravega/schemaregistry/GroupIdTest.java b/serializers/src/test/java/io/pravega/schemaregistry/GroupIdTest.java deleted file mode 100644 index 846337065..000000000 --- a/serializers/src/test/java/io/pravega/schemaregistry/GroupIdTest.java +++ /dev/null @@ -1,27 +0,0 @@ -/** - * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - */ -package io.pravega.schemaregistry; - -import com.google.common.base.Charsets; -import org.junit.Test; - -import java.io.UnsupportedEncodingException; -import java.net.URLDecoder; - -import static org.junit.Assert.assertEquals; - -public class GroupIdTest { - @Test - public void testGroupId() throws UnsupportedEncodingException { - String groupId = GroupIdGenerator.getGroupId(GroupIdGenerator.Scheme.QualifiedStreamName, "scope", "stream"); - - assertEquals(URLDecoder.decode(groupId, Charsets.UTF_8.toString()), "scope/stream"); - } -} diff --git a/serializers/src/test/java/io/pravega/schemaregistry/codec/CodecTest.java b/serializers/src/test/java/io/pravega/schemaregistry/codec/CodecTest.java index d1c84e4ab..0958dc185 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/codec/CodecTest.java +++ b/serializers/src/test/java/io/pravega/schemaregistry/codec/CodecTest.java @@ -10,7 +10,7 @@ package io.pravega.schemaregistry.codec; import com.google.common.base.Charsets; -import io.pravega.schemaregistry.serializers.Codecs; +import com.google.common.collect.ImmutableMap; import org.junit.Test; import java.io.IOException; @@ -26,24 +26,24 @@ public class CodecTest { public void testCodec() throws IOException { byte[] testStringBytes = "this is a test string".getBytes(Charsets.UTF_8); Codec snappy = Codecs.SnappyCompressor.getCodec(); - assertEquals(snappy.getCodecType(), Codecs.SnappyCompressor.getCodecType()); + assertEquals(snappy.getCodecType(), Codecs.SnappyCompressor.getCodec().getCodecType()); ByteBuffer encoded = snappy.encode(ByteBuffer.wrap(testStringBytes)); assertFalse(Arrays.equals(encoded.array(), testStringBytes)); - ByteBuffer decoded = snappy.decode(encoded); + 
ByteBuffer decoded = snappy.decode(encoded, ImmutableMap.of()); assertTrue(Arrays.equals(decoded.array(), testStringBytes)); Codec gzip = Codecs.GzipCompressor.getCodec(); - assertEquals(gzip.getCodecType(), Codecs.GzipCompressor.getCodecType()); + assertEquals(gzip.getCodecType(), Codecs.GzipCompressor.getCodec().getCodecType()); encoded = gzip.encode(ByteBuffer.wrap(testStringBytes)); assertFalse(Arrays.equals(encoded.array(), testStringBytes)); - decoded = gzip.decode(encoded); + decoded = gzip.decode(encoded, ImmutableMap.of()); assertTrue(Arrays.equals(decoded.array(), testStringBytes)); Codec none = Codecs.None.getCodec(); - assertEquals(none.getCodecType(), Codecs.None.getCodecType()); + assertEquals(none.getCodecType(), Codecs.None.getCodec().getCodecType()); encoded = none.encode(ByteBuffer.wrap(testStringBytes)); assertTrue(Arrays.equals(encoded.array(), testStringBytes)); - decoded = none.decode(encoded); + decoded = none.decode(encoded, ImmutableMap.of()); assertTrue(Arrays.equals(decoded.array(), testStringBytes)); } } diff --git a/serializers/src/test/java/io/pravega/schemaregistry/serializers/CacheTest.java b/serializers/src/test/java/io/pravega/schemaregistry/serializers/CacheTest.java index 8e7bfed7d..283449552 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/serializers/CacheTest.java +++ b/serializers/src/test/java/io/pravega/schemaregistry/serializers/CacheTest.java @@ -11,6 +11,7 @@ import com.google.common.collect.ImmutableMap; import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.codec.Codecs; import io.pravega.schemaregistry.contract.data.EncodingId; import io.pravega.schemaregistry.contract.data.EncodingInfo; import io.pravega.schemaregistry.contract.data.SchemaInfo; diff --git a/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java b/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java index 95134ba2e..9bbc0f0ce 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java +++ b/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java @@ -22,6 +22,7 @@ import com.google.protobuf.GeneratedMessageV3; import io.pravega.client.stream.Serializer; import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.codec.Codecs; import io.pravega.schemaregistry.common.Either; import io.pravega.schemaregistry.contract.data.EncodingId; import io.pravega.schemaregistry.contract.data.EncodingInfo; @@ -79,15 +80,15 @@ public void testAvroSerializers() { doAnswer(x -> versionInfo2).when(client).getVersionForSchema(anyString(), eq(schema2.getSchemaInfo())); doAnswer(x -> new EncodingId(0)).when(client).getEncodingId(anyString(), eq(versionInfo1), any()); doAnswer(x -> new EncodingId(1)).when(client).getEncodingId(anyString(), eq(versionInfo2), any()); - doAnswer(x -> new EncodingInfo(versionInfo1, schema1.getSchemaInfo(), Codecs.None.getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(0))); - doAnswer(x -> new EncodingInfo(versionInfo2, schema2.getSchemaInfo(), Codecs.None.getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(1))); + doAnswer(x -> new EncodingInfo(versionInfo1, schema1.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(0))); + doAnswer(x -> new EncodingInfo(versionInfo2, schema2.getSchemaInfo(), 
Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(1))); doAnswer(x -> true).when(client).canReadUsing(anyString(), any()); AvroSchema of = AvroSchema.of(SchemaDefinitions.ENUM); VersionInfo versionInfo3 = new VersionInfo(of.getSchema().getFullName(), 0, 2); doAnswer(x -> versionInfo3).when(client).getVersionForSchema(anyString(), eq(of.getSchemaInfo())); doAnswer(x -> new EncodingId(2)).when(client).getEncodingId(anyString(), eq(versionInfo3), any()); - doAnswer(x -> new EncodingInfo(versionInfo3, of.getSchemaInfo(), Codecs.None.getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(2))); + doAnswer(x -> new EncodingInfo(versionInfo3, of.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(2))); Serializer serializerStr = SerializerFactory.avroSerializer(config, of); GenericData.EnumSymbol enumSymbol = new GenericData.EnumSymbol(of.getSchema(), "a"); @@ -160,7 +161,7 @@ public void testAvroSerializersReflect() { .when(client).getGroupProperties(anyString()); doAnswer(x -> versionInfo1).when(client).getVersionForSchema(anyString(), eq(schema1.getSchemaInfo())); doAnswer(x -> new EncodingId(0)).when(client).getEncodingId(anyString(), eq(versionInfo1), any()); - doAnswer(x -> new EncodingInfo(versionInfo1, schema1.getSchemaInfo(), Codecs.None.getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(0))); + doAnswer(x -> new EncodingInfo(versionInfo1, schema1.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(0))); doAnswer(x -> true).when(client).canReadUsing(anyString(), any()); Serializer serializer = SerializerFactory.avroSerializer(config, schema1); @@ -189,8 +190,8 @@ public void testProtobufSerializers() throws IOException { doAnswer(x -> versionInfo2).when(client).getVersionForSchema(anyString(), eq(schema2.getSchemaInfo())); doAnswer(x -> new EncodingId(0)).when(client).getEncodingId(anyString(), eq(versionInfo1), any()); doAnswer(x -> new EncodingId(1)).when(client).getEncodingId(anyString(), eq(versionInfo2), any()); - doAnswer(x -> new EncodingInfo(versionInfo1, schema1.getSchemaInfo(), Codecs.None.getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(0))); - doAnswer(x -> new EncodingInfo(versionInfo2, schema2.getSchemaInfo(), Codecs.None.getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(1))); + doAnswer(x -> new EncodingInfo(versionInfo1, schema1.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(0))); + doAnswer(x -> new EncodingInfo(versionInfo2, schema2.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(1))); doAnswer(x -> true).when(client).canReadUsing(anyString(), any()); Serializer serializer = SerializerFactory.protobufSerializer(config, schema1); @@ -253,8 +254,8 @@ public void testJsonSerializers() throws JsonProcessingException { doAnswer(x -> versionInfo2).when(client).getVersionForSchema(anyString(), eq(schema2.getSchemaInfo())); doAnswer(x -> new EncodingId(0)).when(client).getEncodingId(anyString(), eq(versionInfo1), any()); doAnswer(x -> new EncodingId(1)).when(client).getEncodingId(anyString(), eq(versionInfo2), any()); - doAnswer(x -> new EncodingInfo(versionInfo1, schema1.getSchemaInfo(), Codecs.None.getCodecType())).when(client).getEncodingInfo(anyString(), eq(new 
EncodingId(0))); - doAnswer(x -> new EncodingInfo(versionInfo2, schema2.getSchemaInfo(), Codecs.None.getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(1))); + doAnswer(x -> new EncodingInfo(versionInfo1, schema1.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(0))); + doAnswer(x -> new EncodingInfo(versionInfo2, schema2.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(1))); doAnswer(x -> true).when(client).canReadUsing(anyString(), any()); Serializer serializer = SerializerFactory.jsonSerializer(config, schema1); @@ -282,7 +283,7 @@ public void testJsonSerializers() throws JsonProcessingException { VersionInfo versionInfo3 = new VersionInfo("myData", 0, 2); doAnswer(x -> versionInfo3).when(client).getVersionForSchema(anyString(), eq(myData.getSchemaInfo())); doAnswer(x -> new EncodingId(2)).when(client).getEncodingId(anyString(), eq(versionInfo3), any()); - doAnswer(x -> new EncodingInfo(versionInfo3, myData.getSchemaInfo(), Codecs.None.getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(2))); + doAnswer(x -> new EncodingInfo(versionInfo3, myData.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(2))); Serializer serializer2 = SerializerFactory.jsonSerializer(config, myData); Map jsonObject = new HashMap<>(); @@ -297,7 +298,7 @@ public void testJsonSerializers() throws JsonProcessingException { VersionInfo versionInfo4 = new VersionInfo("myData", 0, 3); doAnswer(x -> versionInfo4).when(client).getVersionForSchema(anyString(), eq(strSchema.getSchemaInfo())); doAnswer(x -> new EncodingId(3)).when(client).getEncodingId(anyString(), eq(versionInfo4), any()); - doAnswer(x -> new EncodingInfo(versionInfo4, strSchema.getSchemaInfo(), Codecs.None.getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(3))); + doAnswer(x -> new EncodingInfo(versionInfo4, strSchema.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(3))); Serializer serializer3 = SerializerFactory.jsonSerializer(config, strSchema); Serializer deserializer3 = SerializerFactory.jsonDeserializer(config, strSchema); @@ -367,9 +368,9 @@ public void testMultiformatDeserializers() throws IOException { doAnswer(x -> new EncodingId(0)).when(client).getEncodingId(anyString(), eq(versionInfo1), any()); doAnswer(x -> new EncodingId(1)).when(client).getEncodingId(anyString(), eq(versionInfo2), any()); doAnswer(x -> new EncodingId(2)).when(client).getEncodingId(anyString(), eq(versionInfo3), any()); - doAnswer(x -> new EncodingInfo(versionInfo1, schema1.getSchemaInfo(), Codecs.None.getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(0))); - doAnswer(x -> new EncodingInfo(versionInfo2, schema2.getSchemaInfo(), Codecs.None.getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(1))); - doAnswer(x -> new EncodingInfo(versionInfo3, schema3.getSchemaInfo(), Codecs.None.getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(2))); + doAnswer(x -> new EncodingInfo(versionInfo1, schema1.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(0))); + doAnswer(x -> new EncodingInfo(versionInfo2, schema2.getSchemaInfo(), 
Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(1))); + doAnswer(x -> new EncodingInfo(versionInfo3, schema3.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(2))); doAnswer(x -> true).when(client).canReadUsing(anyString(), any()); Serializer avroSerializer = SerializerFactory.avroSerializer(config, schema1); From 41175578ba18ea50517e01ab25a6fce8ecf01755 Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Mon, 13 Jul 2020 01:18:25 -0700 Subject: [PATCH 57/70] PR comment Signed-off-by: Shivesh Ranjan --- .../client/SchemaRegistryClient.java | 2 +- .../client/SchemaRegistryClientConfig.java | 5 +-- .../client/SchemaRegistryClientFactory.java | 19 +++++++-- .../client/SchemaRegistryClientImpl.java | 17 ++++++-- .../pravega/schemaregistry/codec/Codecs.java | 23 +++++------ .../pravega/schemaregistry/codec/Encoder.java | 7 ++-- .../serializers/AbstractSerializer.java | 39 +++++++++---------- .../serializers/JsonDeserializer.java | 2 - .../serializers/SerializerConfig.java | 8 +++- .../serializers/SerializerFactoryHelper.java | 2 +- .../schemaregistry/codec/CodecTest.java | 32 +++++++++------ 11 files changed, 91 insertions(+), 65 deletions(-) diff --git a/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java index 75f4b1b86..ead67d9e3 100644 --- a/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java +++ b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClient.java @@ -38,7 +38,7 @@ * The implementation of this interface should provide read-after-write-consistency guarantees for all the methods. */ @Beta -public interface SchemaRegistryClient { +public interface SchemaRegistryClient extends AutoCloseable { /** * Adds a new group. A group refers to the name under which the schemas are registered. A group is identified by a * unique id and has an associated set of group metadata {@link GroupProperties} and a list of codec types and a diff --git a/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientConfig.java b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientConfig.java index 9dff18cc5..2ba913a52 100644 --- a/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientConfig.java +++ b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientConfig.java @@ -24,14 +24,12 @@ public class SchemaRegistryClientConfig { * URI for connecting with registry client. 
*/ private final URI schemaRegistryUri; - private final String namespace; private final boolean authEnabled; private final String authMethod; private final String authToken; - private SchemaRegistryClientConfig(URI schemaRegistryUri, String namespace, boolean authEnabled, String authMethod, String authToken) { + private SchemaRegistryClientConfig(URI schemaRegistryUri, boolean authEnabled, String authMethod, String authToken) { this.schemaRegistryUri = schemaRegistryUri; - this.namespace = namespace; this.authEnabled = authEnabled; this.authMethod = authMethod; this.authToken = authToken; @@ -39,6 +37,5 @@ private SchemaRegistryClientConfig(URI schemaRegistryUri, String namespace, bool public static final class SchemaRegistryClientConfigBuilder { private boolean authEnabled = false; - private String namespace = null; } } diff --git a/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientFactory.java b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientFactory.java index 1d36066b8..054924234 100644 --- a/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientFactory.java +++ b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientFactory.java @@ -14,12 +14,25 @@ */ public class SchemaRegistryClientFactory { /** - * Factory method to create Schema Registry Client. + * Factory method to create Schema Registry Client with default namespace. + * This sets the namespace context to use the default namespace (no namespace). * * @param config Configuration for creating registry client. * @return SchemaRegistry client implementation */ - public static SchemaRegistryClient createRegistryClient(SchemaRegistryClientConfig config) { - return new SchemaRegistryClientImpl(config); + public static SchemaRegistryClient withDefaultNamespace(SchemaRegistryClientConfig config) { + return new SchemaRegistryClientImpl(config, null); + } + + /** + * Factory method to create Schema Registry Client with namespace. + * This sets the namespace context for all calls to registry service. + * + * @param config Configuration for creating registry client. 
+ * @param namespace Namespace + * @return SchemaRegistry client implementation + */ + public static SchemaRegistryClient withNamespace(String namespace, SchemaRegistryClientConfig config) { + return new SchemaRegistryClientImpl(config, namespace); } } diff --git a/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientImpl.java b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientImpl.java index 38b9b8039..912c56813 100644 --- a/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientImpl.java +++ b/client/src/main/java/io/pravega/schemaregistry/client/SchemaRegistryClientImpl.java @@ -74,16 +74,17 @@ public class SchemaRegistryClientImpl implements SchemaRegistryClient { private final ApiV1.GroupsApi groupProxy; private final ApiV1.SchemasApi schemaProxy; private final String namespace; - - SchemaRegistryClientImpl(SchemaRegistryClientConfig config) { - Client client = ClientBuilder.newClient(new ClientConfig()); + private final Client client; + + SchemaRegistryClientImpl(SchemaRegistryClientConfig config, String namespace) { + client = ClientBuilder.newClient(new ClientConfig()); if (config.isAuthEnabled()) { client.register((ClientRequestFilter) context -> { context.getHeaders().add(HttpHeaders.AUTHORIZATION, AuthHelper.getAuthorizationHeader(config.getAuthMethod(), config.getAuthToken())); }); } - this.namespace = config.getNamespace(); + this.namespace = namespace; this.groupProxy = WebResourceFactory.newResource(ApiV1.GroupsApi.class, client.target(config.getSchemaRegistryUri())); this.schemaProxy = WebResourceFactory.newResource(ApiV1.SchemasApi.class, client.target(config.getSchemaRegistryUri())); } @@ -98,6 +99,7 @@ public class SchemaRegistryClientImpl implements SchemaRegistryClient { this.groupProxy = groupProxy; this.schemaProxy = schemaProxy; this.namespace = null; + this.client = null; } @Override @@ -476,4 +478,11 @@ private T handleResponse(Response.Status status, String errorMessage) { throw new InternalServerError(errorMessage); } } + + @Override + public void close() throws Exception { + if (client != null) { + client.close(); + } + } } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/codec/Codecs.java b/serializers/src/main/java/io/pravega/schemaregistry/codec/Codecs.java index 46a756893..e796e16c9 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/codec/Codecs.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/codec/Codecs.java @@ -17,6 +17,7 @@ import java.io.ByteArrayOutputStream; import java.io.IOException; +import java.io.InputStream; import java.nio.ByteBuffer; import java.util.Map; import java.util.zip.GZIPInputStream; @@ -51,8 +52,8 @@ public CodecType getCodecType() { } @Override - public ByteBuffer encode(ByteBuffer data) { - return data; + public void encode(ByteBuffer data, ByteArrayOutputStream bos) { + bos.write(data.array(), data.arrayOffset() + data.position(), data.remaining()); } @Override @@ -74,18 +75,15 @@ public CodecType getCodecType() { } @Override - public ByteBuffer encode(ByteBuffer data) throws IOException { - try (ByteArrayOutputStream bos = new ByteArrayOutputStream(data.remaining())) { - GZIPOutputStream gzipOS = new GZIPOutputStream(bos); + public void encode(ByteBuffer data, ByteArrayOutputStream bos) throws IOException { + try (GZIPOutputStream gzipOS = new GZIPOutputStream(bos)) { gzipOS.write(data.array(), data.arrayOffset() + data.position(), data.remaining()); - gzipOS.close(); - return ByteBuffer.wrap(bos.toByteArray()); } } 
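
With SchemaRegistryClient now extending AutoCloseable and the factory split into withDefaultNamespace and withNamespace, callers can scope the client with try-with-resources. A short illustrative sketch, assuming a pre-built SchemaRegistryClientConfig; the namespace and group name are placeholders:

    // Illustrative sketch (not part of the patch): scoping the client lifecycle.
    import io.pravega.schemaregistry.client.SchemaRegistryClient;
    import io.pravega.schemaregistry.client.SchemaRegistryClientConfig;
    import io.pravega.schemaregistry.client.SchemaRegistryClientFactory;

    class ClientUsageSketch {
        static void listCodecs(SchemaRegistryClientConfig config) throws Exception {
            // close() releases the underlying JAX-RS client added in this patch
            try (SchemaRegistryClient client = SchemaRegistryClientFactory.withNamespace("my-namespace", config)) {
                client.getCodecTypes("my-group"); // any registry call; the group name is a placeholder
            }
        }
    }
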
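
The Codecs hunks above change encode to write into a caller-supplied output stream rather than return a new buffer, while decode continues to return a ByteBuffer and now takes the codec properties map. A small round-trip sketch mirroring the updated CodecTest later in this series, which is where the EnhancedByteArrayOutputStream usage and the empty properties map come from:

    // Illustrative sketch (not part of the patch): encode/decode round trip with the new contract.
    import com.google.common.base.Charsets;
    import com.google.common.collect.ImmutableMap;
    import io.pravega.common.io.EnhancedByteArrayOutputStream;
    import io.pravega.schemaregistry.codec.Codec;
    import io.pravega.schemaregistry.codec.Codecs;

    import java.io.IOException;
    import java.nio.ByteBuffer;

    class GzipRoundTripSketch {
        static ByteBuffer roundTrip() throws IOException {
            byte[] original = "hello schema registry".getBytes(Charsets.UTF_8);
            Codec gzip = Codecs.GzipCompressor.getCodec();

            // encode now writes the compressed bytes into the supplied output stream
            EnhancedByteArrayOutputStream bos = new EnhancedByteArrayOutputStream();
            gzip.encode(ByteBuffer.wrap(original), bos);
            ByteBuffer encoded = ByteBuffer.wrap(bos.getData().array(), 0, bos.getData().getLength());

            // decode still returns a ByteBuffer; gzip needs no codec properties here
            return gzip.decode(encoded, ImmutableMap.of());
        }
    }
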
@Override public ByteBuffer decode(ByteBuffer data, Map codecProperties) throws IOException { - ByteBufferBackedInputStream bis = new ByteBufferBackedInputStream(data); + InputStream bis = new ByteBufferBackedInputStream(data); return ByteBuffer.wrap(IOUtils.toByteArray(new GZIPInputStream(bis))); } } @@ -103,14 +101,13 @@ public CodecType getCodecType() { } @Override - public ByteBuffer encode(ByteBuffer data) throws IOException { + public void encode(ByteBuffer data, ByteArrayOutputStream bos) throws IOException { int capacity = Snappy.maxCompressedLength(data.remaining()); - ByteBuffer encoded = ByteBuffer.allocate(capacity); + byte[] encoded = new byte[capacity]; int size = Snappy.compress(data.array(), data.arrayOffset() + data.position(), - data.remaining(), encoded.array(), 0); - encoded.limit(size); - return encoded; + data.remaining(), encoded, 0); + bos.write(encoded, 0, size); } @Override diff --git a/serializers/src/main/java/io/pravega/schemaregistry/codec/Encoder.java b/serializers/src/main/java/io/pravega/schemaregistry/codec/Encoder.java index ec5e78d19..9af01a8a4 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/codec/Encoder.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/codec/Encoder.java @@ -11,6 +11,7 @@ import io.pravega.schemaregistry.contract.data.CodecType; +import java.io.ByteArrayOutputStream; import java.io.IOException; import java.nio.ByteBuffer; @@ -29,9 +30,9 @@ public interface Encoder { * Implementation should encode the remaining bytes in the buffer and return a new ByteBuffer that includes * the encoded data at its current position. * - * @param data ByteBuffer to encode. - * @return encoded ByteBuffer with position set to the start of encoded data. + * @param data ByteBuffer to encode. + * @param outputStream ByteArrayOutputStream where the encoded data should be written. * @throws IOException IOException can be thrown while reading from or writing to byte buffers. 
*/ - ByteBuffer encode(ByteBuffer data) throws IOException; + void encode(ByteBuffer data, ByteArrayOutputStream outputStream) throws IOException; } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractSerializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractSerializer.java index 2d98faace..9b6c8f12a 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractSerializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractSerializer.java @@ -51,7 +51,8 @@ protected AbstractSerializer(String groupId, Preconditions.checkNotNull(client); Preconditions.checkNotNull(encoder); Preconditions.checkNotNull(schema); - + Preconditions.checkArgument(encodeHeader || encoder.equals(Codecs.None.getCodec()), + "Cannot use encoder if encoder header is false."); this.groupId = groupId; this.client = client; this.schemaInfo = schema.getSchemaInfo(); @@ -79,30 +80,26 @@ private void initialize() { @SneakyThrows(IOException.class) @Override public ByteBuffer serialize(T obj) { - EnhancedByteArrayOutputStream dataStream = new EnhancedByteArrayOutputStream(); + EnhancedByteArrayOutputStream outStream = new EnhancedByteArrayOutputStream(); + ByteBuffer byteBuffer; if (this.encodeHeader) { - dataStream.write(PROTOCOL); - BitConverter.writeInt(dataStream, encodingId.get().getId()); + outStream.write(PROTOCOL); + BitConverter.writeInt(outStream, encodingId.get().getId()); } - - serialize(obj, schemaInfo, dataStream); - - ByteBuffer byteBuffer; - byte[] serialized = dataStream.getData().array(); - if (!encoder.equals(Codecs.None.getCodec())) { - ByteBuffer wrap = ByteBuffer.wrap(serialized, HEADER_LENGTH, - dataStream.getData().getLength() - HEADER_LENGTH); - ByteBuffer encoded = encoder.encode(wrap); - int bufferSize = HEADER_LENGTH + encoded.remaining(); - byteBuffer = ByteBuffer.allocate(bufferSize); - // copy the header from serialized array into encoded output array - byteBuffer.put(serialized, 0, HEADER_LENGTH); - byteBuffer.put(encoded); - byteBuffer.rewind(); + + if (!this.encodeHeader || this.encoder.equals(Codecs.None.getCodec())) { + // write serialized data to the output stream + serialize(obj, schemaInfo, outStream); } else { - byteBuffer = ByteBuffer.wrap(serialized, 0, dataStream.getData().getLength()); + // encode header is true and encoder is supplied, encode the data + EnhancedByteArrayOutputStream serializedStream = new EnhancedByteArrayOutputStream(); + + serialize(obj, schemaInfo, serializedStream); + encoder.encode(ByteBuffer.wrap(serializedStream.getData().array()), outStream); } - + + byteBuffer = ByteBuffer.wrap(outStream.getData().array(), 0, outStream.getData().getLength()); + return byteBuffer; } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonDeserializer.java index 62e794b21..a4a3ed9ea 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonDeserializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonDeserializer.java @@ -33,8 +33,6 @@ class JsonDeserializer extends AbstractDeserializer { this.jsonSchema = schema; this.objectMapper = new ObjectMapper(); objectMapper.setVisibility(PropertyAccessor.ALL, Visibility.ANY); - objectMapper.setVisibility(PropertyAccessor.FIELD, Visibility.ANY); - objectMapper.setVisibility(PropertyAccessor.CREATOR, Visibility.ANY); } @Override diff --git 
a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java index b5af52ede..6906dfa9d 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java @@ -46,6 +46,10 @@ public class SerializerConfig { */ @NonNull private final String groupId; + /** + * Namespace for the group. + */ + private final String namespace; /** * Either the registry client or the {@link SchemaRegistryClientConfig} that can be used for creating a new registry client. * Exactly one of the two option has to be supplied. @@ -105,12 +109,13 @@ public class SerializerConfig { */ private final boolean writeEncodingHeader; - private SerializerConfig(String groupId, SchemaRegistryClientConfig config, SchemaRegistryClient client, + private SerializerConfig(String groupId, String namespace, SchemaRegistryClientConfig config, SchemaRegistryClient client, boolean registerSchema, boolean registerCodec, Codec encoder, Decoders decoders, boolean failOnCodecMismatch, GroupProperties createGroup, boolean writeEncodingHeader) { Preconditions.checkArgument(!Strings.isNullOrEmpty(groupId), "Group id needs to be supplied"); Preconditions.checkArgument(client != null || config != null, "Either registry client or config needs to be supplied"); this.groupId = groupId; + this.namespace = namespace; this.registryClient = client; this.registryConfig = config; this.registerSchema = registerSchema; @@ -149,6 +154,7 @@ public static final class SerializerConfigBuilder { private boolean writeEncodingHeader = true; private SchemaRegistryClientConfig registryConfig = null; private SchemaRegistryClient registryClient = null; + private String namespace = null; /** * Add a decoder for decoding data encoded with the {@link Codec#getCodecType()}. diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactoryHelper.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactoryHelper.java index ce0e4d0f5..9204293d3 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactoryHelper.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactoryHelper.java @@ -36,7 +36,7 @@ static SchemaRegistryClient initForDeserializer(SerializerConfig config) { private static SchemaRegistryClient getSchemaRegistryClient(SerializerConfig config) { return config.getRegistryConfigOrClient().isLeft() ? 
- SchemaRegistryClientFactory.createRegistryClient(config.getRegistryConfigOrClient().getLeft()) : + SchemaRegistryClientFactory.withNamespace(config.getNamespace(), config.getRegistryConfigOrClient().getLeft()) : config.getRegistryConfigOrClient().getRight(); } diff --git a/serializers/src/test/java/io/pravega/schemaregistry/codec/CodecTest.java b/serializers/src/test/java/io/pravega/schemaregistry/codec/CodecTest.java index 0958dc185..6e86596b1 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/codec/CodecTest.java +++ b/serializers/src/test/java/io/pravega/schemaregistry/codec/CodecTest.java @@ -11,15 +11,14 @@ import com.google.common.base.Charsets; import com.google.common.collect.ImmutableMap; +import io.pravega.common.io.EnhancedByteArrayOutputStream; import org.junit.Test; import java.io.IOException; import java.nio.ByteBuffer; import java.util.Arrays; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.Assert.*; public class CodecTest { @Test @@ -27,23 +26,32 @@ public void testCodec() throws IOException { byte[] testStringBytes = "this is a test string".getBytes(Charsets.UTF_8); Codec snappy = Codecs.SnappyCompressor.getCodec(); assertEquals(snappy.getCodecType(), Codecs.SnappyCompressor.getCodec().getCodecType()); - ByteBuffer encoded = snappy.encode(ByteBuffer.wrap(testStringBytes)); - assertFalse(Arrays.equals(encoded.array(), testStringBytes)); + EnhancedByteArrayOutputStream byteArrayOutputStream = new EnhancedByteArrayOutputStream(); + snappy.encode(ByteBuffer.wrap(testStringBytes), byteArrayOutputStream); + ByteBuffer encoded = ByteBuffer.wrap(byteArrayOutputStream.getData().array(), 0, byteArrayOutputStream.getData().getLength()); + assertNotEquals(encoded.remaining(), testStringBytes.length); ByteBuffer decoded = snappy.decode(encoded, ImmutableMap.of()); assertTrue(Arrays.equals(decoded.array(), testStringBytes)); - + + byteArrayOutputStream = new EnhancedByteArrayOutputStream(); Codec gzip = Codecs.GzipCompressor.getCodec(); assertEquals(gzip.getCodecType(), Codecs.GzipCompressor.getCodec().getCodecType()); - encoded = gzip.encode(ByteBuffer.wrap(testStringBytes)); - assertFalse(Arrays.equals(encoded.array(), testStringBytes)); + gzip.encode(ByteBuffer.wrap(testStringBytes), byteArrayOutputStream); + encoded = ByteBuffer.wrap(byteArrayOutputStream.getData().array(), 0, byteArrayOutputStream.getData().getLength()); + assertNotEquals(encoded.remaining(), testStringBytes.length); decoded = gzip.decode(encoded, ImmutableMap.of()); assertTrue(Arrays.equals(decoded.array(), testStringBytes)); - + + byteArrayOutputStream = new EnhancedByteArrayOutputStream(); Codec none = Codecs.None.getCodec(); assertEquals(none.getCodecType(), Codecs.None.getCodec().getCodecType()); - encoded = none.encode(ByteBuffer.wrap(testStringBytes)); - assertTrue(Arrays.equals(encoded.array(), testStringBytes)); + none.encode(ByteBuffer.wrap(testStringBytes), byteArrayOutputStream); + encoded = ByteBuffer.wrap(byteArrayOutputStream.getData().array(), 0, byteArrayOutputStream.getData().getLength()); + assertEquals(encoded.remaining(), testStringBytes.length); decoded = none.decode(encoded, ImmutableMap.of()); - assertTrue(Arrays.equals(decoded.array(), testStringBytes)); + + byte[] decodedArray = new byte[decoded.remaining()]; + decoded.get(decodedArray); + assertTrue(Arrays.equals(decodedArray, testStringBytes)); } } From 982397493f596ffef0af0a93b9fb9fc3cab3cc3e Mon Sep 17 
00:00:00 2001 From: Shivesh Ranjan Date: Mon, 13 Jul 2020 07:30:28 -0700 Subject: [PATCH 58/70] PR comments Signed-off-by: Shivesh Ranjan --- .../pravega/schemaregistry/codec/Codecs.java | 1 - .../schemaregistry/schemas/JSONSchema.java | 21 ++++++++++++------- .../schemas/ProtobufSchema.java | 21 +++++++++++-------- .../schemaregistry/schemas/Schema.java | 7 +++++++ .../serializers/JsonDeserializer.java | 2 +- .../MultiFormatSerializerFactory.java | 14 ++++++++++++- .../serializers/ProtobufSerializer.java | 1 + .../serializers/WithSchema.java | 12 ++++++++++- .../schemaregistry/codec/CodecTest.java | 1 + 9 files changed, 59 insertions(+), 21 deletions(-) diff --git a/serializers/src/main/java/io/pravega/schemaregistry/codec/Codecs.java b/serializers/src/main/java/io/pravega/schemaregistry/codec/Codecs.java index e796e16c9..cb0c6b9b4 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/codec/Codecs.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/codec/Codecs.java @@ -53,7 +53,6 @@ public CodecType getCodecType() { @Override public void encode(ByteBuffer data, ByteArrayOutputStream bos) { - bos.write(data.array(), data.arrayOffset() + data.position(), data.remaining()); } @Override diff --git a/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java b/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java index 12c166491..589e3a11b 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java @@ -36,30 +36,30 @@ public class JSONSchema implements Schema { private final String schemaString; private final Class base; @Getter - private final Class tClass; - + private final Class derived; + @Getter private final JsonSchema schema; private final SchemaInfo schemaInfo; - private JSONSchema(JsonSchema schema, String name, String schemaString, Class tClass) { - this(schema, name, schemaString, tClass, tClass); + private JSONSchema(JsonSchema schema, String name, String schemaString, Class derived) { + this(schema, name, schemaString, derived, derived); } private JSONSchema(JsonSchema schema, String name, String schemaString, Class base, Class derived) { this.schemaString = schemaString; this.schemaInfo = new SchemaInfo(name, SerializationFormat.Json, getSchemaBytes(), ImmutableMap.of()); this.base = base; - this.tClass = derived; + this.derived = derived; this.schema = schema; } - private JSONSchema(SchemaInfo schemaInfo, JsonSchema schema, String schemaString, Class tClass) { + private JSONSchema(SchemaInfo schemaInfo, JsonSchema schema, String schemaString, Class derived) { this.schemaString = schemaString; this.schemaInfo = schemaInfo; - this.base = tClass; - this.tClass = tClass; + this.base = derived; + this.derived = derived; this.schema = schema; } @@ -172,4 +172,9 @@ private ByteBuffer getSchemaBytes() { public SchemaInfo getSchemaInfo() { return schemaInfo; } + + @Override + public Class getTClass() { + return base; + } } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/schemas/ProtobufSchema.java b/serializers/src/main/java/io/pravega/schemaregistry/schemas/ProtobufSchema.java index 4555052d2..2949e690d 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/schemas/ProtobufSchema.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/schemas/ProtobufSchema.java @@ -37,18 +37,22 @@ public class ProtobufSchema implements Schema { @Getter private final Parser parser; @Getter + private 
final Class tClass; + @Getter private final FileDescriptorSet descriptorProto; private final SchemaInfo schemaInfo; - private ProtobufSchema(String name, Parser parser, FileDescriptorSet fileDescriptorSet) { + private ProtobufSchema(String name, Parser parser, Class tClass, FileDescriptorSet fileDescriptorSet) { this.parser = parser; + this.tClass = tClass; this.descriptorProto = fileDescriptorSet; this.schemaInfo = new SchemaInfo(name, SerializationFormat.Protobuf, getSchemaBytes(), ImmutableMap.of()); } - private ProtobufSchema(FileDescriptorSet fileDescriptorSet, SchemaInfo schemaInfo) { + private ProtobufSchema(FileDescriptorSet fileDescriptorSet, SchemaInfo schemaInfo, Class tClass) { this.parser = null; + this.tClass = null; this.descriptorProto = fileDescriptorSet; this.schemaInfo = schemaInfo; } @@ -74,7 +78,7 @@ public SchemaInfo getSchemaInfo() { public static ProtobufSchema of(Class tClass) { Extractor extractor = new Extractor<>(tClass).invoke(); - return new ProtobufSchema(extractor.getFullName(), extractor.getParser(), + return new ProtobufSchema(extractor.getFullName(), extractor.getParser(), tClass, extractor.getFileDescriptorSet()); } @@ -88,7 +92,7 @@ public static ProtobufSchema of(Class tClas */ public static ProtobufSchema of(Class tClass, FileDescriptorSet fileDescriptorSet) { Extractor extractor = new Extractor<>(tClass).invoke(); - return new ProtobufSchema(extractor.getFullName(), extractor.getParser(), fileDescriptorSet); + return new ProtobufSchema(extractor.getFullName(), extractor.getParser(), tClass, fileDescriptorSet); } /** @@ -102,7 +106,7 @@ public static ProtobufSchema of(Class tClas * @return {@link ProtobufSchema} with generic type {@link DynamicMessage} that captures protobuf schema. */ public static ProtobufSchema of(String name, FileDescriptorSet fileDescriptorSet) { - return new ProtobufSchema<>(name, null, fileDescriptorSet); + return new ProtobufSchema<>(name, null, DynamicMessage.class, fileDescriptorSet); } /** @@ -121,7 +125,7 @@ public static ProtobufSchema Class tClass, FileDescriptorSet fileDescriptorSet) { Extractor extractor = new Extractor<>(tClass).invoke(); - return new ProtobufSchema<>(extractor.getFullName(), (Parser) extractor.getParser(), fileDescriptorSet); + return new ProtobufSchema<>(extractor.getFullName(), (Parser) extractor.getParser(), GeneratedMessageV3.class, fileDescriptorSet); } /** @@ -140,7 +144,7 @@ public static ProtobufSchema Extractor extractor = new Extractor<>(tClass).invoke(); return new ProtobufSchema<>(extractor.getFullName(), - (Parser) extractor.getParser(), extractor.getFileDescriptorSet()); + (Parser) extractor.getParser(), GeneratedMessageV3.class, extractor.getFileDescriptorSet()); } /** @@ -153,7 +157,7 @@ public static ProtobufSchema from(SchemaInfo schemaInfo) { try { FileDescriptorSet fileDescriptorSet = FileDescriptorSet.parseFrom(schemaInfo.getSchemaData()); - return new ProtobufSchema<>(fileDescriptorSet, schemaInfo); + return new ProtobufSchema<>(fileDescriptorSet, schemaInfo, DynamicMessage.class); } catch (InvalidProtocolBufferException ex) { throw new IllegalArgumentException("Unable to get protobuf schema from schemainfo", ex); } @@ -176,7 +180,6 @@ String getFullName() { } FileDescriptorSet getFileDescriptorSet() { - // TODO: verify that the file proto has descriptors for all message types return FileDescriptorSet .newBuilder().addFile(defaultInstance.getDescriptorForType().getFile().toProto()).build(); } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/schemas/Schema.java 
b/serializers/src/main/java/io/pravega/schemaregistry/schemas/Schema.java index d3bfc834e..a498779eb 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/schemas/Schema.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/schemas/Schema.java @@ -24,4 +24,11 @@ public interface Schema { * @return Schema Info object derived from the schema object. */ SchemaInfo getSchemaInfo(); + + /** + * Class for the Type of object. + * + * @return Class of type T + */ + Class getTClass(); } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonDeserializer.java index a4a3ed9ea..0a8837df6 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonDeserializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonDeserializer.java @@ -37,6 +37,6 @@ class JsonDeserializer extends AbstractDeserializer { @Override protected T deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) throws IOException { - return objectMapper.readValue(inputStream, jsonSchema.getTClass()); + return objectMapper.readValue(inputStream, jsonSchema.getDerived()); } } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatSerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatSerializerFactory.java index 8b1766923..713b3efff 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatSerializerFactory.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatSerializerFactory.java @@ -18,6 +18,7 @@ import io.pravega.schemaregistry.schemas.AvroSchema; import io.pravega.schemaregistry.schemas.JSONSchema; import io.pravega.schemaregistry.schemas.ProtobufSchema; +import io.pravega.schemaregistry.schemas.Schema; import lombok.extern.slf4j.Slf4j; import org.apache.avro.generic.GenericRecord; @@ -183,8 +184,19 @@ private static AbstractSerializer getCustomSerializer( SchemaRegistryClient schemaRegistryClient, String groupId, SchemaInfo schemaInfo) { if (customSerializers.containsKey(schemaInfo.getSerializationFormat())) { CustomSerializer serializer = customSerializers.get(schemaInfo.getSerializationFormat()); + Schema schema = new Schema() { + @Override + public SchemaInfo getSchemaInfo() { + return schemaInfo; + } + + @Override + public Class getTClass() { + return Object.class; + } + }; return new AbstractSerializer(groupId, schemaRegistryClient, - () -> schemaInfo, config.getEncoder(), config.isRegisterSchema(), config.isWriteEncodingHeader()) { + schema, config.getEncoder(), config.isRegisterSchema(), config.isWriteEncodingHeader()) { @Override protected void serialize(Object var, SchemaInfo schema, OutputStream outputStream) { serializer.serialize(var, schema, outputStream); diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufSerializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufSerializer.java index 13954eca4..e95ae6672 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufSerializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufSerializer.java @@ -27,5 +27,6 @@ class ProtobufSerializer extends AbstractSerializer { @Override protected void serialize(T var, SchemaInfo schemaInfo, OutputStream outputStream) throws IOException { 
var.writeTo(outputStream); + outputStream.flush(); } } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/WithSchema.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/WithSchema.java index 30cf3ce94..56393b60f 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/WithSchema.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/WithSchema.java @@ -71,7 +71,17 @@ private Schema convertToSchema(SchemaInfo schemaInfo) { schema = JSONSchema.from(schemaInfo); break; case Custom: - schema = () -> schemaInfo; + schema = new Schema() { + @Override + public SchemaInfo getSchemaInfo() { + return schemaInfo; + } + + @Override + public Class getTClass() { + return Object.class; + } + }; break; default: throw new IllegalArgumentException("Serialization format not supported"); diff --git a/serializers/src/test/java/io/pravega/schemaregistry/codec/CodecTest.java b/serializers/src/test/java/io/pravega/schemaregistry/codec/CodecTest.java index 6e86596b1..646948925 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/codec/CodecTest.java +++ b/serializers/src/test/java/io/pravega/schemaregistry/codec/CodecTest.java @@ -46,6 +46,7 @@ public void testCodec() throws IOException { Codec none = Codecs.None.getCodec(); assertEquals(none.getCodecType(), Codecs.None.getCodec().getCodecType()); none.encode(ByteBuffer.wrap(testStringBytes), byteArrayOutputStream); + byteArrayOutputStream.write(testStringBytes); encoded = ByteBuffer.wrap(byteArrayOutputStream.getData().array(), 0, byteArrayOutputStream.getData().getLength()); assertEquals(encoded.remaining(), testStringBytes.length); decoded = none.decode(encoded, ImmutableMap.of()); From 7ee6fe0195924225083eac8247d0739e5de2dae8 Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Tue, 14 Jul 2020 01:53:38 -0700 Subject: [PATCH 59/70] PR comments Signed-off-by: Shivesh Ranjan --- .../schemaregistry/serializers/ProtobufDeserializer.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufDeserializer.java index f0756bb2b..9cd4a460f 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufDeserializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufDeserializer.java @@ -16,6 +16,7 @@ import io.pravega.schemaregistry.contract.data.SchemaInfo; import io.pravega.schemaregistry.schemas.ProtobufSchema; +import java.io.IOException; import java.io.InputStream; public class ProtobufDeserializer extends AbstractDeserializer { @@ -29,11 +30,11 @@ public class ProtobufDeserializer extends Abstract } @Override - protected T deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) { + protected T deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) throws IOException { try { return protobufSchema.getParser().parseFrom(inputStream); } catch (InvalidProtocolBufferException e) { - throw new IllegalArgumentException("Invalid bytes", e); + throw new IOException("Invalid protobuffer serialized bytes", e); } } } From ff47289e4b68ad98d8397933b7ede1fda7033730 Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Tue, 14 Jul 2020 08:28:51 -0700 Subject: [PATCH 60/70] PR comments Signed-off-by: Shivesh Ranjan --- .../pravega/schemaregistry/schemas/JSONSchema.java | 2 +- 
.../serializers/AvroDeserializer.java | 13 ++++++++----- .../serializers/BaseDeserializer.java | 2 +- .../schemaregistry/serializers/BaseSerializer.java | 2 +- .../schemaregistry/serializers/EncodingCache.java | 5 ++--- .../serializers/MultiFormatSerializerFactory.java | 4 ++-- ...rlizer.java => ProtobufGenericDeserializer.java} | 6 +++--- .../serializers/ProtobufSerializerFactory.java | 4 ++-- .../serializers/SerializerFactoryHelper.java | 1 - .../schemas/{TestSchemas.java => SchemasTest.java} | 2 +- 10 files changed, 21 insertions(+), 20 deletions(-) rename serializers/src/main/java/io/pravega/schemaregistry/serializers/{ProtobufGenericDeserlizer.java => ProtobufGenericDeserializer.java} (92%) rename serializers/src/test/java/io/pravega/schemaregistry/schemas/{TestSchemas.java => SchemasTest.java} (99%) diff --git a/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java b/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java index 589e3a11b..b7a252a92 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java @@ -65,7 +65,7 @@ private JSONSchema(SchemaInfo schemaInfo, JsonSchema schema, String schemaString /** * Method to create a typed JSONSchema for the given class. It extracts the json schema from the class. - * For POJOs the schema is extracted using jacksons {@link JsonSchemaGenerator}. + * For POJOs the schema is extracted using jackson's {@link JsonSchemaGenerator}. * * @param tClass Class whose object's schema is used. * @param Type of the Java class. diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroDeserializer.java index 6cab88051..a8db6971b 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroDeserializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AvroDeserializer.java @@ -41,11 +41,14 @@ class AvroDeserializer extends AbstractDeserializer { @Override protected T deserialize(InputStream inputStream, SchemaInfo writerSchemaInfo, SchemaInfo readerSchemaInfo) throws IOException { Preconditions.checkNotNull(writerSchemaInfo); - Schema writerSchema = knownSchemas.computeIfAbsent(writerSchemaInfo, x -> { - String schemaString = new String(x.getSchemaData().array(), Charsets.UTF_8); - return new Schema.Parser().parse(schemaString); - - }); + Schema writerSchema; + if (knownSchemas.containsKey(writerSchemaInfo)) { + writerSchema = knownSchemas.get(writerSchemaInfo); + } else { + String schemaString = new String(writerSchemaInfo.getSchemaData().array(), Charsets.UTF_8); + writerSchema = new Schema.Parser().parse(schemaString); + knownSchemas.put(writerSchemaInfo, writerSchema); + } Schema readerSchema = avroSchema.getSchema(); BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(inputStream, null); diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/BaseDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/BaseDeserializer.java index 07538d1f2..015a2036f 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/BaseDeserializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/BaseDeserializer.java @@ -16,6 +16,6 @@ abstract class BaseDeserializer implements Serializer { @Override public final ByteBuffer serialize(T value) { - throw new 
IllegalStateException(); + throw new UnsupportedOperationException(); } } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/BaseSerializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/BaseSerializer.java index 7d77bda36..8b7844fc2 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/BaseSerializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/BaseSerializer.java @@ -16,6 +16,6 @@ abstract class BaseSerializer implements Serializer { @Override public final T deserialize(ByteBuffer serializedValue) { - throw new IllegalStateException(); + throw new UnsupportedOperationException(); } } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/EncodingCache.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/EncodingCache.java index f301dcefa..9c005d86a 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/EncodingCache.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/EncodingCache.java @@ -19,20 +19,19 @@ import io.pravega.schemaregistry.contract.data.EncodingInfo; import lombok.Data; -import java.time.Duration; import java.util.concurrent.ExecutionException; /** * Local cache for storing schemas that are retrieved from the registry service. */ public class EncodingCache { - private static final Duration EXPIRY_AFTER_ACCESS = Duration.ofMinutes(20); + private static final int MAXIMUM_SIZE = 1000; private final LoadingCache encodingCache; EncodingCache(String groupId, SchemaRegistryClient schemaRegistryClient) { encodingCache = CacheBuilder.newBuilder() - .expireAfterAccess(EXPIRY_AFTER_ACCESS) + .maximumSize(MAXIMUM_SIZE) .build(new CacheLoader() { @Override public EncodingInfo load(EncodingId key) { diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatSerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatSerializerFactory.java index 713b3efff..54375ba0c 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatSerializerFactory.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatSerializerFactory.java @@ -101,7 +101,7 @@ private static Serializer deserializeAsTInternal(SerializerConfig config, AbstractDeserializer json = new JsonGenericDeserializer(config.getGroupId(), schemaRegistryClient, config.getDecoders(), encodingCache, config.isWriteEncodingHeader()); - AbstractDeserializer protobuf = new ProtobufGenericDeserlizer(groupId, schemaRegistryClient, null, config.getDecoders(), + AbstractDeserializer protobuf = new ProtobufGenericDeserializer(groupId, schemaRegistryClient, null, config.getDecoders(), encodingCache, config.isWriteEncodingHeader()); AbstractDeserializer avro = new AvroGenericDeserializer(groupId, schemaRegistryClient, null, config.getDecoders(), encodingCache); @@ -133,7 +133,7 @@ private static Serializer> deserializerInternal(SerializerConf AbstractDeserializer json = new JsonGenericDeserializer(config.getGroupId(), schemaRegistryClient, config.getDecoders(), encodingCache, config.isWriteEncodingHeader()); - AbstractDeserializer protobuf = new ProtobufGenericDeserlizer(groupId, schemaRegistryClient, null, config.getDecoders(), + AbstractDeserializer protobuf = new ProtobufGenericDeserializer(groupId, schemaRegistryClient, null, config.getDecoders(), encodingCache, config.isWriteEncodingHeader()); AbstractDeserializer avro = new 
AvroGenericDeserializer(groupId, schemaRegistryClient, null, config.getDecoders(), encodingCache); diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufGenericDeserlizer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufGenericDeserializer.java similarity index 92% rename from serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufGenericDeserlizer.java rename to serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufGenericDeserializer.java index ac70046f2..5a80cd218 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufGenericDeserlizer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufGenericDeserializer.java @@ -25,11 +25,11 @@ import java.io.InputStream; import java.util.concurrent.ConcurrentHashMap; -public class ProtobufGenericDeserlizer extends AbstractDeserializer { +public class ProtobufGenericDeserializer extends AbstractDeserializer { private final ConcurrentHashMap knownSchemas; - ProtobufGenericDeserlizer(String groupId, SchemaRegistryClient client, @Nullable ProtobufSchema schema, - SerializerConfig.Decoders decoder, EncodingCache encodingCache, boolean encodeHeader) { + ProtobufGenericDeserializer(String groupId, SchemaRegistryClient client, @Nullable ProtobufSchema schema, + SerializerConfig.Decoders decoder, EncodingCache encodingCache, boolean encodeHeader) { super(groupId, client, schema, false, decoder, encodingCache, encodeHeader); Preconditions.checkArgument(isEncodeHeader() || schema != null); knownSchemas = new ConcurrentHashMap<>(); diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufSerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufSerializerFactory.java index 40ca25185..ee3975ddc 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufSerializerFactory.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufSerializerFactory.java @@ -64,7 +64,7 @@ static Serializer genericDeserializer(SerializerConfig config, @ String groupId = config.getGroupId(); EncodingCache encodingCache = new EncodingCache(groupId, schemaRegistryClient); - return new ProtobufGenericDeserlizer(groupId, schemaRegistryClient, schema, config.getDecoders(), encodingCache, + return new ProtobufGenericDeserializer(groupId, schemaRegistryClient, schema, config.getDecoders(), encodingCache, config.isWriteEncodingHeader()); } @@ -114,7 +114,7 @@ static Serializer> type .values().stream().collect(Collectors.toMap(x -> x.getSchemaInfo().getType(), x -> new ProtobufDeserializer<>(groupId, schemaRegistryClient, x, config.getDecoders(), encodingCache, config.isWriteEncodingHeader()))); - ProtobufGenericDeserlizer genericDeserializer = new ProtobufGenericDeserlizer(groupId, schemaRegistryClient, null, + ProtobufGenericDeserializer genericDeserializer = new ProtobufGenericDeserializer(groupId, schemaRegistryClient, null, config.getDecoders(), encodingCache, config.isWriteEncodingHeader()); return new MultiplexedAndGenericDeserializer<>(groupId, schemaRegistryClient, deserializerMap, genericDeserializer, config.getDecoders(), encodingCache); diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactoryHelper.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactoryHelper.java index 9204293d3..be13b9c08 100644 --- 
a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactoryHelper.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactoryHelper.java @@ -28,7 +28,6 @@ static SchemaRegistryClient initForSerializer(SerializerConfig config) { static SchemaRegistryClient initForDeserializer(SerializerConfig config) { SchemaRegistryClient schemaRegistryClient = getSchemaRegistryClient(config); - createGroup(schemaRegistryClient, config); failOnCodecMismatch(schemaRegistryClient, config); return schemaRegistryClient; diff --git a/serializers/src/test/java/io/pravega/schemaregistry/schemas/TestSchemas.java b/serializers/src/test/java/io/pravega/schemaregistry/schemas/SchemasTest.java similarity index 99% rename from serializers/src/test/java/io/pravega/schemaregistry/schemas/TestSchemas.java rename to serializers/src/test/java/io/pravega/schemaregistry/schemas/SchemasTest.java index 247e40928..07aed6fba 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/schemas/TestSchemas.java +++ b/serializers/src/test/java/io/pravega/schemaregistry/schemas/SchemasTest.java @@ -32,7 +32,7 @@ import static io.pravega.schemaregistry.testobjs.SchemaDefinitions.JSON_SCHEMA_STRING; import static org.junit.Assert.*; -public class TestSchemas { +public class SchemasTest { @Test public void testAvroSchema() { AvroSchema schema = AvroSchema.of(SchemaDefinitions.SCHEMA1); From 6c3fd87a5bcf73bedf95eb50445ac9616a2cd49a Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Tue, 14 Jul 2020 23:24:59 -0700 Subject: [PATCH 61/70] PR comments Signed-off-by: Shivesh Ranjan --- .../pravega/schemaregistry/codec/Codecs.java | 26 +++++++++++++------ .../pravega/schemaregistry/codec/Encoder.java | 2 ++ .../serializers/AbstractDeserializer.java | 15 ++++++----- .../serializers/EncodingCache.java | 18 ++++++++----- .../MultiFormatSerializerFactory.java | 3 ++- .../serializers/WithSchema.java | 6 ++++- .../schemaregistry/serializers/CacheTest.java | 22 +++++++++++++++- 7 files changed, 68 insertions(+), 24 deletions(-) diff --git a/serializers/src/main/java/io/pravega/schemaregistry/codec/Codecs.java b/serializers/src/main/java/io/pravega/schemaregistry/codec/Codecs.java index cb0c6b9b4..34fd14cc5 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/codec/Codecs.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/codec/Codecs.java @@ -75,8 +75,10 @@ public CodecType getCodecType() { @Override public void encode(ByteBuffer data, ByteArrayOutputStream bos) throws IOException { + byte[] b = data.hasArray() ? data.array() : getBytes(data); + int offset = data.hasArray() ? 
data.arrayOffset() + data.position() : 0; try (GZIPOutputStream gzipOS = new GZIPOutputStream(bos)) { - gzipOS.write(data.array(), data.arrayOffset() + data.position(), data.remaining()); + gzipOS.write(b, offset, data.remaining()); } } @@ -87,6 +89,12 @@ public ByteBuffer decode(ByteBuffer data, Map codecProperties) t } } + private static byte[] getBytes(ByteBuffer data) { + byte[] b = new byte[data.remaining()]; + data.get(b); + return b; + } + private static class SnappyCodec implements Codec { private static final CodecType CODEC_TYPE_SNAPPY = new CodecType(Constants.APPLICATION_X_SNAPPY_FRAMED); @Override @@ -103,18 +111,20 @@ public CodecType getCodecType() { public void encode(ByteBuffer data, ByteArrayOutputStream bos) throws IOException { int capacity = Snappy.maxCompressedLength(data.remaining()); byte[] encoded = new byte[capacity]; - - int size = Snappy.compress(data.array(), data.arrayOffset() + data.position(), - data.remaining(), encoded, 0); + + byte[] b = data.hasArray() ? data.array() : getBytes(data); + int offset = data.hasArray() ? data.arrayOffset() + data.position() : 0; + int size = Snappy.compress(b, offset, data.remaining(), encoded, 0); bos.write(encoded, 0, size); } @Override public ByteBuffer decode(ByteBuffer data, Map codecProperties) throws IOException { - ByteBuffer decoded = ByteBuffer.allocate(Snappy.uncompressedLength(data.array(), data.arrayOffset() + data.position(), - data.remaining())); - Snappy.uncompress(data.array(), data.arrayOffset() + data.position(), - data.remaining(), decoded.array(), 0); + byte[] b = data.hasArray() ? data.array() : getBytes(data); + int offset = data.hasArray() ? data.arrayOffset() + data.position() : 0; + + ByteBuffer decoded = ByteBuffer.allocate(Snappy.uncompressedLength(b, offset, data.remaining())); + Snappy.uncompress(b, offset, data.remaining(), decoded.array(), 0); return decoded; } } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/codec/Encoder.java b/serializers/src/main/java/io/pravega/schemaregistry/codec/Encoder.java index 9af01a8a4..e6e9764e8 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/codec/Encoder.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/codec/Encoder.java @@ -30,6 +30,8 @@ public interface Encoder { * Implementation should encode the remaining bytes in the buffer and return a new ByteBuffer that includes * the encoded data at its current position. * + * The implementation can optionally call flush or close on outputstream with no consequence. + * * @param data ByteBuffer to encode. * @param outputStream ByteArrayOutputStream where the encoded data should be written. * @throws IOException IOException can be thrown while reading from or writing to byte buffers. diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractDeserializer.java index 67807d4ae..7400fb6ab 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractDeserializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractDeserializer.java @@ -75,10 +75,7 @@ private void initialize() { @SneakyThrows(IOException.class) @Override public T deserialize(ByteBuffer data) { - if (!data.hasArray()) { - return null; - } - int start = data.arrayOffset() + data.position(); + int start = data.hasArray() ? 
data.arrayOffset() + data.position() : data.position(); if (this.encodeHeader) { SchemaInfo writerSchema = null; ByteBuffer decoded; @@ -103,9 +100,15 @@ public T deserialize(ByteBuffer data) { return deserialize(bais, writerSchema, schemaInfo); } } else { + byte[] b; + if (data.hasArray()) { + b = data.array(); + } else { + b = new byte[data.remaining()]; + data.get(b); + } // pass reader schema for schema on read to the underlying implementation - ByteArrayInputStream inputStream = new ByteArrayInputStream(data.array(), - data.arrayOffset() + data.position(), data.remaining()); + ByteArrayInputStream inputStream = new ByteArrayInputStream(b, start, data.remaining()); return deserialize(inputStream, null, schemaInfo); } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/EncodingCache.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/EncodingCache.java index 9c005d86a..bf1910767 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/EncodingCache.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/EncodingCache.java @@ -9,6 +9,7 @@ */ package io.pravega.schemaregistry.serializers; +import com.google.common.annotations.VisibleForTesting; import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; @@ -17,8 +18,8 @@ import io.pravega.schemaregistry.client.exceptions.RegistryExceptions; import io.pravega.schemaregistry.contract.data.EncodingId; import io.pravega.schemaregistry.contract.data.EncodingInfo; -import lombok.Data; +import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ExecutionException; /** @@ -28,10 +29,14 @@ public class EncodingCache { private static final int MAXIMUM_SIZE = 1000; private final LoadingCache encodingCache; - EncodingCache(String groupId, SchemaRegistryClient schemaRegistryClient) { + this(groupId, schemaRegistryClient, MAXIMUM_SIZE); + } + + @VisibleForTesting + EncodingCache(String groupId, SchemaRegistryClient schemaRegistryClient, int cacheSize) { encodingCache = CacheBuilder.newBuilder() - .maximumSize(MAXIMUM_SIZE) + .maximumSize(cacheSize) .build(new CacheLoader() { @Override public EncodingInfo load(EncodingId key) { @@ -52,9 +57,8 @@ EncodingInfo getGroupEncodingInfo(EncodingId encodingId) { } } - @Data - private static class Key { - private final SchemaRegistryClient client; - private final String groupId; + @VisibleForTesting + ConcurrentMap getMapForCache() { + return encodingCache.asMap(); } } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatSerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatSerializerFactory.java index 54375ba0c..abe8798b3 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatSerializerFactory.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatSerializerFactory.java @@ -9,6 +9,7 @@ */ package io.pravega.schemaregistry.serializers; +import com.fasterxml.jackson.databind.JsonNode; import com.google.common.base.Preconditions; import com.google.protobuf.DynamicMessage; import io.pravega.client.stream.Serializer; @@ -59,7 +60,7 @@ static Serializer> deserializerWithSchema(SerializerConfig co * Formats supported are protobuf, avro and json. * An event serialized with avro is deserialized into {@link GenericRecord}. * An event serialized with protobuf is deserialized into {@link DynamicMessage}. 
- * An event serialized with json is deserialized into {@link java.util.LinkedHashMap}. + * An event serialized with json is deserialized into WithSchema containing {@link JsonNode} and {@link JSONSchema}. * * This also takes a transform function which is applied on the deserialized object and should transform the object * into the type T. diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/WithSchema.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/WithSchema.java index 56393b60f..ac84cece3 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/WithSchema.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/WithSchema.java @@ -188,7 +188,11 @@ private static String toJsonString(SerializationFormat format, Object deserializ jsonString = PRINTER.print((DynamicMessage) deserialize); break; case Json: - jsonString = OBJECT_MAPPER.writeValueAsString(((WithSchema) deserialize).object); + if (deserialize instanceof WithSchema) { + jsonString = OBJECT_MAPPER.writeValueAsString(((WithSchema) deserialize).object); + } else { + jsonString = OBJECT_MAPPER.writeValueAsString(deserialize); + } break; default: jsonString = OBJECT_MAPPER.writeValueAsString(deserialize); diff --git a/serializers/src/test/java/io/pravega/schemaregistry/serializers/CacheTest.java b/serializers/src/test/java/io/pravega/schemaregistry/serializers/CacheTest.java index 283449552..313355dbd 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/serializers/CacheTest.java +++ b/serializers/src/test/java/io/pravega/schemaregistry/serializers/CacheTest.java @@ -23,6 +23,7 @@ import java.util.concurrent.ExecutionException; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; @@ -37,7 +38,26 @@ public void testCache() throws ExecutionException { new SchemaInfo("name", SerializationFormat.Avro, ByteBuffer.wrap(new byte[0]), ImmutableMap.of()), Codecs.SnappyCompressor.getCodec().getCodecType()); doAnswer(x -> encodingInfo).when(client).getEncodingInfo(eq(groupId), eq(encodingId)); - EncodingCache cache = new EncodingCache(groupId, client); + EncodingId encodingId2 = new EncodingId(1); + EncodingInfo encodingInfo2 = new EncodingInfo(new VersionInfo("name", 0, 1), + new SchemaInfo("name", SerializationFormat.Avro, ByteBuffer.wrap(new byte[0]), ImmutableMap.of()), + Codecs.SnappyCompressor.getCodec().getCodecType()); + doAnswer(x -> encodingInfo2).when(client).getEncodingInfo(eq(groupId), eq(encodingId2)); + EncodingId encodingId3 = new EncodingId(2); + EncodingInfo encodingInfo3 = new EncodingInfo(new VersionInfo("name", 0, 2), + new SchemaInfo("name", SerializationFormat.Avro, ByteBuffer.wrap(new byte[0]), ImmutableMap.of()), + Codecs.SnappyCompressor.getCodec().getCodecType()); + doAnswer(x -> encodingInfo3).when(client).getEncodingInfo(eq(groupId), eq(encodingId3)); + // create a cache with max size 2 + EncodingCache cache = new EncodingCache(groupId, client, 2); + assertEquals(cache.getMapForCache().size(), 0); assertEquals(encodingInfo, cache.getGroupEncodingInfo(encodingId)); + assertEquals(cache.getMapForCache().size(), 1); + assertEquals(encodingInfo2, cache.getGroupEncodingInfo(encodingId2)); + assertEquals(cache.getMapForCache().size(), 2); + assertEquals(encodingInfo3, cache.getGroupEncodingInfo(encodingId3)); + assertEquals(cache.getMapForCache().size(), 2); + 
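// Editor's note (illustration, not part of the patch): the cache above was created with a maximum size of 2,
// so loading the third encoding id triggers Guava's size-based (near-LRU) eviction of the first, untouched
// entry; that is why the assertions that follow expect only encodingId2 and encodingId3 to remain.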
assertTrue(cache.getMapForCache().containsKey(encodingId2)); + assertTrue(cache.getMapForCache().containsKey(encodingId3)); } } From 2d6711d2758d71d372e7ad2455b3ebbcfb1a6f03 Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Wed, 15 Jul 2020 02:07:10 -0700 Subject: [PATCH 62/70] adding avro protobuf and json creator methods in WithSchema Signed-off-by: Shivesh Ranjan --- .../serializers/SerializerConfig.java | 2 +- .../serializers/SerializerFactory.java | 13 ++++--- .../serializers/WithSchema.java | 37 +++++++++++++++++++ 3 files changed, 46 insertions(+), 6 deletions(-) diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java index 6906dfa9d..59138180d 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java @@ -110,7 +110,7 @@ public class SerializerConfig { private final boolean writeEncodingHeader; private SerializerConfig(String groupId, String namespace, SchemaRegistryClientConfig config, SchemaRegistryClient client, - boolean registerSchema, boolean registerCodec, Codec encoder, Decoders decoders, boolean failOnCodecMismatch, + boolean registerSchema, boolean registerCodec, Encoder encoder, Decoders decoders, boolean failOnCodecMismatch, GroupProperties createGroup, boolean writeEncodingHeader) { Preconditions.checkArgument(!Strings.isNullOrEmpty(groupId), "Group id needs to be supplied"); Preconditions.checkArgument(client != null || config != null, "Either registry client or config needs to be supplied"); diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java index 8d9fc49c6..17c82be29 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java @@ -10,6 +10,7 @@ package io.pravega.schemaregistry.serializers; import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.module.jsonSchema.JsonSchema; import com.google.protobuf.DynamicMessage; import com.google.protobuf.GeneratedMessageV3; import com.google.protobuf.Message; @@ -29,6 +30,7 @@ import java.util.Map; import java.util.function.BiFunction; +import static com.google.protobuf.DescriptorProtos.FileDescriptorSet; import static io.pravega.schemaregistry.serializers.WithSchema.JSON_TRANSFORM; import static io.pravega.schemaregistry.serializers.WithSchema.NO_TRANSFORM; @@ -378,9 +380,10 @@ public static Serializer> serializerWithSchema(SerializerConf /** * A deserializer that can deserialize data where each event could be written with either of avro, protobuf or json * serialization formats. It deserializes them into format specific generic objects. - * An event serialized with avro is deserialized into {@link GenericRecord}. - * An event serialized with protobuf is deserialized into {@link DynamicMessage}. - * An event serialized with json is deserialized into {@link JsonNode}. + * An event serialized with avro is deserialized into {@link GenericRecord} or {@link Object} with schema as {@link org.apache.avro.Schema}. + * An event serialized with protobuf is deserialized into {@link DynamicMessage} with schema as {@link FileDescriptorSet}. 
+ * An event serialized with json is deserialized into a {@link JsonNode} with schema as {@link JsonSchema}. + * The object and schema are wrapped in {@link WithSchema} object. * * @param config serializer config * @return a deserializer that can deserialize events serialized as protobuf, json or avro into java objects. @@ -394,7 +397,7 @@ public static Serializer> deserializerWithSchema(SerializerCo * serialization formats. * An event serialized with avro is deserialized into {@link GenericRecord}. * An event serialized with protobuf is deserialized into {@link DynamicMessage}. - * An event serialized with json is deserialized into {@link JsonNode}. + * An event serialized with json is deserialized into {@link WithSchema} object of {@link JsonNode} and {@link JsonSchema}. * * @param config serializer config * @return a deserializer that can deserialize events serialized as protobuf, json or avro into java objects. @@ -419,7 +422,7 @@ public static Serializer deserializeAsJsonString(SerializerConfig config * Formats supported are protobuf, avro and json. * An event serialized with avro is deserialized into {@link GenericRecord}. * An event serialized with protobuf is deserialized into {@link DynamicMessage}. - * An event serialized with json is deserialized into {@link JsonNode}. + * An event serialized with json is deserialized into {@link WithSchema} object of {@link JsonNode} and {@link JsonSchema}. * * This also takes a transform function which is applied on the deserialized object and should transform the object * into the type T. diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/WithSchema.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/WithSchema.java index ac84cece3..6d6ab58fd 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/WithSchema.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/WithSchema.java @@ -14,6 +14,7 @@ import com.fasterxml.jackson.module.jsonSchema.JsonSchema; import com.google.protobuf.DescriptorProtos; import com.google.protobuf.DynamicMessage; +import com.google.protobuf.GeneratedMessageV3; import com.google.protobuf.InvalidProtocolBufferException; import com.google.protobuf.util.JsonFormat; import io.pravega.schemaregistry.contract.data.SchemaInfo; @@ -202,4 +203,40 @@ private static String toJsonString(SerializationFormat format, Object deserializ } return jsonString; } + + /** + * Create WithSchema object for avro. + * + * @param object Object. + * @param avroSchema Avro Schema for object. + * @param Type of object. + * @return A WithSchema object which has Avro Schema and the corresponding object. + */ + public static WithSchema avro(T object, AvroSchema avroSchema) { + return new WithSchema<>(avroSchema.getSchemaInfo(), object, (x, y) -> object); + } + + /** + * Create WithSchema object for protobuf. + * + * @param object Object. + * @param protobufSchema Protobuf Schema for object. + * @param Type of object. + * @return A WithSchema object which has Protobuf Schema and the corresponding object. + */ + public static WithSchema proto(T object, ProtobufSchema protobufSchema) { + return new WithSchema<>(protobufSchema.getSchemaInfo(), object, (x, y) -> object); + } + + /** + * Create WithSchema object for json. + * + * @param object Object. + * @param jsonSchema Json Schema for object. + * @param Type of object. + * @return A WithSchema object which has Json schema and the corresponding object. 
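// Editor's sketch (not part of the patch): intended use of the WithSchema.avro/proto/json creators added in
// this hunk. The sample objects and the AvroSchema.of / ProtobufSchema.of / JSONSchema.of factory calls follow
// the patterns used in the tests of this series; treat the exact overloads as assumptions, not verified API.
Test1 avroObj = new Test1("name", 1);
WithSchema<Test1> withAvro = WithSchema.avro(avroObj, AvroSchema.of(Test1.class));

ProtobufTest.Message2 protoObj = ProtobufTest.Message2.newBuilder().setName("name").setField1(1).build();
WithSchema<ProtobufTest.Message2> withProto = WithSchema.proto(protoObj, ProtobufSchema.of(ProtobufTest.Message2.class));

DerivedUser1 jsonObj = new DerivedUser1("user", new Address("street", "city"), 2, "user1");
WithSchema<DerivedUser1> withJson = WithSchema.json(jsonObj, JSONSchema.of(DerivedUser1.class));
// Each creator simply pairs the object with the SchemaInfo taken from the supplied schema wrapper, which is
// the shape the multi-format serializerWithSchema(...) serializer is meant to consume.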
+ */ + public static WithSchema json(T object, JSONSchema jsonSchema) { + return new WithSchema<>(jsonSchema.getSchemaInfo(), object, (x, y) -> object); + } } From ade09af4a85006a72ecb1085b85ad4e9c8f53fef Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Wed, 15 Jul 2020 21:34:58 -0700 Subject: [PATCH 63/70] PR comments, javadoc fix, and return schema string instead of JsonSchema object Signed-off-by: Shivesh Ranjan --- .../schemaregistry/schemas/ProtobufSchema.java | 8 ++++---- .../serializers/AbstractSerializer.java | 1 - .../serializers/ProtobufGenericDeserializer.java | 2 +- .../schemaregistry/serializers/WithSchema.java | 8 ++++---- .../schemaregistry/schemas/SchemasTest.java | 14 +++++++------- 5 files changed, 16 insertions(+), 17 deletions(-) diff --git a/serializers/src/main/java/io/pravega/schemaregistry/schemas/ProtobufSchema.java b/serializers/src/main/java/io/pravega/schemaregistry/schemas/ProtobufSchema.java index 2949e690d..990763c34 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/schemas/ProtobufSchema.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/schemas/ProtobufSchema.java @@ -39,26 +39,26 @@ public class ProtobufSchema implements Schema { @Getter private final Class tClass; @Getter - private final FileDescriptorSet descriptorProto; + private final FileDescriptorSet fileDescriptorSet; private final SchemaInfo schemaInfo; private ProtobufSchema(String name, Parser parser, Class tClass, FileDescriptorSet fileDescriptorSet) { this.parser = parser; this.tClass = tClass; - this.descriptorProto = fileDescriptorSet; + this.fileDescriptorSet = fileDescriptorSet; this.schemaInfo = new SchemaInfo(name, SerializationFormat.Protobuf, getSchemaBytes(), ImmutableMap.of()); } private ProtobufSchema(FileDescriptorSet fileDescriptorSet, SchemaInfo schemaInfo, Class tClass) { this.parser = null; this.tClass = null; - this.descriptorProto = fileDescriptorSet; + this.fileDescriptorSet = fileDescriptorSet; this.schemaInfo = schemaInfo; } private ByteBuffer getSchemaBytes() { - return ByteBuffer.wrap(descriptorProto.toByteArray()); + return ByteBuffer.wrap(fileDescriptorSet.toByteArray()); } @Override diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractSerializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractSerializer.java index 9b6c8f12a..66d462d43 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractSerializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractSerializer.java @@ -29,7 +29,6 @@ abstract class AbstractSerializer extends BaseSerializer { private static final byte PROTOCOL = 0x1; - private static final int HEADER_LENGTH = Byte.BYTES + Integer.BYTES; private final String groupId; diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufGenericDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufGenericDeserializer.java index 5a80cd218..878bdbfb1 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufGenericDeserializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/ProtobufGenericDeserializer.java @@ -46,7 +46,7 @@ protected DynamicMessage deserialize(InputStream inputStream, SchemaInfo writerS } private Descriptors.Descriptor parseSchema(SchemaInfo schemaToUse) { - DescriptorProtos.FileDescriptorSet descriptorSet = ProtobufSchema.from(schemaToUse).getDescriptorProto(); + 
DescriptorProtos.FileDescriptorSet descriptorSet = ProtobufSchema.from(schemaToUse).getFileDescriptorSet(); int count = descriptorSet.getFileCount(); String[] tokens = NameUtil.extractNameAndQualifier(schemaToUse.getType()); diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/WithSchema.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/WithSchema.java index 6d6ab58fd..d9aefb4ce 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/WithSchema.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/WithSchema.java @@ -127,7 +127,7 @@ public boolean hasProtobufSchema() { */ @SuppressWarnings("unchecked") public DescriptorProtos.FileDescriptorSet getProtobufSchema() { - return ((ProtobufSchema) schema).getDescriptorProto(); + return ((ProtobufSchema) schema).getFileDescriptorSet(); } /** @@ -143,11 +143,11 @@ public boolean hasJsonSchema() { * Json Schema for the underlying deserialized object. This is available if {@link WithSchema#hasJsonSchema()} returns true. * This means underlying object was serialized as Json. * - * @return Protobuf {@link JsonSchema} representing the schema for the object. + * @return Json schema String representing the schema for the object. */ @SuppressWarnings("unchecked") - public JsonSchema getJsonSchema() { - return ((JSONSchema) schema).getSchema(); + public String getJsonSchema() { + return ((JSONSchema) schema).getSchemaString(); } /** diff --git a/serializers/src/test/java/io/pravega/schemaregistry/schemas/SchemasTest.java b/serializers/src/test/java/io/pravega/schemaregistry/schemas/SchemasTest.java index 07aed6fba..3fff1c381 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/schemas/SchemasTest.java +++ b/serializers/src/test/java/io/pravega/schemaregistry/schemas/SchemasTest.java @@ -60,17 +60,17 @@ public void testAvroSchema() { public void testProtobufSchema() throws IOException { ProtobufSchema sm1 = ProtobufSchema.of(ProtobufTest.Message1.class); assertNotNull(sm1.getParser()); - assertNotNull(sm1.getDescriptorProto()); + assertNotNull(sm1.getFileDescriptorSet()); assertEquals(sm1.getSchemaInfo().getSerializationFormat(), SerializationFormat.Protobuf); ProtobufSchema bm1 = ProtobufSchema.ofGeneratedMessageV3(ProtobufTest.Message1.class); assertNotNull(bm1.getParser()); - assertNotNull(bm1.getDescriptorProto()); + assertNotNull(bm1.getFileDescriptorSet()); assertEquals(bm1.getSchemaInfo().getSerializationFormat(), SerializationFormat.Protobuf); ProtobufSchema bm2 = ProtobufSchema.ofGeneratedMessageV3(ProtobufTest.Message2.class); assertNotNull(bm2.getParser()); - assertNotNull(bm2.getDescriptorProto()); + assertNotNull(bm2.getFileDescriptorSet()); assertEquals(bm2.getSchemaInfo().getSerializationFormat(), SerializationFormat.Protobuf); Path path = Paths.get("src/test/resources/proto/protobufTest.pb"); @@ -79,22 +79,22 @@ public void testProtobufSchema() throws IOException { ProtobufSchema schema = ProtobufSchema.of(ProtobufTest.Message1.class.getName(), descriptorSet); assertNull(schema.getParser()); - assertNotNull(schema.getDescriptorProto()); + assertNotNull(schema.getFileDescriptorSet()); assertEquals(schema.getSchemaInfo().getSerializationFormat(), SerializationFormat.Protobuf); ProtobufSchema schema2 = ProtobufSchema.of(ProtobufTest.Message1.class, descriptorSet); assertNotNull(schema2.getParser()); - assertNotNull(schema2.getDescriptorProto()); + assertNotNull(schema2.getFileDescriptorSet()); 
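// Editor's note (illustration, not part of the patch): the assertions in this test separate the two
// descriptor-set based ways of building a ProtobufSchema. ProtobufSchema.of(<type name>, descriptorSet)
// carries no Parser (getParser() is null) and is suited to the generic DynamicMessage path, while the
// class-based overloads (of(Class, descriptorSet), ofGeneratedMessageV3(...)) also capture a typed Parser
// that ProtobufDeserializer uses via getParser().parseFrom(...). Both variants now expose the descriptors
// through the renamed getFileDescriptorSet() accessor.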
assertEquals(schema2.getSchemaInfo().getSerializationFormat(), SerializationFormat.Protobuf); ProtobufSchema baseSchema1 = ProtobufSchema.ofGeneratedMessageV3(ProtobufTest.Message1.class, descriptorSet); assertNotNull(baseSchema1.getParser()); - assertNotNull(baseSchema1.getDescriptorProto()); + assertNotNull(baseSchema1.getFileDescriptorSet()); assertEquals(baseSchema1.getSchemaInfo().getSerializationFormat(), SerializationFormat.Protobuf); ProtobufSchema baseSchema2 = ProtobufSchema.ofGeneratedMessageV3(ProtobufTest.Message2.class, descriptorSet); assertNotNull(baseSchema2.getParser()); - assertNotNull(baseSchema2.getDescriptorProto()); + assertNotNull(baseSchema2.getFileDescriptorSet()); assertEquals(baseSchema2.getSchemaInfo().getSerializationFormat(), SerializationFormat.Protobuf); } From 9cbaaaee31be205d36edb10271a3cee066747ea5 Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Wed, 15 Jul 2020 22:22:03 -0700 Subject: [PATCH 64/70] checkstyle Signed-off-by: Shivesh Ranjan --- .../java/io/pravega/schemaregistry/serializers/WithSchema.java | 1 - 1 file changed, 1 deletion(-) diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/WithSchema.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/WithSchema.java index d9aefb4ce..a28853f67 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/WithSchema.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/WithSchema.java @@ -11,7 +11,6 @@ import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.module.jsonSchema.JsonSchema; import com.google.protobuf.DescriptorProtos; import com.google.protobuf.DynamicMessage; import com.google.protobuf.GeneratedMessageV3; From c0e57beb719a91cf9845ce12a2ab10a05b61545a Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Wed, 15 Jul 2020 22:27:38 -0700 Subject: [PATCH 65/70] PR comment Signed-off-by: Shivesh Ranjan --- .../main/java/io/pravega/schemaregistry/codec/Codecs.java | 7 +++++++ .../java/io/pravega/schemaregistry/codec/CodecTest.java | 1 - 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/serializers/src/main/java/io/pravega/schemaregistry/codec/Codecs.java b/serializers/src/main/java/io/pravega/schemaregistry/codec/Codecs.java index 34fd14cc5..ee07be335 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/codec/Codecs.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/codec/Codecs.java @@ -53,6 +53,13 @@ public CodecType getCodecType() { @Override public void encode(ByteBuffer data, ByteArrayOutputStream bos) { + if (data.hasArray()) { + bos.write(data.array(), data.arrayOffset() + data.position(), data.remaining()); + } else { + byte[] b = new byte[data.remaining()]; + data.get(b); + bos.write(b, 0, b.length); + } } @Override diff --git a/serializers/src/test/java/io/pravega/schemaregistry/codec/CodecTest.java b/serializers/src/test/java/io/pravega/schemaregistry/codec/CodecTest.java index 646948925..6e86596b1 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/codec/CodecTest.java +++ b/serializers/src/test/java/io/pravega/schemaregistry/codec/CodecTest.java @@ -46,7 +46,6 @@ public void testCodec() throws IOException { Codec none = Codecs.None.getCodec(); assertEquals(none.getCodecType(), Codecs.None.getCodec().getCodecType()); none.encode(ByteBuffer.wrap(testStringBytes), byteArrayOutputStream); - byteArrayOutputStream.write(testStringBytes); encoded = 
ByteBuffer.wrap(byteArrayOutputStream.getData().array(), 0, byteArrayOutputStream.getData().getLength()); assertEquals(encoded.remaining(), testStringBytes.length); decoded = none.decode(encoded, ImmutableMap.of()); From 174c4e6ad9be5cd9421941ea06880b6bea1ffe36 Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Thu, 16 Jul 2020 04:06:27 -0700 Subject: [PATCH 66/70] PR comments Signed-off-by: Shivesh Ranjan --- .../serializers/JsonSerializer.java | 2 - ...ry.java => RegistrySerializerFactory.java} | 2 +- .../serializers/SerializerConfig.java | 11 ++- .../serializers/SerializerTest.java | 78 +++++++++---------- 4 files changed, 50 insertions(+), 43 deletions(-) rename serializers/src/main/java/io/pravega/schemaregistry/serializers/{SerializerFactory.java => RegistrySerializerFactory.java} (99%) diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializer.java index 50f4d2ec8..8c896a3ab 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonSerializer.java @@ -27,8 +27,6 @@ class JsonSerializer extends AbstractSerializer { super(groupId, client, schema, encoder, registerSchema, encodeHeader); objectMapper = new ObjectMapper(); objectMapper.setVisibility(PropertyAccessor.ALL, JsonAutoDetect.Visibility.ANY); - objectMapper.setVisibility(PropertyAccessor.FIELD, JsonAutoDetect.Visibility.ANY); - objectMapper.setVisibility(PropertyAccessor.CREATOR, JsonAutoDetect.Visibility.ANY); } @Override diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/RegistrySerializerFactory.java similarity index 99% rename from serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java rename to serializers/src/main/java/io/pravega/schemaregistry/serializers/RegistrySerializerFactory.java index 17c82be29..96d98e876 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/RegistrySerializerFactory.java @@ -35,7 +35,7 @@ import static io.pravega.schemaregistry.serializers.WithSchema.NO_TRANSFORM; @Slf4j -public class SerializerFactory { +public class RegistrySerializerFactory { public static final String PRAVEGA_EVENT_HEADER = "PravegaEventHeader"; // region avro diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java index 59138180d..4e4931b86 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java @@ -36,7 +36,7 @@ import java.util.concurrent.ConcurrentHashMap; /** - * Serializer Config class that is passed to {@link SerializerFactory} for creating serializer. + * Serializer Config class that is passed to {@link RegistrySerializerFactory} for creating serializer. */ @Data @Builder @@ -89,6 +89,15 @@ public class SerializerConfig { private final Decoders decoders; /** * Tells the deserializer that if supplied decoder codecTypes do not match group codecTypes then fail and exit upfront. 
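// Editor's sketch (not part of the patch): a writer/reader configuration pair illustrating the
// failOnCodecMismatch contract documented in this javadoc. Builder method names mirror the SerializerConfig
// field names generated by Lombok's @Builder and are assumptions, not verified API; registryClient and
// encryptingEncoder are hypothetical variables.
SerializerConfig writerConfig = SerializerConfig.builder()
        .groupId("mygroup")
        .client(registryClient)        // a SchemaRegistryClient instance
        .registerCodec(true)           // assumed to register the custom codec type with the group
        .encoder(encryptingEncoder)    // hypothetical Encoder that encrypts the serialized bytes
        .build();

SerializerConfig readerConfig = SerializerConfig.builder()
        .groupId("mygroup")
        .client(registryClient)
        .failOnCodecMismatch(true)     // default; no decoder for the encryption codec type => fail upfront
        .build();
// A deserializer built from readerConfig without a matching decoder is expected to fail at construction time
// rather than on the first event it cannot decode.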
+ * This is important when the writers have used a custom codec for which reader should be instantiated with a corresponding + * decoder otherwise it would fail to decode and read the data. + * As an example, if writer applications had implemented a custom encryption encoder which encrypted the data after + * serializing it, then the data will include an encoding id that will be resolved to the schema and the codec type name + * for the encryption codec. If the readers are not provided with a decoder for all data encoded with that codec type, + * it would fail to decode that data. This flag ensures that the readers check retrieve all the registered codec types + * with the registry service and fail if they are not instantiated with decoders for all the registered codec types. + * + * The default value for this is true. */ private final boolean failOnCodecMismatch; /** diff --git a/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java b/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java index 9bbc0f0ce..390c406f1 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java +++ b/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java @@ -90,24 +90,24 @@ public void testAvroSerializers() { doAnswer(x -> new EncodingId(2)).when(client).getEncodingId(anyString(), eq(versionInfo3), any()); doAnswer(x -> new EncodingInfo(versionInfo3, of.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(2))); - Serializer serializerStr = SerializerFactory.avroSerializer(config, of); + Serializer serializerStr = RegistrySerializerFactory.avroSerializer(config, of); GenericData.EnumSymbol enumSymbol = new GenericData.EnumSymbol(of.getSchema(), "a"); ByteBuffer serialized1 = serializerStr.serialize(enumSymbol); - Serializer deserializer1 = SerializerFactory.avroDeserializer(config, of); + Serializer deserializer1 = RegistrySerializerFactory.avroDeserializer(config, of); Object deserializedEnum = deserializer1.deserialize(serialized1); assertEquals(deserializedEnum, enumSymbol); - Serializer serializer = SerializerFactory.avroSerializer(config, schema1); + Serializer serializer = RegistrySerializerFactory.avroSerializer(config, schema1); Test1 test1 = new Test1("name", 1); ByteBuffer serialized = serializer.serialize(test1); - Serializer deserializer = SerializerFactory.avroDeserializer(config, schema1); + Serializer deserializer = RegistrySerializerFactory.avroDeserializer(config, schema1); Test1 deserialized = deserializer.deserialize(serialized); assertEquals(deserialized, test1); serialized = serializer.serialize(test1); - Serializer genericDeserializer = SerializerFactory.avroGenericDeserializer(config, null); + Serializer genericDeserializer = RegistrySerializerFactory.avroGenericDeserializer(config, null); Object genericDeserialized = genericDeserializer.deserialize(serialized); assertTrue(genericDeserialized instanceof GenericRecord); assertEquals(((GenericRecord) genericDeserialized).get("name").toString(), "name"); @@ -121,9 +121,9 @@ public void testAvroSerializers() { Map, AvroSchema> map = new HashMap<>(); map.put(Test1.class, schema1Base); map.put(Test2.class, schema2Base); - Serializer multiSerializer = SerializerFactory.avroMultiTypeSerializer(config, map); + Serializer multiSerializer = RegistrySerializerFactory.avroMultiTypeSerializer(config, map); serialized = multiSerializer.serialize(test1); - Serializer 
multiDeserializer = SerializerFactory.avroMultiTypeDeserializer(config, map); + Serializer multiDeserializer = RegistrySerializerFactory.avroMultiTypeDeserializer(config, map); SpecificRecordBase deserialized2 = multiDeserializer.deserialize(serialized); assertEquals(deserialized2, test1); @@ -133,7 +133,7 @@ public void testAvroSerializers() { Map, AvroSchema> map2 = new HashMap<>(); map2.put(Test1.class, schema1Base); - Serializer> fallbackDeserializer = SerializerFactory.avroTypedOrGenericDeserializer(config, map2); + Serializer> fallbackDeserializer = RegistrySerializerFactory.avroTypedOrGenericDeserializer(config, map2); serialized = multiSerializer.serialize(test1); Either fallback = fallbackDeserializer.deserialize(serialized); @@ -164,10 +164,10 @@ public void testAvroSerializersReflect() { doAnswer(x -> new EncodingInfo(versionInfo1, schema1.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(0))); doAnswer(x -> true).when(client).canReadUsing(anyString(), any()); - Serializer serializer = SerializerFactory.avroSerializer(config, schema1); + Serializer serializer = RegistrySerializerFactory.avroSerializer(config, schema1); ByteBuffer serialized = serializer.serialize(test1); - Serializer deserializer = SerializerFactory.avroDeserializer(config, schema1); + Serializer deserializer = RegistrySerializerFactory.avroDeserializer(config, schema1); TestClass deserialized = deserializer.deserialize(serialized); assertEquals(deserialized, test1); } @@ -194,16 +194,16 @@ public void testProtobufSerializers() throws IOException { doAnswer(x -> new EncodingInfo(versionInfo2, schema2.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(1))); doAnswer(x -> true).when(client).canReadUsing(anyString(), any()); - Serializer serializer = SerializerFactory.protobufSerializer(config, schema1); + Serializer serializer = RegistrySerializerFactory.protobufSerializer(config, schema1); ProtobufTest.Message2 message = ProtobufTest.Message2.newBuilder().setName("name").setField1(1).build(); ByteBuffer serialized = serializer.serialize(message); - Serializer deserializer = SerializerFactory.protobufDeserializer(config, schema1); + Serializer deserializer = RegistrySerializerFactory.protobufDeserializer(config, schema1); ProtobufTest.Message2 deserialized = deserializer.deserialize(serialized); assertEquals(deserialized, message); serialized = serializer.serialize(message); - Serializer genericDeserializer = SerializerFactory.protobufGenericDeserializer(config, null); + Serializer genericDeserializer = RegistrySerializerFactory.protobufGenericDeserializer(config, null); DynamicMessage generic = genericDeserializer.deserialize(serialized); assertEquals(generic.getAllFields().size(), 2); @@ -215,9 +215,9 @@ public void testProtobufSerializers() throws IOException { Map, ProtobufSchema> map = new HashMap<>(); map.put(ProtobufTest.Message2.class, schema1Base); map.put(ProtobufTest.Message3.class, schema2Base); - Serializer multiSerializer = SerializerFactory.protobufMultiTypeSerializer(config, map); + Serializer multiSerializer = RegistrySerializerFactory.protobufMultiTypeSerializer(config, map); serialized = multiSerializer.serialize(message); - Serializer multiDeserializer = SerializerFactory.protobufMultiTypeDeserializer(config, map); + Serializer multiDeserializer = RegistrySerializerFactory.protobufMultiTypeDeserializer(config, map); GeneratedMessageV3 deserialized2 = 
multiDeserializer.deserialize(serialized); assertEquals(deserialized2, message); @@ -227,7 +227,7 @@ public void testProtobufSerializers() throws IOException { Map, ProtobufSchema> map2 = new HashMap<>(); map2.put(ProtobufTest.Message2.class, schema1Base); - Serializer> fallbackDeserializer = SerializerFactory.protobufTypedOrGenericDeserializer(config, map2); + Serializer> fallbackDeserializer = RegistrySerializerFactory.protobufTypedOrGenericDeserializer(config, map2); serialized = multiSerializer.serialize(message); Either fallback = fallbackDeserializer.deserialize(serialized); assertTrue(fallback.isLeft()); @@ -258,22 +258,22 @@ public void testJsonSerializers() throws JsonProcessingException { doAnswer(x -> new EncodingInfo(versionInfo2, schema2.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(1))); doAnswer(x -> true).when(client).canReadUsing(anyString(), any()); - Serializer serializer = SerializerFactory.jsonSerializer(config, schema1); + Serializer serializer = RegistrySerializerFactory.jsonSerializer(config, schema1); DerivedUser1 user1 = new DerivedUser1("user", new Address("street", "city"), 2, "user1"); ByteBuffer serialized = serializer.serialize(user1); - Serializer deserializer = SerializerFactory.jsonDeserializer(config, schema1); + Serializer deserializer = RegistrySerializerFactory.jsonDeserializer(config, schema1); DerivedUser1 deserialized = deserializer.deserialize(serialized); assertEquals(deserialized, user1); serialized = serializer.serialize(user1); - Serializer> genericDeserializer = SerializerFactory.jsonGenericDeserializer(config); + Serializer> genericDeserializer = RegistrySerializerFactory.jsonGenericDeserializer(config); WithSchema generic = genericDeserializer.deserialize(serialized); assertEquals(((JSONSchema) generic.getSchema()).getSchema(), schema1.getSchema()); assertEquals(((JsonNode) generic.getObject()).size(), 4); serialized = serializer.serialize(user1); - Serializer stringDeserializer = SerializerFactory.jsonStringDeserializer(config); + Serializer stringDeserializer = RegistrySerializerFactory.jsonStringDeserializer(config); String str = stringDeserializer.deserialize(serialized); assertFalse(Strings.isNullOrEmpty(str)); @@ -285,7 +285,7 @@ public void testJsonSerializers() throws JsonProcessingException { doAnswer(x -> new EncodingId(2)).when(client).getEncodingId(anyString(), eq(versionInfo3), any()); doAnswer(x -> new EncodingInfo(versionInfo3, myData.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(2))); - Serializer serializer2 = SerializerFactory.jsonSerializer(config, myData); + Serializer serializer2 = RegistrySerializerFactory.jsonSerializer(config, myData); Map jsonObject = new HashMap<>(); jsonObject.put("content", "mxx"); @@ -300,9 +300,9 @@ public void testJsonSerializers() throws JsonProcessingException { doAnswer(x -> new EncodingId(3)).when(client).getEncodingId(anyString(), eq(versionInfo4), any()); doAnswer(x -> new EncodingInfo(versionInfo4, strSchema.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(3))); - Serializer serializer3 = SerializerFactory.jsonSerializer(config, strSchema); - Serializer deserializer3 = SerializerFactory.jsonDeserializer(config, strSchema); - Serializer> generic3 = SerializerFactory.jsonGenericDeserializer(config); + Serializer serializer3 = RegistrySerializerFactory.jsonSerializer(config, 
strSchema); + Serializer deserializer3 = RegistrySerializerFactory.jsonDeserializer(config, strSchema); + Serializer> generic3 = RegistrySerializerFactory.jsonGenericDeserializer(config); String string = "a"; s = serializer3.serialize(string); Object x = deserializer3.deserialize(s); @@ -320,9 +320,9 @@ public void testJsonSerializers() throws JsonProcessingException { Map, JSONSchema> map = new HashMap<>(); map.put(DerivedUser1.class, schema1Base); map.put(DerivedUser2.class, schema2Base); - Serializer multiSerializer = SerializerFactory.jsonMultiTypeSerializer(config, map); + Serializer multiSerializer = RegistrySerializerFactory.jsonMultiTypeSerializer(config, map); serialized = multiSerializer.serialize(user1); - Serializer multiDeserializer = SerializerFactory.jsonMultiTypeDeserializer(config, map); + Serializer multiDeserializer = RegistrySerializerFactory.jsonMultiTypeDeserializer(config, map); Object deserialized2 = multiDeserializer.deserialize(serialized); assertEquals(deserialized2, user1); @@ -332,7 +332,7 @@ public void testJsonSerializers() throws JsonProcessingException { Map, JSONSchema> map2 = new HashMap<>(); map2.put(DerivedUser1.class, schema1Base); - Serializer>> fallbackDeserializer = SerializerFactory.jsonTypedOrGenericDeserializer(config, map2); + Serializer>> fallbackDeserializer = RegistrySerializerFactory.jsonTypedOrGenericDeserializer(config, map2); serialized = multiSerializer.serialize(user1); Either> fallback = fallbackDeserializer.deserialize(serialized); assertTrue(fallback.isLeft()); @@ -373,19 +373,19 @@ public void testMultiformatDeserializers() throws IOException { doAnswer(x -> new EncodingInfo(versionInfo3, schema3.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(2))); doAnswer(x -> true).when(client).canReadUsing(anyString(), any()); - Serializer avroSerializer = SerializerFactory.avroSerializer(config, schema1); + Serializer avroSerializer = RegistrySerializerFactory.avroSerializer(config, schema1); Test1 test1 = new Test1("name", 1); ByteBuffer serializedAvro = avroSerializer.serialize(test1); - Serializer protobufSerializer = SerializerFactory.protobufSerializer(config, schema2); + Serializer protobufSerializer = RegistrySerializerFactory.protobufSerializer(config, schema2); ProtobufTest.Message2 message = ProtobufTest.Message2.newBuilder().setName("name").setField1(1).build(); ByteBuffer serializedProto = protobufSerializer.serialize(message); - Serializer jsonSerializer = SerializerFactory.jsonSerializer(config, schema3); + Serializer jsonSerializer = RegistrySerializerFactory.jsonSerializer(config, schema3); DerivedUser1 user1 = new DerivedUser1("user", new Address("street", "city"), 2, "user1"); ByteBuffer serializedJson = jsonSerializer.serialize(user1); - Serializer deserializer = SerializerFactory.genericDeserializer(config); + Serializer deserializer = RegistrySerializerFactory.genericDeserializer(config); Object deserialized = deserializer.deserialize(serializedAvro); assertTrue(deserialized instanceof GenericRecord); deserialized = deserializer.deserialize(serializedProto); @@ -393,7 +393,7 @@ public void testMultiformatDeserializers() throws IOException { deserialized = deserializer.deserialize(serializedJson); assertTrue(deserialized instanceof WithSchema); - Serializer jsonStringDeserializer = SerializerFactory.deserializeAsJsonString(config); + Serializer jsonStringDeserializer = RegistrySerializerFactory.deserializeAsJsonString(config); serializedAvro.position(0); 
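// Editor's note (illustration, not part of the patch): the earlier genericDeserializer call consumed
// serializedAvro by advancing its position, so the buffer is rewound to 0 above before the same bytes are
// handed to the JSON-string deserializer returned by deserializeAsJsonString(config).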
String jsonString = jsonStringDeserializer.deserialize(serializedAvro); assertNotNull(jsonString); @@ -423,24 +423,24 @@ public void testNoEncodingProto() throws IOException { doAnswer(x -> new SchemaWithVersion(schema1.getSchemaInfo(), versionInfo1)).when(client).getLatestSchemaVersion(anyString(), any()); doAnswer(x -> true).when(client).canReadUsing(anyString(), any()); - Serializer serializer = SerializerFactory.protobufSerializer(config, schema1); + Serializer serializer = RegistrySerializerFactory.protobufSerializer(config, schema1); verify(client, never()).getEncodingId(anyString(), any(), any()); ProtobufTest.Message2 message = ProtobufTest.Message2.newBuilder().setName("name").setField1(1).build(); ByteBuffer serialized = serializer.serialize(message); - Serializer deserializer = SerializerFactory.protobufDeserializer(config, schema1); + Serializer deserializer = RegistrySerializerFactory.protobufDeserializer(config, schema1); verify(client, never()).getEncodingInfo(anyString(), any()); ProtobufTest.Message2 deserialized = deserializer.deserialize(serialized); assertEquals(deserialized, message); serialized = serializer.serialize(message); - AssertExtensions.assertThrows(IllegalArgumentException.class, () -> SerializerFactory.protobufGenericDeserializer(config, null)); + AssertExtensions.assertThrows(IllegalArgumentException.class, () -> RegistrySerializerFactory.protobufGenericDeserializer(config, null)); SchemaInfo latestSchema = client.getLatestSchemaVersion("groupId", null).getSchemaInfo(); ProtobufSchema schemaDynamic = ProtobufSchema.of(latestSchema.getType(), descriptorSet); - Serializer genericDeserializer = SerializerFactory.protobufGenericDeserializer(config, schemaDynamic); + Serializer genericDeserializer = RegistrySerializerFactory.protobufGenericDeserializer(config, schemaDynamic); DynamicMessage generic = genericDeserializer.deserialize(serialized); assertEquals(generic.getAllFields().size(), 2); @@ -461,19 +461,19 @@ public void testNoEncodingJson() throws IOException { doAnswer(x -> new SchemaWithVersion(schema1.getSchemaInfo(), versionInfo1)).when(client).getLatestSchemaVersion(anyString(), any()); doAnswer(x -> true).when(client).canReadUsing(anyString(), any()); - Serializer serializer = SerializerFactory.jsonSerializer(config, schema1); + Serializer serializer = RegistrySerializerFactory.jsonSerializer(config, schema1); verify(client, never()).getEncodingId(anyString(), any(), any()); DerivedUser1 user1 = new DerivedUser1("user", new Address("street", "city"), 2, "user1"); ByteBuffer serialized = serializer.serialize(user1); - Serializer deserializer = SerializerFactory.jsonDeserializer(config, schema1); + Serializer deserializer = RegistrySerializerFactory.jsonDeserializer(config, schema1); verify(client, never()).getEncodingInfo(anyString(), any()); DerivedUser1 deserialized = deserializer.deserialize(serialized); assertEquals(deserialized, user1); serialized = serializer.serialize(user1); - Serializer> genericDeserializer = SerializerFactory.jsonGenericDeserializer(config); + Serializer> genericDeserializer = RegistrySerializerFactory.jsonGenericDeserializer(config); WithSchema generic = genericDeserializer.deserialize(serialized); assertNotNull(generic.getObject()); From fe0974f7422d4dcba9970d96dee3e8f7c6fc5ac3 Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Thu, 16 Jul 2020 04:14:37 -0700 Subject: [PATCH 67/70] revert rename of registry serializer factory Signed-off-by: Shivesh Ranjan --- .../serializers/SerializerConfig.java | 2 +- ...zerFactory.java 
=> SerializerFactory.java} | 4 +- .../serializers/SerializerTest.java | 78 +++++++++---------- 3 files changed, 41 insertions(+), 43 deletions(-) rename serializers/src/main/java/io/pravega/schemaregistry/serializers/{RegistrySerializerFactory.java => SerializerFactory.java} (99%) diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java index 4e4931b86..a9c648bdb 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerConfig.java @@ -36,7 +36,7 @@ import java.util.concurrent.ConcurrentHashMap; /** - * Serializer Config class that is passed to {@link RegistrySerializerFactory} for creating serializer. + * Serializer Config class that is passed to {@link SerializerFactory} for creating serializer. */ @Data @Builder diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/RegistrySerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java similarity index 99% rename from serializers/src/main/java/io/pravega/schemaregistry/serializers/RegistrySerializerFactory.java rename to serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java index 96d98e876..e2467ddd0 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/RegistrySerializerFactory.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactory.java @@ -35,9 +35,7 @@ import static io.pravega.schemaregistry.serializers.WithSchema.NO_TRANSFORM; @Slf4j -public class RegistrySerializerFactory { - public static final String PRAVEGA_EVENT_HEADER = "PravegaEventHeader"; - +public class SerializerFactory { // region avro /** * Creates a typed avro serializer for the Schema. 
The serializer implementation returned from this method is diff --git a/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java b/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java index 390c406f1..9bbc0f0ce 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java +++ b/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java @@ -90,24 +90,24 @@ public void testAvroSerializers() { doAnswer(x -> new EncodingId(2)).when(client).getEncodingId(anyString(), eq(versionInfo3), any()); doAnswer(x -> new EncodingInfo(versionInfo3, of.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(2))); - Serializer serializerStr = RegistrySerializerFactory.avroSerializer(config, of); + Serializer serializerStr = SerializerFactory.avroSerializer(config, of); GenericData.EnumSymbol enumSymbol = new GenericData.EnumSymbol(of.getSchema(), "a"); ByteBuffer serialized1 = serializerStr.serialize(enumSymbol); - Serializer deserializer1 = RegistrySerializerFactory.avroDeserializer(config, of); + Serializer deserializer1 = SerializerFactory.avroDeserializer(config, of); Object deserializedEnum = deserializer1.deserialize(serialized1); assertEquals(deserializedEnum, enumSymbol); - Serializer serializer = RegistrySerializerFactory.avroSerializer(config, schema1); + Serializer serializer = SerializerFactory.avroSerializer(config, schema1); Test1 test1 = new Test1("name", 1); ByteBuffer serialized = serializer.serialize(test1); - Serializer deserializer = RegistrySerializerFactory.avroDeserializer(config, schema1); + Serializer deserializer = SerializerFactory.avroDeserializer(config, schema1); Test1 deserialized = deserializer.deserialize(serialized); assertEquals(deserialized, test1); serialized = serializer.serialize(test1); - Serializer genericDeserializer = RegistrySerializerFactory.avroGenericDeserializer(config, null); + Serializer genericDeserializer = SerializerFactory.avroGenericDeserializer(config, null); Object genericDeserialized = genericDeserializer.deserialize(serialized); assertTrue(genericDeserialized instanceof GenericRecord); assertEquals(((GenericRecord) genericDeserialized).get("name").toString(), "name"); @@ -121,9 +121,9 @@ public void testAvroSerializers() { Map, AvroSchema> map = new HashMap<>(); map.put(Test1.class, schema1Base); map.put(Test2.class, schema2Base); - Serializer multiSerializer = RegistrySerializerFactory.avroMultiTypeSerializer(config, map); + Serializer multiSerializer = SerializerFactory.avroMultiTypeSerializer(config, map); serialized = multiSerializer.serialize(test1); - Serializer multiDeserializer = RegistrySerializerFactory.avroMultiTypeDeserializer(config, map); + Serializer multiDeserializer = SerializerFactory.avroMultiTypeDeserializer(config, map); SpecificRecordBase deserialized2 = multiDeserializer.deserialize(serialized); assertEquals(deserialized2, test1); @@ -133,7 +133,7 @@ public void testAvroSerializers() { Map, AvroSchema> map2 = new HashMap<>(); map2.put(Test1.class, schema1Base); - Serializer> fallbackDeserializer = RegistrySerializerFactory.avroTypedOrGenericDeserializer(config, map2); + Serializer> fallbackDeserializer = SerializerFactory.avroTypedOrGenericDeserializer(config, map2); serialized = multiSerializer.serialize(test1); Either fallback = fallbackDeserializer.deserialize(serialized); @@ -164,10 +164,10 @@ public void testAvroSerializersReflect() { doAnswer(x 
-> new EncodingInfo(versionInfo1, schema1.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(0))); doAnswer(x -> true).when(client).canReadUsing(anyString(), any()); - Serializer serializer = RegistrySerializerFactory.avroSerializer(config, schema1); + Serializer serializer = SerializerFactory.avroSerializer(config, schema1); ByteBuffer serialized = serializer.serialize(test1); - Serializer deserializer = RegistrySerializerFactory.avroDeserializer(config, schema1); + Serializer deserializer = SerializerFactory.avroDeserializer(config, schema1); TestClass deserialized = deserializer.deserialize(serialized); assertEquals(deserialized, test1); } @@ -194,16 +194,16 @@ public void testProtobufSerializers() throws IOException { doAnswer(x -> new EncodingInfo(versionInfo2, schema2.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(1))); doAnswer(x -> true).when(client).canReadUsing(anyString(), any()); - Serializer serializer = RegistrySerializerFactory.protobufSerializer(config, schema1); + Serializer serializer = SerializerFactory.protobufSerializer(config, schema1); ProtobufTest.Message2 message = ProtobufTest.Message2.newBuilder().setName("name").setField1(1).build(); ByteBuffer serialized = serializer.serialize(message); - Serializer deserializer = RegistrySerializerFactory.protobufDeserializer(config, schema1); + Serializer deserializer = SerializerFactory.protobufDeserializer(config, schema1); ProtobufTest.Message2 deserialized = deserializer.deserialize(serialized); assertEquals(deserialized, message); serialized = serializer.serialize(message); - Serializer genericDeserializer = RegistrySerializerFactory.protobufGenericDeserializer(config, null); + Serializer genericDeserializer = SerializerFactory.protobufGenericDeserializer(config, null); DynamicMessage generic = genericDeserializer.deserialize(serialized); assertEquals(generic.getAllFields().size(), 2); @@ -215,9 +215,9 @@ public void testProtobufSerializers() throws IOException { Map, ProtobufSchema> map = new HashMap<>(); map.put(ProtobufTest.Message2.class, schema1Base); map.put(ProtobufTest.Message3.class, schema2Base); - Serializer multiSerializer = RegistrySerializerFactory.protobufMultiTypeSerializer(config, map); + Serializer multiSerializer = SerializerFactory.protobufMultiTypeSerializer(config, map); serialized = multiSerializer.serialize(message); - Serializer multiDeserializer = RegistrySerializerFactory.protobufMultiTypeDeserializer(config, map); + Serializer multiDeserializer = SerializerFactory.protobufMultiTypeDeserializer(config, map); GeneratedMessageV3 deserialized2 = multiDeserializer.deserialize(serialized); assertEquals(deserialized2, message); @@ -227,7 +227,7 @@ public void testProtobufSerializers() throws IOException { Map, ProtobufSchema> map2 = new HashMap<>(); map2.put(ProtobufTest.Message2.class, schema1Base); - Serializer> fallbackDeserializer = RegistrySerializerFactory.protobufTypedOrGenericDeserializer(config, map2); + Serializer> fallbackDeserializer = SerializerFactory.protobufTypedOrGenericDeserializer(config, map2); serialized = multiSerializer.serialize(message); Either fallback = fallbackDeserializer.deserialize(serialized); assertTrue(fallback.isLeft()); @@ -258,22 +258,22 @@ public void testJsonSerializers() throws JsonProcessingException { doAnswer(x -> new EncodingInfo(versionInfo2, schema2.getSchemaInfo(), 
Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(1))); doAnswer(x -> true).when(client).canReadUsing(anyString(), any()); - Serializer serializer = RegistrySerializerFactory.jsonSerializer(config, schema1); + Serializer serializer = SerializerFactory.jsonSerializer(config, schema1); DerivedUser1 user1 = new DerivedUser1("user", new Address("street", "city"), 2, "user1"); ByteBuffer serialized = serializer.serialize(user1); - Serializer deserializer = RegistrySerializerFactory.jsonDeserializer(config, schema1); + Serializer deserializer = SerializerFactory.jsonDeserializer(config, schema1); DerivedUser1 deserialized = deserializer.deserialize(serialized); assertEquals(deserialized, user1); serialized = serializer.serialize(user1); - Serializer> genericDeserializer = RegistrySerializerFactory.jsonGenericDeserializer(config); + Serializer> genericDeserializer = SerializerFactory.jsonGenericDeserializer(config); WithSchema generic = genericDeserializer.deserialize(serialized); assertEquals(((JSONSchema) generic.getSchema()).getSchema(), schema1.getSchema()); assertEquals(((JsonNode) generic.getObject()).size(), 4); serialized = serializer.serialize(user1); - Serializer stringDeserializer = RegistrySerializerFactory.jsonStringDeserializer(config); + Serializer stringDeserializer = SerializerFactory.jsonStringDeserializer(config); String str = stringDeserializer.deserialize(serialized); assertFalse(Strings.isNullOrEmpty(str)); @@ -285,7 +285,7 @@ public void testJsonSerializers() throws JsonProcessingException { doAnswer(x -> new EncodingId(2)).when(client).getEncodingId(anyString(), eq(versionInfo3), any()); doAnswer(x -> new EncodingInfo(versionInfo3, myData.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(2))); - Serializer serializer2 = RegistrySerializerFactory.jsonSerializer(config, myData); + Serializer serializer2 = SerializerFactory.jsonSerializer(config, myData); Map jsonObject = new HashMap<>(); jsonObject.put("content", "mxx"); @@ -300,9 +300,9 @@ public void testJsonSerializers() throws JsonProcessingException { doAnswer(x -> new EncodingId(3)).when(client).getEncodingId(anyString(), eq(versionInfo4), any()); doAnswer(x -> new EncodingInfo(versionInfo4, strSchema.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(3))); - Serializer serializer3 = RegistrySerializerFactory.jsonSerializer(config, strSchema); - Serializer deserializer3 = RegistrySerializerFactory.jsonDeserializer(config, strSchema); - Serializer> generic3 = RegistrySerializerFactory.jsonGenericDeserializer(config); + Serializer serializer3 = SerializerFactory.jsonSerializer(config, strSchema); + Serializer deserializer3 = SerializerFactory.jsonDeserializer(config, strSchema); + Serializer> generic3 = SerializerFactory.jsonGenericDeserializer(config); String string = "a"; s = serializer3.serialize(string); Object x = deserializer3.deserialize(s); @@ -320,9 +320,9 @@ public void testJsonSerializers() throws JsonProcessingException { Map, JSONSchema> map = new HashMap<>(); map.put(DerivedUser1.class, schema1Base); map.put(DerivedUser2.class, schema2Base); - Serializer multiSerializer = RegistrySerializerFactory.jsonMultiTypeSerializer(config, map); + Serializer multiSerializer = SerializerFactory.jsonMultiTypeSerializer(config, map); serialized = multiSerializer.serialize(user1); - Serializer multiDeserializer = 
RegistrySerializerFactory.jsonMultiTypeDeserializer(config, map); + Serializer multiDeserializer = SerializerFactory.jsonMultiTypeDeserializer(config, map); Object deserialized2 = multiDeserializer.deserialize(serialized); assertEquals(deserialized2, user1); @@ -332,7 +332,7 @@ public void testJsonSerializers() throws JsonProcessingException { Map, JSONSchema> map2 = new HashMap<>(); map2.put(DerivedUser1.class, schema1Base); - Serializer>> fallbackDeserializer = RegistrySerializerFactory.jsonTypedOrGenericDeserializer(config, map2); + Serializer>> fallbackDeserializer = SerializerFactory.jsonTypedOrGenericDeserializer(config, map2); serialized = multiSerializer.serialize(user1); Either> fallback = fallbackDeserializer.deserialize(serialized); assertTrue(fallback.isLeft()); @@ -373,19 +373,19 @@ public void testMultiformatDeserializers() throws IOException { doAnswer(x -> new EncodingInfo(versionInfo3, schema3.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(2))); doAnswer(x -> true).when(client).canReadUsing(anyString(), any()); - Serializer avroSerializer = RegistrySerializerFactory.avroSerializer(config, schema1); + Serializer avroSerializer = SerializerFactory.avroSerializer(config, schema1); Test1 test1 = new Test1("name", 1); ByteBuffer serializedAvro = avroSerializer.serialize(test1); - Serializer protobufSerializer = RegistrySerializerFactory.protobufSerializer(config, schema2); + Serializer protobufSerializer = SerializerFactory.protobufSerializer(config, schema2); ProtobufTest.Message2 message = ProtobufTest.Message2.newBuilder().setName("name").setField1(1).build(); ByteBuffer serializedProto = protobufSerializer.serialize(message); - Serializer jsonSerializer = RegistrySerializerFactory.jsonSerializer(config, schema3); + Serializer jsonSerializer = SerializerFactory.jsonSerializer(config, schema3); DerivedUser1 user1 = new DerivedUser1("user", new Address("street", "city"), 2, "user1"); ByteBuffer serializedJson = jsonSerializer.serialize(user1); - Serializer deserializer = RegistrySerializerFactory.genericDeserializer(config); + Serializer deserializer = SerializerFactory.genericDeserializer(config); Object deserialized = deserializer.deserialize(serializedAvro); assertTrue(deserialized instanceof GenericRecord); deserialized = deserializer.deserialize(serializedProto); @@ -393,7 +393,7 @@ public void testMultiformatDeserializers() throws IOException { deserialized = deserializer.deserialize(serializedJson); assertTrue(deserialized instanceof WithSchema); - Serializer jsonStringDeserializer = RegistrySerializerFactory.deserializeAsJsonString(config); + Serializer jsonStringDeserializer = SerializerFactory.deserializeAsJsonString(config); serializedAvro.position(0); String jsonString = jsonStringDeserializer.deserialize(serializedAvro); assertNotNull(jsonString); @@ -423,24 +423,24 @@ public void testNoEncodingProto() throws IOException { doAnswer(x -> new SchemaWithVersion(schema1.getSchemaInfo(), versionInfo1)).when(client).getLatestSchemaVersion(anyString(), any()); doAnswer(x -> true).when(client).canReadUsing(anyString(), any()); - Serializer serializer = RegistrySerializerFactory.protobufSerializer(config, schema1); + Serializer serializer = SerializerFactory.protobufSerializer(config, schema1); verify(client, never()).getEncodingId(anyString(), any(), any()); ProtobufTest.Message2 message = ProtobufTest.Message2.newBuilder().setName("name").setField1(1).build(); ByteBuffer serialized = 
serializer.serialize(message); - Serializer deserializer = RegistrySerializerFactory.protobufDeserializer(config, schema1); + Serializer deserializer = SerializerFactory.protobufDeserializer(config, schema1); verify(client, never()).getEncodingInfo(anyString(), any()); ProtobufTest.Message2 deserialized = deserializer.deserialize(serialized); assertEquals(deserialized, message); serialized = serializer.serialize(message); - AssertExtensions.assertThrows(IllegalArgumentException.class, () -> RegistrySerializerFactory.protobufGenericDeserializer(config, null)); + AssertExtensions.assertThrows(IllegalArgumentException.class, () -> SerializerFactory.protobufGenericDeserializer(config, null)); SchemaInfo latestSchema = client.getLatestSchemaVersion("groupId", null).getSchemaInfo(); ProtobufSchema schemaDynamic = ProtobufSchema.of(latestSchema.getType(), descriptorSet); - Serializer genericDeserializer = RegistrySerializerFactory.protobufGenericDeserializer(config, schemaDynamic); + Serializer genericDeserializer = SerializerFactory.protobufGenericDeserializer(config, schemaDynamic); DynamicMessage generic = genericDeserializer.deserialize(serialized); assertEquals(generic.getAllFields().size(), 2); @@ -461,19 +461,19 @@ public void testNoEncodingJson() throws IOException { doAnswer(x -> new SchemaWithVersion(schema1.getSchemaInfo(), versionInfo1)).when(client).getLatestSchemaVersion(anyString(), any()); doAnswer(x -> true).when(client).canReadUsing(anyString(), any()); - Serializer serializer = RegistrySerializerFactory.jsonSerializer(config, schema1); + Serializer serializer = SerializerFactory.jsonSerializer(config, schema1); verify(client, never()).getEncodingId(anyString(), any(), any()); DerivedUser1 user1 = new DerivedUser1("user", new Address("street", "city"), 2, "user1"); ByteBuffer serialized = serializer.serialize(user1); - Serializer deserializer = RegistrySerializerFactory.jsonDeserializer(config, schema1); + Serializer deserializer = SerializerFactory.jsonDeserializer(config, schema1); verify(client, never()).getEncodingInfo(anyString(), any()); DerivedUser1 deserialized = deserializer.deserialize(serialized); assertEquals(deserialized, user1); serialized = serializer.serialize(user1); - Serializer> genericDeserializer = RegistrySerializerFactory.jsonGenericDeserializer(config); + Serializer> genericDeserializer = SerializerFactory.jsonGenericDeserializer(config); WithSchema generic = genericDeserializer.deserialize(serialized); assertNotNull(generic.getObject()); From d464b6b8e191edc428260670e0427044d3ea405d Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Thu, 16 Jul 2020 23:58:53 -0700 Subject: [PATCH 68/70] PR comment - generic T on JSONSchema Signed-off-by: Shivesh Ranjan --- .../schemaregistry/schemas/JSONSchema.java | 17 ++++++++++------- .../serializers/JsonGenericDeserializer.java | 2 -- .../MultiFormatSerializerFactory.java | 7 ++++--- .../schemaregistry/serializers/WithSchema.java | 7 +++++++ .../schemaregistry/schemas/SchemasTest.java | 5 ++--- .../serializers/SerializerTest.java | 14 +++++++------- 6 files changed, 30 insertions(+), 22 deletions(-) diff --git a/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java b/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java index b7a252a92..55a90cf80 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java @@ -10,6 +10,7 @@ package 
io.pravega.schemaregistry.schemas; import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.module.jsonSchema.JsonSchema; import com.fasterxml.jackson.module.jsonSchema.JsonSchemaGenerator; @@ -105,18 +106,20 @@ public static JSONSchema of(String type, JsonSchema schema) { } /** - * Method to create a typed JSONSchema of type {@link Object} from the given schema string. + * Method to create a typed JSONSchema of type T from the given schema string. * * @param type type of object identified by {@link SchemaInfo#getType()}. * @param schemaString Schema string to use. + * @param tClass class for the type of object + * @param Type of object * @return Returns an JSONSchema with {@link Object} type. */ - public static JSONSchema of(String type, String schemaString) { + public static JSONSchema of(String type, String schemaString, Class tClass) { Preconditions.checkNotNull(type, "Type cannot be null."); Preconditions.checkArgument(!Strings.isNullOrEmpty(schemaString), "Schema String cannot be null or empty."); try { JsonSchema schema = OBJECT_MAPPER.readValue(schemaString, JsonSchema.class); - return new JSONSchema<>(schema, type, schemaString, Object.class); + return new JSONSchema<>(schema, type, schemaString, tClass); } catch (JsonProcessingException e) { throw new IllegalArgumentException("Unable to parse schema string", e); } @@ -147,18 +150,18 @@ public static JSONSchema ofBaseType(Class tDerived, Class } /** - * Method to create a typed JSONSchema of type {@link Object} from the given schema. + * Method to create a typed JSONSchema of type {@link JsonNode} from the given schema. * * @param schemaInfo Schema info to translate into json schema. - * @return Returns an JSONSchema with {@link Object} type. + * @return Returns an JSONSchema with {@link JsonNode} type. 
*/ - public static JSONSchema from(SchemaInfo schemaInfo) { + public static JSONSchema from(SchemaInfo schemaInfo) { Preconditions.checkNotNull(schemaInfo); try { String schemaString = new String(schemaInfo.getSchemaData().array(), Charsets.UTF_8); JsonSchema schema = OBJECT_MAPPER.readValue(schemaString, JsonSchema.class); - return new JSONSchema<>(schemaInfo, schema, schemaString, Object.class); + return new JSONSchema<>(schemaInfo, schema, schemaString, JsonNode.class); } catch (JsonProcessingException e) { throw new IllegalArgumentException("Unable to get json schema from schema info", e); } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericDeserializer.java index d1d955af7..29daaa05d 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericDeserializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/JsonGenericDeserializer.java @@ -27,8 +27,6 @@ class JsonGenericDeserializer extends AbstractDeserializer> super(groupId, client, null, false, decoders, encodingCache, encodeHeader); this.objectMapper = new ObjectMapper(); objectMapper.setVisibility(PropertyAccessor.ALL, JsonAutoDetect.Visibility.ANY); - objectMapper.setVisibility(PropertyAccessor.FIELD, JsonAutoDetect.Visibility.ANY); - objectMapper.setVisibility(PropertyAccessor.CREATOR, JsonAutoDetect.Visibility.ANY); } @Override diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatSerializerFactory.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatSerializerFactory.java index abe8798b3..bee87cb01 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatSerializerFactory.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/MultiFormatSerializerFactory.java @@ -167,12 +167,13 @@ private static AbstractSerializer getPravegaSerializer( return new AvroSerializer<>(groupId, schemaRegistryClient, AvroSchema.from(schemaInfo), config.getEncoder(), config.isRegisterSchema()); case Protobuf: - ProtobufSerializer m = new ProtobufSerializer<>(groupId, schemaRegistryClient, + ProtobufSerializer pSerializer = new ProtobufSerializer<>(groupId, schemaRegistryClient, ProtobufSchema.from(schemaInfo), config.getEncoder(), config.isRegisterSchema(), config.isWriteEncodingHeader()); - return (AbstractSerializer) m; + return (AbstractSerializer) pSerializer; case Json: - return new JsonSerializer<>(groupId, schemaRegistryClient, JSONSchema.from(schemaInfo), + JsonSerializer jsonSerializer = new JsonSerializer<>(groupId, schemaRegistryClient, JSONSchema.from(schemaInfo), config.getEncoder(), config.isRegisterSchema(), config.isWriteEncodingHeader()); + return (AbstractSerializer) jsonSerializer; case Custom: return getCustomSerializer(config, customSerializers, schemaRegistryClient, groupId, schemaInfo); default: diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/WithSchema.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/WithSchema.java index a28853f67..25b0b9d19 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/WithSchema.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/WithSchema.java @@ -11,6 +11,7 @@ import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; +import 
com.google.common.base.Preconditions; import com.google.protobuf.DescriptorProtos; import com.google.protobuf.DynamicMessage; import com.google.protobuf.GeneratedMessageV3; @@ -212,6 +213,8 @@ private static String toJsonString(SerializationFormat format, Object deserializ * @return A WithSchema object which has Avro Schema and the corresponding object. */ public static WithSchema avro(T object, AvroSchema avroSchema) { + Preconditions.checkNotNull(object, "object cannot be null"); + Preconditions.checkNotNull(avroSchema, "schema cannot be null"); return new WithSchema<>(avroSchema.getSchemaInfo(), object, (x, y) -> object); } @@ -224,6 +227,8 @@ public static WithSchema avro(T object, AvroSchema avroSchema) { * @return A WithSchema object which has Protobuf Schema and the corresponding object. */ public static WithSchema proto(T object, ProtobufSchema protobufSchema) { + Preconditions.checkNotNull(object, "object cannot be null"); + Preconditions.checkNotNull(protobufSchema, "schema cannot be null"); return new WithSchema<>(protobufSchema.getSchemaInfo(), object, (x, y) -> object); } @@ -236,6 +241,8 @@ public static WithSchema proto(T object, Proto * @return A WithSchema object which has Json schema and the corresponding object. */ public static WithSchema json(T object, JSONSchema jsonSchema) { + Preconditions.checkNotNull(object, "object cannot be null"); + Preconditions.checkNotNull(jsonSchema, "schema cannot be null"); return new WithSchema<>(jsonSchema.getSchemaInfo(), object, (x, y) -> object); } } diff --git a/serializers/src/test/java/io/pravega/schemaregistry/schemas/SchemasTest.java b/serializers/src/test/java/io/pravega/schemaregistry/schemas/SchemasTest.java index 3fff1c381..f3a6cba01 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/schemas/SchemasTest.java +++ b/serializers/src/test/java/io/pravega/schemaregistry/schemas/SchemasTest.java @@ -9,7 +9,6 @@ */ package io.pravega.schemaregistry.schemas; -import com.fasterxml.jackson.core.JsonProcessingException; import com.google.protobuf.DescriptorProtos; import com.google.protobuf.DynamicMessage; import com.google.protobuf.GeneratedMessageV3; @@ -99,12 +98,12 @@ public void testProtobufSchema() throws IOException { } @Test - public void testJsonSchema() throws JsonProcessingException { + public void testJsonSchema() { JSONSchema schema = JSONSchema.of(User.class); assertNotNull(schema.getSchema()); assertEquals(schema.getSchemaInfo().getSerializationFormat(), SerializationFormat.Json); - JSONSchema schema2 = JSONSchema.of("Person", JSON_SCHEMA_STRING); + JSONSchema schema2 = JSONSchema.of("Person", JSON_SCHEMA_STRING, Object.class); assertNotNull(schema2.getSchema()); assertEquals(schema2.getSchemaInfo().getSerializationFormat(), SerializationFormat.Json); diff --git a/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java b/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java index 9bbc0f0ce..8ee567058 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java +++ b/serializers/src/test/java/io/pravega/schemaregistry/serializers/SerializerTest.java @@ -279,14 +279,14 @@ public void testJsonSerializers() throws JsonProcessingException { String schemaString = "{\"type\": \"object\",\"title\": \"The external data schema\",\"properties\": {\"content\": {\"type\": \"string\"}}}"; - JSONSchema myData = JSONSchema.of("MyData", schemaString); + JSONSchema myData = JSONSchema.of("MyData", schemaString, HashMap.class); 
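        // A minimal sketch of the typed JSONSchema.of(type, schemaString, tClass) overload introduced by this
        // patch, reusing this test's schemaString and config. The "ExternalData" name and the sample map are
        // hypothetical, and (as elsewhere in this test) the mocked client would need stubs for the new schema
        // before the serializer is actually exercised.
        JSONSchema<HashMap> externalData = JSONSchema.of("ExternalData", schemaString, HashMap.class);
        Serializer<HashMap> externalSerializer = SerializerFactory.jsonSerializer(config, externalData);
        HashMap<String, String> sample = new HashMap<>();
        sample.put("content", "sample-value");
        ByteBuffer sampleBytes = externalSerializer.serialize(sample);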
VersionInfo versionInfo3 = new VersionInfo("myData", 0, 2); doAnswer(x -> versionInfo3).when(client).getVersionForSchema(anyString(), eq(myData.getSchemaInfo())); doAnswer(x -> new EncodingId(2)).when(client).getEncodingId(anyString(), eq(versionInfo3), any()); doAnswer(x -> new EncodingInfo(versionInfo3, myData.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(2))); - Serializer serializer2 = SerializerFactory.jsonSerializer(config, myData); - Map jsonObject = new HashMap<>(); + Serializer serializer2 = SerializerFactory.jsonSerializer(config, myData); + HashMap jsonObject = new HashMap<>(); jsonObject.put("content", "mxx"); ByteBuffer s = serializer2.serialize(jsonObject); @@ -294,19 +294,19 @@ public void testJsonSerializers() throws JsonProcessingException { String stringSchema = new ObjectMapper().writeValueAsString(JsonSchema.minimalForFormat(JsonFormatTypes.STRING)); - JSONSchema strSchema = JSONSchema.of("string", stringSchema); + JSONSchema strSchema = JSONSchema.of("string", stringSchema, String.class); VersionInfo versionInfo4 = new VersionInfo("myData", 0, 3); doAnswer(x -> versionInfo4).when(client).getVersionForSchema(anyString(), eq(strSchema.getSchemaInfo())); doAnswer(x -> new EncodingId(3)).when(client).getEncodingId(anyString(), eq(versionInfo4), any()); doAnswer(x -> new EncodingInfo(versionInfo4, strSchema.getSchemaInfo(), Codecs.None.getCodec().getCodecType())).when(client).getEncodingInfo(anyString(), eq(new EncodingId(3))); - Serializer serializer3 = SerializerFactory.jsonSerializer(config, strSchema); - Serializer deserializer3 = SerializerFactory.jsonDeserializer(config, strSchema); + Serializer serializer3 = SerializerFactory.jsonSerializer(config, strSchema); + Serializer deserializer3 = SerializerFactory.jsonDeserializer(config, strSchema); Serializer> generic3 = SerializerFactory.jsonGenericDeserializer(config); String string = "a"; s = serializer3.serialize(string); Object x = deserializer3.deserialize(s); - assertTrue(x instanceof String); + assertNotNull(x); assertEquals(x, string); s = serializer3.serialize(string); Object jsonNode = generic3.deserialize(s); From 9137aa6682588278ace4200a2ca4bdc4d95ec279 Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Fri, 17 Jul 2020 01:06:43 -0700 Subject: [PATCH 69/70] Merge with master, use everit json schema Signed-off-by: Shivesh Ranjan --- build.gradle | 5 ++ checkstyle/import-control.xml | 2 + gradle.properties | 1 + .../schemaregistry/schemas/JSONSchema.java | 66 +++++++++++-------- .../service/SchemaRegistryService.java | 49 +++++++++++++- 5 files changed, 92 insertions(+), 31 deletions(-) diff --git a/build.gradle b/build.gradle index 754d8d5fc..ab723b7dd 100644 --- a/build.gradle +++ b/build.gradle @@ -84,6 +84,9 @@ allprojects { maven { url "https://repository.apache.org/snapshots" } + maven { + url 'https://repository.mulesoft.org/nexus/content/repositories/public' + } } gradle.projectsEvaluated { @@ -222,6 +225,7 @@ project('serializers') { compile group: 'io.pravega', name: 'pravega-client', version: pravegaVersion compile group: 'org.xerial.snappy', name: 'snappy-java', version: snappyVersion compile group: 'com.fasterxml.jackson.module', name: 'jackson-module-jsonSchema', version: jacksonVersion + compile group: 'com.github.everit-org.json-schema', name: 'org.everit.json.schema', version: everitVersion testCompile group: 'org.slf4j', name: 'log4j-over-slf4j', version: slf4jApiVersion testCompile group: 'ch.qos.logback', name: 
'logback-classic', version: qosLogbackVersion testCompile group: 'io.pravega', name: 'pravega-test-testcommon', version: pravegaVersion @@ -293,6 +297,7 @@ project('server') { compile group: 'com.google.protobuf', name: 'protobuf-java-util', version: protobufUtilVersion compile group: 'com.google.protobuf', name: 'protobuf-java', version: protobufProtocVersion compile group: 'com.fasterxml.jackson.module', name: 'jackson-module-jsonSchema', version: jacksonVersion + compile group: 'com.github.everit-org.json-schema', name: 'org.everit.json.schema', version: everitVersion testCompile (group: 'io.pravega', name: 'pravega-standalone', version: pravegaVersion) { exclude group: 'javax.ws.rs', module: 'jsr311-api' } diff --git a/checkstyle/import-control.xml b/checkstyle/import-control.xml index 2f88803e8..fc347f038 100644 --- a/checkstyle/import-control.xml +++ b/checkstyle/import-control.xml @@ -43,6 +43,8 @@ + + diff --git a/gradle.properties b/gradle.properties index 96439bc3a..a24aae896 100644 --- a/gradle.properties +++ b/gradle.properties @@ -27,6 +27,7 @@ gradleSshPluginVersion=2.9.0 guavaVersion=28.1-jre javaxServletApiVersion=4.0.0 jacksonVersion=2.11.1 +everitVersion=1.12.1 javaxwsrsApiVersion=2.1 jaxbVersion=2.3.0 javaxAnnotationVersion=1.3.2 diff --git a/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java b/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java index 55a90cf80..802063689 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java @@ -22,6 +22,10 @@ import io.pravega.schemaregistry.contract.data.SerializationFormat; import lombok.Getter; import org.apache.avro.specific.SpecificRecordBase; +import org.everit.json.schema.loader.SchemaLoader; +import org.everit.json.schema.loader.SpecificationVersion; +import org.json.JSONObject; +import org.json.JSONTokener; import java.nio.ByteBuffer; @@ -38,30 +42,30 @@ public class JSONSchema implements Schema { private final Class base; @Getter private final Class derived; - + @Getter - private final JsonSchema schema; + private final org.everit.json.schema.Schema schema; private final SchemaInfo schemaInfo; - private JSONSchema(JsonSchema schema, String name, String schemaString, Class derived) { - this(schema, name, schemaString, derived, derived); + private JSONSchema(String name, String schemaString, Class derived) { + this(name, schemaString, derived, derived); } - private JSONSchema(JsonSchema schema, String name, String schemaString, Class base, Class derived) { + private JSONSchema(String name, String schemaString, Class base, Class derived) { this.schemaString = schemaString; this.schemaInfo = new SchemaInfo(name, SerializationFormat.Json, getSchemaBytes(), ImmutableMap.of()); this.base = base; this.derived = derived; - this.schema = schema; + this.schema = getSchemaObj(schemaString); } - private JSONSchema(SchemaInfo schemaInfo, JsonSchema schema, String schemaString, Class derived) { + private JSONSchema(SchemaInfo schemaInfo, String schemaString, Class derived) { this.schemaString = schemaString; this.schemaInfo = schemaInfo; this.base = derived; this.derived = derived; - this.schema = schema; + this.schema = getSchemaObj(schemaString); } /** @@ -78,8 +82,7 @@ public static JSONSchema of(Class tClass) { JsonSchemaGenerator schemaGen = new JsonSchemaGenerator(OBJECT_MAPPER); JsonSchema schema = schemaGen.generateSchema(tClass); String schemaString = 
OBJECT_MAPPER.writeValueAsString(schema); - - return new JSONSchema<>(schema, tClass.getName(), schemaString, tClass); + return new JSONSchema<>(tClass.getName(), schemaString, tClass); } catch (JsonProcessingException e) { throw new IllegalArgumentException("Unable to get json schema from the class", e); } @@ -91,15 +94,17 @@ public static JSONSchema of(Class tClass) { * * @param type type of object identified by {@link SchemaInfo#getType()}. * @param schema Schema to use. + * @param tClass class for the type of object + * @param Type of object * @return Returns an JSONSchema with {@link Object} type. */ - public static JSONSchema of(String type, JsonSchema schema) { + public static JSONSchema of(String type, JsonSchema schema, Class tClass) { Preconditions.checkNotNull(type); Preconditions.checkNotNull(schema); try { String schemaString = OBJECT_MAPPER.writeValueAsString(schema); - return new JSONSchema<>(schema, type, schemaString, Object.class); + return new JSONSchema<>(type, schemaString, tClass); } catch (JsonProcessingException e) { throw new IllegalArgumentException("Unable to get json schema string from the JsonSchema object", e); } @@ -117,12 +122,7 @@ public static JSONSchema of(String type, JsonSchema schema) { public static JSONSchema of(String type, String schemaString, Class tClass) { Preconditions.checkNotNull(type, "Type cannot be null."); Preconditions.checkArgument(!Strings.isNullOrEmpty(schemaString), "Schema String cannot be null or empty."); - try { - JsonSchema schema = OBJECT_MAPPER.readValue(schemaString, JsonSchema.class); - return new JSONSchema<>(schema, type, schemaString, tClass); - } catch (JsonProcessingException e) { - throw new IllegalArgumentException("Unable to parse schema string", e); - } + return new JSONSchema<>(type, schemaString, tClass); } /** @@ -140,10 +140,10 @@ public static JSONSchema ofBaseType(Class tDerived, Class Preconditions.checkNotNull(tBase); try { JsonSchemaGenerator schemaGen = new JsonSchemaGenerator(OBJECT_MAPPER); - JsonSchema schema = schemaGen.generateSchema(tDerived); - String schemaString = OBJECT_MAPPER.writeValueAsString(schema); + JsonSchema jsonSchema = schemaGen.generateSchema(tDerived); + String schemaString = OBJECT_MAPPER.writeValueAsString(jsonSchema); - return new JSONSchema<>(schema, tDerived.getName(), schemaString, tBase, tDerived); + return new JSONSchema<>(tDerived.getName(), schemaString, tBase, tDerived); } catch (JsonProcessingException e) { throw new IllegalArgumentException("Unable to get json schema from the class", e); } @@ -157,16 +157,26 @@ public static JSONSchema ofBaseType(Class tDerived, Class */ public static JSONSchema from(SchemaInfo schemaInfo) { Preconditions.checkNotNull(schemaInfo); - try { - String schemaString = new String(schemaInfo.getSchemaData().array(), Charsets.UTF_8); + String schemaString = new String(schemaInfo.getSchemaData().array(), Charsets.UTF_8); - JsonSchema schema = OBJECT_MAPPER.readValue(schemaString, JsonSchema.class); - return new JSONSchema<>(schemaInfo, schema, schemaString, JsonNode.class); - } catch (JsonProcessingException e) { - throw new IllegalArgumentException("Unable to get json schema from schema info", e); - } + return new JSONSchema<>(schemaInfo, schemaString, JsonNode.class); } + private static org.everit.json.schema.Schema getSchemaObj(String schemaString) { + JSONObject rawSchema = new JSONObject(new JSONTokener(schemaString)); + // It will check if the schema has "id" then it is definitely version 4. 
+ // if $schema draft is specified, the schemaloader will automatically use the correct specification version + // however, $schema is not mandatory. So we will check with presence of id and if id is specified with draft 4 + // specification, then we use draft 4, else we will use draft 7 as other keywords are added in draft 7. + if (rawSchema.has(SpecificationVersion.DRAFT_4.idKeyword())) { + return SchemaLoader.builder().useDefaults(true).schemaJson(rawSchema) + .build().load().build(); + } else { + return SchemaLoader.builder().useDefaults(true).schemaJson(rawSchema).draftV7Support() + .build().load().build(); + } + } + private ByteBuffer getSchemaBytes() { return ByteBuffer.wrap(schemaString.getBytes(Charsets.UTF_8)); } diff --git a/server/src/main/java/io/pravega/schemaregistry/service/SchemaRegistryService.java b/server/src/main/java/io/pravega/schemaregistry/service/SchemaRegistryService.java index c3c598b1e..2a7442663 100644 --- a/server/src/main/java/io/pravega/schemaregistry/service/SchemaRegistryService.java +++ b/server/src/main/java/io/pravega/schemaregistry/service/SchemaRegistryService.java @@ -9,6 +9,8 @@ */ package io.pravega.schemaregistry.service; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.MapperFeature; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.SerializationFeature; @@ -44,8 +46,13 @@ import io.pravega.schemaregistry.storage.StoreExceptions; import lombok.extern.slf4j.Slf4j; import org.apache.avro.Schema; +import org.everit.json.schema.loader.SchemaLoader; +import org.everit.json.schema.loader.SpecificationVersion; +import org.json.JSONObject; +import org.json.JSONTokener; import javax.annotation.Nullable; +import java.io.IOException; import java.nio.ByteBuffer; import java.util.AbstractMap; import java.util.Collections; @@ -843,8 +850,12 @@ private SchemaInfo normalizeSchemaBinary(SchemaInfo schemaInfo) { break; case Json: schemaString = new String(schemaInfo.getSchemaData().array(), Charsets.UTF_8); - JsonSchema jsonSchema = OBJECT_MAPPER.readValue(schemaString, JsonSchema.class); - schemaBinary = ByteBuffer.wrap(OBJECT_MAPPER.writeValueAsString(jsonSchema).getBytes(Charsets.UTF_8)); + validateJsonSchema(schemaString); + // normalize json schema string by parsing it into JsonNode and then serializing it with fields + // in alphabetical order. This ensures that identical schemas with different order of fields are + // treated to be equal. + JsonNode jsonNode = OBJECT_MAPPER.readTree(schemaString); + schemaBinary = ByteBuffer.wrap(OBJECT_MAPPER.writeValueAsString(jsonNode).getBytes(Charsets.UTF_8)); break; case Any: break; @@ -853,7 +864,7 @@ private SchemaInfo normalizeSchemaBinary(SchemaInfo schemaInfo) { default: break; } - } catch (Exception e) { + } catch (IOException | RuntimeException e) { log.debug("unable to parse schema {}", e.getMessage()); isValid = false; invalidityCause = "Unable to parse schema"; @@ -864,6 +875,38 @@ private SchemaInfo normalizeSchemaBinary(SchemaInfo schemaInfo) { return new SchemaInfo(schemaInfo.getType(), schemaInfo.getSerializationFormat(), schemaBinary, schemaInfo.getProperties()); } + private void validateJsonSchema(String schemaString) { + try { + // 1. try draft 3 + // jackson JsonSchema only supports json draft 3. If the schema definition is not compatible with draft 3, + // try parsing the schema with everit library which supports drafts 4 6 and 7. 
+ OBJECT_MAPPER.readValue(schemaString, JsonSchema.class); + } catch (JsonProcessingException e) { + validateJsonSchema4Onward(schemaString); + } + } + + /** + * This method checks if the schema is well formed according to draft v4 onward. + * Changes between draft 4 and 6/7 https://json-schema.org/draft-06/json-schema-release-notes.html + * + * @param schemaString Schema string to validate + */ + private void validateJsonSchema4Onward(String schemaString) { + JSONObject rawSchema = new JSONObject(new JSONTokener(schemaString)); + // It will check if the schema has "id" then it is definitely version 4. + // if $schema draft is specified, the schemaloader will automatically use the correct specification version + // however, $schema is not mandatory. So we will check with presence of id and if id is specified with draft 4 + // specification, then we use draft 4, else we will use draft 7 as other keywords are added in draft 7. + if (rawSchema.has(SpecificationVersion.DRAFT_4.idKeyword())) { + SchemaLoader.builder().useDefaults(true).schemaJson(rawSchema) + .build().load().build(); + } else { + SchemaLoader.builder().useDefaults(true).schemaJson(rawSchema).draftV7Support() + .build().load().build(); + } + } + private Boolean canReadChecker(SchemaInfo schema, GroupProperties prop, List schemasWithVersion) { CompatibilityChecker checker = CompatibilityCheckerFactory.getCompatibilityChecker(schema.getSerializationFormat()); From a8fb0dd41c1729ce79cc1776e0bd3fcc03cd7538 Mon Sep 17 00:00:00 2001 From: Shivesh Ranjan Date: Mon, 20 Jul 2020 22:56:56 -0700 Subject: [PATCH 70/70] PR comments, javadoc corrections, test addition Signed-off-by: Shivesh Ranjan --- .../pravega/schemaregistry/codec/Codecs.java | 3 +- .../schemaregistry/schemas/AvroSchema.java | 6 +-- .../schemaregistry/schemas/JSONSchema.java | 2 +- .../serializers/AbstractDeserializer.java | 24 +++++----- .../serializers/SerializerFactoryHelper.java | 21 +++++++-- .../serializers/WithSchema.java | 4 +- .../schemaregistry/schemas/SchemasTest.java | 13 +++++- .../testobjs/SchemaDefinitions.java | 45 +++++++++++++++++++ 8 files changed, 94 insertions(+), 24 deletions(-) diff --git a/serializers/src/main/java/io/pravega/schemaregistry/codec/Codecs.java b/serializers/src/main/java/io/pravega/schemaregistry/codec/Codecs.java index ee07be335..5f5155c05 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/codec/Codecs.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/codec/Codecs.java @@ -56,8 +56,7 @@ public void encode(ByteBuffer data, ByteArrayOutputStream bos) { if (data.hasArray()) { bos.write(data.array(), data.arrayOffset() + data.position(), data.remaining()); } else { - byte[] b = new byte[data.remaining()]; - data.get(b); + byte[] b = getBytes(data); bos.write(b, 0, b.length); } } diff --git a/serializers/src/main/java/io/pravega/schemaregistry/schemas/AvroSchema.java b/serializers/src/main/java/io/pravega/schemaregistry/schemas/AvroSchema.java index 09efb80f2..4fccf058b 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/schemas/AvroSchema.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/schemas/AvroSchema.java @@ -72,7 +72,7 @@ public static AvroSchema of(Class tClass) { * This schema can be used to express any non record schema. * * @param schema Schema to use. - * @return Returns an AvroSchema with {@link GenericRecord} type. + * @return Returns an AvroSchema with {@link Object} type. 
*/ public static AvroSchema of(org.apache.avro.Schema schema) { return new AvroSchema<>(schema, Object.class); @@ -121,10 +121,10 @@ public static AvroSchema ofSp } /** - * Method to create a typed AvroSchema of type {@link GenericRecord} from schema info. + * Method to create a typed AvroSchema of type {@link Object} from schema info. * * @param schemaInfo Schema info object that has schema data in binary form. - * @return Returns an AvroSchema with {@link GenericRecord} type. + * @return Returns an AvroSchema with {@link Object} type. */ public static AvroSchema from(SchemaInfo schemaInfo) { return new AvroSchema<>(schemaInfo); diff --git a/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java b/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java index 802063689..c0206d7e2 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/schemas/JSONSchema.java @@ -89,7 +89,7 @@ public static JSONSchema of(Class tClass) { } /** - * Method to create a typed JSONSchema of type {@link Object} from the given schema. + * Method to create a typed JSONSchema of type T from the given schema. * This method can be used to pass Json schema string which can be used to represent primitive data types. * * @param type type of object identified by {@link SchemaInfo#getType()}. diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractDeserializer.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractDeserializer.java index 7400fb6ab..eb3e13091 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractDeserializer.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/AbstractDeserializer.java @@ -76,12 +76,15 @@ private void initialize() { @Override public T deserialize(ByteBuffer data) { int start = data.hasArray() ? data.arrayOffset() + data.position() : data.position(); + ByteArrayInputStream inputStream; + SchemaInfo writerSchema; + SchemaInfo readerSchema; if (this.encodeHeader) { - SchemaInfo writerSchema = null; ByteBuffer decoded; if (skipHeaders) { data.position(start + HEADER_SIZE); decoded = data; + writerSchema = null; } else { byte protocol = data.get(); EncodingId encodingId = new EncodingId(data.getInt()); @@ -90,15 +93,10 @@ public T deserialize(ByteBuffer data) { decoded = decoders.decode(encodingInfo.getCodecType(), data); } - ByteArrayInputStream bais = new ByteArrayInputStream(decoded.array(), + inputStream = new ByteArrayInputStream(decoded.array(), decoded.arrayOffset() + decoded.position(), decoded.remaining()); - if (schemaInfo == null) { // deserialize into writer schema - // pass writer schema for schema to be read into - return deserialize(bais, writerSchema, writerSchema); - } else { - // pass reader schema for schema on read to the underlying implementation - return deserialize(bais, writerSchema, schemaInfo); - } + // pass writer schema for schema to be read into + readerSchema = schemaInfo == null ? 
writerSchema : schemaInfo; } else { byte[] b; if (data.hasArray()) { @@ -107,11 +105,13 @@ public T deserialize(ByteBuffer data) { b = new byte[data.remaining()]; data.get(b); } + writerSchema = null; + readerSchema = schemaInfo; // pass reader schema for schema on read to the underlying implementation - ByteArrayInputStream inputStream = new ByteArrayInputStream(b, start, data.remaining()); - - return deserialize(inputStream, null, schemaInfo); + inputStream = new ByteArrayInputStream(b, start, data.remaining()); } + + return deserialize(inputStream, writerSchema, readerSchema); } protected abstract T deserialize(InputStream inputStream, SchemaInfo writerSchema, SchemaInfo readerSchema) throws IOException; diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactoryHelper.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactoryHelper.java index be13b9c08..c7816189a 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactoryHelper.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/SerializerFactoryHelper.java @@ -9,7 +9,11 @@ */ package io.pravega.schemaregistry.serializers; +import com.google.common.base.Strings; +import io.pravega.client.ClientConfig; +import io.pravega.client.stream.impl.Credentials; import io.pravega.schemaregistry.client.SchemaRegistryClient; +import io.pravega.schemaregistry.client.SchemaRegistryClientConfig; import io.pravega.schemaregistry.client.SchemaRegistryClientFactory; import io.pravega.schemaregistry.contract.data.CodecType; import lombok.extern.slf4j.Slf4j; @@ -34,9 +38,20 @@ static SchemaRegistryClient initForDeserializer(SerializerConfig config) { } private static SchemaRegistryClient getSchemaRegistryClient(SerializerConfig config) { - return config.getRegistryConfigOrClient().isLeft() ? - SchemaRegistryClientFactory.withNamespace(config.getNamespace(), config.getRegistryConfigOrClient().getLeft()) : - config.getRegistryConfigOrClient().getRight(); + if (config.getRegistryConfigOrClient().isLeft()) { + // if auth is enabled and creds are not supplied, reuse the credentials from pravega client config which may + // be loaded from system properties. + SchemaRegistryClientConfig left = config.getRegistryConfigOrClient().getLeft(); + if (left.isAuthEnabled() && Strings.isNullOrEmpty(left.getAuthMethod())) { + Credentials creds = ClientConfig.builder().build().getCredentials(); + left = SchemaRegistryClientConfig.builder().schemaRegistryUri(left.getSchemaRegistryUri()).authEnabled(left.isAuthEnabled()) + .authMethod(creds.getAuthenticationType()).authToken(creds.getAuthenticationToken()) + .build(); + } + return SchemaRegistryClientFactory.withNamespace(config.getNamespace(), left); + } else { + return config.getRegistryConfigOrClient().getRight(); + } } private static void createGroup(SchemaRegistryClient client, SerializerConfig config) { diff --git a/serializers/src/main/java/io/pravega/schemaregistry/serializers/WithSchema.java b/serializers/src/main/java/io/pravega/schemaregistry/serializers/WithSchema.java index 25b0b9d19..3787da2f1 100644 --- a/serializers/src/main/java/io/pravega/schemaregistry/serializers/WithSchema.java +++ b/serializers/src/main/java/io/pravega/schemaregistry/serializers/WithSchema.java @@ -146,8 +146,8 @@ public boolean hasJsonSchema() { * @return Json schema String representing the schema for the object. 
*/ @SuppressWarnings("unchecked") - public String getJsonSchema() { - return ((JSONSchema) schema).getSchemaString(); + public org.everit.json.schema.Schema getJsonSchema() { + return ((JSONSchema) schema).getSchema(); } /** diff --git a/serializers/src/test/java/io/pravega/schemaregistry/schemas/SchemasTest.java b/serializers/src/test/java/io/pravega/schemaregistry/schemas/SchemasTest.java index f3a6cba01..57bc2e7c8 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/schemas/SchemasTest.java +++ b/serializers/src/test/java/io/pravega/schemaregistry/schemas/SchemasTest.java @@ -9,6 +9,7 @@ */ package io.pravega.schemaregistry.schemas; +import com.fasterxml.jackson.databind.JsonNode; import com.google.protobuf.DescriptorProtos; import com.google.protobuf.DynamicMessage; import com.google.protobuf.GeneratedMessageV3; @@ -29,6 +30,8 @@ import java.nio.file.Paths; import static io.pravega.schemaregistry.testobjs.SchemaDefinitions.JSON_SCHEMA_STRING; +import static io.pravega.schemaregistry.testobjs.SchemaDefinitions.JSON_SCHEMA_STRING_DRAFT_4; +import static io.pravega.schemaregistry.testobjs.SchemaDefinitions.JSON_SCHEMA_STRING_DRAFT_7; import static org.junit.Assert.*; public class SchemasTest { @@ -103,10 +106,18 @@ public void testJsonSchema() { assertNotNull(schema.getSchema()); assertEquals(schema.getSchemaInfo().getSerializationFormat(), SerializationFormat.Json); - JSONSchema schema2 = JSONSchema.of("Person", JSON_SCHEMA_STRING, Object.class); + JSONSchema schema2 = JSONSchema.of("Person", JSON_SCHEMA_STRING, String.class); assertNotNull(schema2.getSchema()); assertEquals(schema2.getSchemaInfo().getSerializationFormat(), SerializationFormat.Json); + JSONSchema schema3 = JSONSchema.of("", JSON_SCHEMA_STRING_DRAFT_4, JsonNode.class); + assertNotNull(schema3.getSchema()); + assertEquals(schema3.getSchemaInfo().getSerializationFormat(), SerializationFormat.Json); + + JSONSchema schema4 = JSONSchema.of("", JSON_SCHEMA_STRING_DRAFT_7, JsonNode.class); + assertNotNull(schema4.getSchema()); + assertEquals(schema4.getSchemaInfo().getSerializationFormat(), SerializationFormat.Json); + JSONSchema baseSchema1 = JSONSchema.ofBaseType(DerivedUser1.class, User.class); assertNotNull(baseSchema1.getSchema()); assertEquals(baseSchema1.getSchemaInfo().getSerializationFormat(), SerializationFormat.Json); diff --git a/serializers/src/test/java/io/pravega/schemaregistry/testobjs/SchemaDefinitions.java b/serializers/src/test/java/io/pravega/schemaregistry/testobjs/SchemaDefinitions.java index 62f732400..1feebf15f 100644 --- a/serializers/src/test/java/io/pravega/schemaregistry/testobjs/SchemaDefinitions.java +++ b/serializers/src/test/java/io/pravega/schemaregistry/testobjs/SchemaDefinitions.java @@ -61,4 +61,49 @@ public class SchemaDefinitions { "}" + "}" + "}"; + + public static final String JSON_SCHEMA_STRING_DRAFT_4 = "{\n" + + " \"$schema\": \"http://json-schema.org/draft-04/schema#\",\n" + + " \"title\": \"User\",\n" + + " \"id\": \"UserV4\",\n" + + " \"type\": \"object\",\n" + + "\t\n" + + " \"properties\": {\n" + + "\t\n" + + " \"id\": {\n" + + " \"type\": \"integer\"\n" + + " },\n" + + "\t\t\n" + + " \"name\": {\n" + + " \"type\": \"string\"\n" + + " },\n" + + "\t\t\n" + + " \"age\": {\n" + + " \"type\": \"number\",\n" + + " \"minimum\": 0,\n" + + " \"exclusiveMinimum\": true\n" + + " }\n" + + " },\n" + + "\t\n" + + " \"required\": [\"id\", \"name\", \"age\"]\n" + + "}"; + + public static final String JSON_SCHEMA_STRING_DRAFT_7 = "{\n" + + " \"$id\": \"UserV7\",\n" + + " \"$schema\": 
\"http://json-schema.org/draft-07/schema#\",\n" + + " \"title\": \"User\",\n" + + " \"type\": \"object\",\n" + + " \"properties\": {\n" + + " \"firstName\": {\n" + + " \"type\": \"string\"\n" + + " },\n" + + " \"lastName\": {\n" + + " \"type\": \"string\"\n" + + " },\n" + + " \"age\": {\n" + + " \"type\": \"integer\",\n" + + " \"minimum\": 0\n" + + " }\n" + + " }\n" + + "}"; }