[ https://issues.apache.org/jira/browse/FLINK-8983?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=16512665#comment-16512665 ]
ASF GitHub Bot commented on FLINK-8983: --------------------------------------- Github user tillrohrmann commented on a diff in the pull request: https://github.com/apache/flink/pull/6083#discussion_r195472438 --- Diff: flink-end-to-end-tests/flink-confluent-schema-registry/src/main/java/AvroSerializationConfluentSchema.java --- @@ -0,0 +1,93 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import org.apache.flink.api.common.serialization.SerializationSchema; + +import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient; +import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient; +import io.confluent.kafka.serializers.AbstractKafkaAvroSerDeConfig; +import io.confluent.kafka.serializers.KafkaAvroSerializer; +import org.apache.avro.Schema; +import org.apache.avro.generic.GenericData; +import tech.allegro.schema.json2avro.converter.JsonAvroConverter; + +/** + * The serialization schema for the Avro type. 
+ */ +public class AvroSerializationConfluentSchema<T> implements SerializationSchema<T> { + + private static final long serialVersionUID = 1L; + + private final String schemaRegistryUrl; + + private final int identityMapCapacity; + + private KafkaAvroSerializer kafkaAvroSerializer; + + private String topicName; + + private SchemaRegistryClient schemaRegistryClient; + + private JsonAvroConverter jsonAvroConverter; + + private final Class<T> avroType; + + public AvroSerializationConfluentSchema(Class<T> avroType, String schemaRegistryUrl, String topicName) { + this(avroType, schemaRegistryUrl, AbstractKafkaAvroSerDeConfig.MAX_SCHEMAS_PER_SUBJECT_DEFAULT, topicName); + } + + public AvroSerializationConfluentSchema(Class<T> avroType, String schemaRegistryUrl, int identityMapCapacity, String topicName) { + this.schemaRegistryUrl = schemaRegistryUrl; + this.identityMapCapacity = identityMapCapacity; + this.topicName = topicName; + this.avroType = avroType; + } + + @Override + public byte[] serialize(T obj) { + byte[] serializedBytes = null; + + try { + if (kafkaAvroSerializer == null) { + this.schemaRegistryClient = new CachedSchemaRegistryClient(this.schemaRegistryUrl, this.identityMapCapacity); + this.kafkaAvroSerializer = new KafkaAvroSerializer(schemaRegistryClient); + } + + String schema = schemaRegistryClient.getLatestSchemaMetadata(topicName + "-value").getSchema(); + + if (jsonAvroConverter == null) { + jsonAvroConverter = new JsonAvroConverter(); + } + + //System.out.println("Schema fetched from Schema Registry for topic :" + topicName + " = " + schema); + GenericData.Record record = jsonAvroConverter.convertToGenericDataRecord(obj.toString().getBytes(), new Schema.Parser().parse(schema)); + + if (GenericData.get().validate(new Schema.Parser().parse(schema), record)) { + serializedBytes = kafkaAvroSerializer.serialize(topicName, record); + + } else { + System.out.println("Error :Invalid message : Doesn't follow the avro schema : Message not published to the 
topic, message = " + record.toString()); --- End diff -- No println logging. Better to use proper loggers. > End-to-end test: Confluent schema registry > ------------------------------------------ > > Key: FLINK-8983 > URL: https://issues.apache.org/jira/browse/FLINK-8983 > Project: Flink > Issue Type: Sub-task > Components: Kafka Connector, Tests > Reporter: Till Rohrmann > Assignee: Yazdan Shirvany > Priority: Critical > > It would be good to add an end-to-end test which verifies that Flink is able > to work together with the Confluent schema registry. In order to do that we > have to set up a Kafka cluster and write a Flink job which reads from the > Confluent schema registry, producing an Avro type. -- This message was sent by Atlassian JIRA (v7.6.3#76005)