[ https://issues.apache.org/jira/browse/FLINK-8538?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=16374097#comment-16374097 ]

ASF GitHub Bot commented on FLINK-8538:
---------------------------------------

Github user xccui commented on a diff in the pull request:

    https://github.com/apache/flink/pull/5564#discussion_r170191830
  
    --- Diff: flink-connectors/flink-connector-kafka-base/src/test/java/org/apache/flink/streaming/connectors/kafka/KafkaJsonTableSourceFactoryTestBase.java ---
    @@ -0,0 +1,136 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one
    + * or more contributor license agreements.  See the NOTICE file
    + * distributed with this work for additional information
    + * regarding copyright ownership.  The ASF licenses this file
    + * to you under the Apache License, Version 2.0 (the
    + * "License"); you may not use this file except in compliance
    + * with the License.  You may obtain a copy of the License at
    + *
    + *     http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +
    +package org.apache.flink.streaming.connectors.kafka;
    +
    +import org.apache.flink.api.common.typeinfo.Types;
    +import org.apache.flink.formats.json.JsonSchemaConverter;
    +import org.apache.flink.streaming.connectors.kafka.internals.KafkaTopicPartition;
    +import org.apache.flink.table.api.TableSchema;
    +import org.apache.flink.table.descriptors.Json;
    +import org.apache.flink.table.descriptors.Kafka;
    +import org.apache.flink.table.descriptors.Schema;
    +import org.apache.flink.table.descriptors.TestTableSourceDescriptor;
    +import org.apache.flink.table.sources.TableSource;
    +import org.apache.flink.table.sources.TableSourceFactory;
    +import org.apache.flink.table.sources.TableSourceFactoryService;
    +import org.apache.flink.types.Row;
    +
    +import org.junit.Test;
    +
    +import java.util.HashMap;
    +import java.util.Map;
    +import java.util.Properties;
    +
    +import static org.junit.Assert.assertEquals;
    +
    +/**
    + * Tests for {@link KafkaJsonTableSourceFactory}.
    + */
    +public abstract class KafkaJsonTableSourceFactoryTestBase {
    +
    +   private static final String JSON_SCHEMA =
    +           "{" +
    +           "  'title': 'Fruit'," +
    +           "  'type': 'object'," +
    +           "  'properties': {" +
    +           "    'name': {" +
    +           "      'type': 'string'" +
    +           "    }," +
    +           "    'count': {" +
    +           "      'type': 'integer'" +
    +           "    }," +
    +           "    'time': {" +
    +           "      'description': 'Age in years'," +
    +           "      'type': 'number'" +
    +           "    }" + "  }," +
    +           "  'required': ['name', 'count', 'time']" +
    +           "}";
    +
    +   private static final String TOPIC = "test-topic";
    +
    +   protected abstract String version();
    +
    +   protected abstract KafkaJsonTableSource.Builder builder();
    +
    +   protected abstract KafkaJsonTableSourceFactory factory();
    +
    +   @Test
    +   public void testResultingTableSource() {
    +
    +           // construct table source using a builder
    +
    +           final Map<String, String> tableJsonMapping = new HashMap<>();
    +           tableJsonMapping.put("fruit-name", "name");
    +           tableJsonMapping.put("count", "count");
    +           tableJsonMapping.put("event-time", "time");
    +
    +           final Properties props = new Properties();
    +           props.put("group.id", "test-group");
    +           props.put("bootstrap.servers", "localhost:1234");
    +
    +           final Map<KafkaTopicPartition, Long> specificOffsets = new HashMap<>();
    +           specificOffsets.put(new KafkaTopicPartition(TOPIC, 0), 100L);
    +           specificOffsets.put(new KafkaTopicPartition(TOPIC, 1), 123L);
    +
    +           final KafkaTableSource builderSource = builder()
    +                           .forJsonSchema(TableSchema.fromTypeInfo(JsonSchemaConverter.convert(JSON_SCHEMA)))
    +                           .failOnMissingField(true)
    +                           .withTableToJsonMapping(tableJsonMapping)
    +                           .withKafkaProperties(props)
    +                           .forTopic(TOPIC)
    +                           .fromSpecificOffsets(specificOffsets)
    +                           .withSchema(
    +                                   TableSchema.builder()
    +                                           .field("fruit-name", Types.STRING)
    +                                           .field("count", Types.INT)
    +                                           .field("event-time", Types.LONG)
    +                                           .field("proc-time", Types.SQL_TIMESTAMP)
    +                                           .build())
    +                           .withProctimeAttribute("proc-time")
    +                           .build();
    +
    +           // construct table source using descriptors and table source factory
    +
    +           final Map<Integer, Long> offsets = new HashMap<>();
    +           offsets.put(0, 100L);
    +           offsets.put(1, 123L);
    +
    +           final TestTableSourceDescriptor testDesc = new TestTableSourceDescriptor(
    +                           new Kafka()
    +                                   .version(version())
    +                                   .topic(TOPIC)
    +                                   .properties(props)
    +                                   .startFromSpecificOffsets(offsets))
    +                   .addFormat(
    +                           new Json()
    +                                           .jsonSchema(JSON_SCHEMA)
    +                                           .failOnMissingField(true))
    +                   .addSchema(
    +                           new Schema()
    +                                           .field("fruit-name", Types.STRING).from("name")
    +                                           .field("count", Types.INT) // no from so it must match with the input
    +                                           .field("event-time", Types.LONG).from("time")
    +                                           .field("proc-time", Types.SQL_TIMESTAMP).proctime());
    +           final TableSourceFactory<Row> factory = factory();
    --- End diff --
    
    The `factory` variable is unused and can be removed.
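
    To make this concrete, a minimal sketch of how the end of the test could read once the unused local is dropped and the factory service resolves the source directly from the descriptor. The names `TableSourceFactoryService`, `testDesc` and `builderSource` all come from the diff above; the exact `findAndCreateTableSource(...)` signature is assumed here and may differ:

        // Sketch only, not part of the reviewed diff: instead of keeping an
        // unused `factory` local, ask the factory service for a table source
        // matching the descriptor and compare it with the builder-based source.
        // The findAndCreateTableSource(...) name is assumed from the imported
        // TableSourceFactoryService; adjust it if the actual method differs.
        final TableSource<?> factorySource =
                TableSourceFactoryService.findAndCreateTableSource(testDesc);

        assertEquals(builderSource, factorySource);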


> Add a Kafka table source factory with JSON format support
> ---------------------------------------------------------
>
>                 Key: FLINK-8538
>                 URL: https://issues.apache.org/jira/browse/FLINK-8538
>             Project: Flink
>          Issue Type: Sub-task
>          Components: Table API & SQL
>            Reporter: Timo Walther
>            Assignee: Xingcan Cui
>            Priority: Major
>
> Similar to CSVTableSourceFactory a Kafka table source factory for JSON should 
> be added. This issue includes improving the existing JSON descriptor with 
> validation that can be used for other connectors as well. It is up for 
> discussion if we want to split the KafkaJsonTableSource into connector and 
> format such that we can reuse the format for other table sources as well.
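
For the JSON format part of this issue, a small illustrative fragment of how a JSON schema string is turned into a table schema, mirroring the `JsonSchemaConverter.convert(...)` and `TableSchema.fromTypeInfo(...)` calls already exercised in the test diff above; the schema string and variable names below are made up for illustration and the converter's exact return type is assumed:

    // Illustration only: derive Flink type information and a TableSchema from a
    // JSON schema string, as done by forJsonSchema(...) in the test above.
    // Classes used: JsonSchemaConverter and TableSchema (both imported in the
    // diff) plus TypeInformation from org.apache.flink.api.common.typeinfo.
    final String jsonSchema =
            "{"
            + "  'type': 'object',"
            + "  'properties': {"
            + "    'name':  { 'type': 'string' },"
            + "    'count': { 'type': 'integer' }"
            + "  }"
            + "}";

    // The converter yields the row type described by the schema; TableSchema
    // then exposes the corresponding field names and types for the source.
    final TypeInformation<?> rowType = JsonSchemaConverter.convert(jsonSchema);
    final TableSchema tableSchema = TableSchema.fromTypeInfo(rowType);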



--
This message was sent by Atlassian JIRA
(v7.6.3#76005)
