godfreyhe commented on a change in pull request #15239:
URL: https://github.com/apache/flink/pull/15239#discussion_r596525054



##########
File path: 
flink-table/flink-table-planner-blink/src/test/java/org/apache/flink/table/planner/runtime/stream/jsonplan/JoinJsonPlanITCase.java
##########
@@ -0,0 +1,204 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.planner.runtime.stream.jsonplan;
+
+import org.apache.flink.table.planner.factories.TestValuesTableFactory;
+import org.apache.flink.table.planner.utils.JsonPlanTestBase;
+import org.apache.flink.types.Row;
+
+import org.junit.Before;
+import org.junit.Test;
+
+import java.io.File;
+import java.util.Arrays;
+import java.util.List;
+import java.util.stream.Collectors;
+
+/** Test for join json plan. */
+public class JoinJsonPlanITCase extends JsonPlanTestBase {
+    private static final List<Row> small5FieldsRowData =

Review comment:
       Maybe we can use `TestData.data5` with `b <= 5` to replace `small5FieldsRowData`.
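
       For illustration, a rough sketch of that suggestion. It assumes `TestData.data5` can be converted with `JavaScalaConversionUtil.toJava` and that the elements end up as `Row`s whose field at index 1 is the bigint column `b`; the conversion and field access are assumptions, not the actual `TestData` API:

           private static final List<Row> small5FieldsRowData =
                   JavaScalaConversionUtil.toJava(TestData.data5()).stream()
                           // assumption: field index 1 holds the `b` column as a Long
                           .filter(row -> ((Number) row.getField(1)).longValue() <= 5L)
                           .collect(Collectors.toList());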

##########
File path: 
flink-table/flink-table-planner-blink/src/test/java/org/apache/flink/table/planner/runtime/stream/jsonplan/JoinJsonPlanITCase.java
##########
@@ -0,0 +1,204 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.planner.runtime.stream.jsonplan;
+
+import org.apache.flink.table.planner.factories.TestValuesTableFactory;
+import org.apache.flink.table.planner.utils.JsonPlanTestBase;
+import org.apache.flink.types.Row;
+
+import org.junit.Before;
+import org.junit.Test;
+
+import java.io.File;
+import java.util.Arrays;
+import java.util.List;
+import java.util.stream.Collectors;
+
+/** Test for join json plan. */
+public class JoinJsonPlanITCase extends JsonPlanTestBase {
+    private static final List<Row> small5FieldsRowData =
+            Arrays.asList(
+                    Row.of(1, 1L, 0, "Hallo", 1L),
+                    Row.of(2, 2L, 1, "Hallo Welt", 2L),
+                    Row.of(2, 3L, 2, "Hallo Welt wie", 1L),
+                    Row.of(3, 4L, 3, "Hallo Welt wie gehts?", 2L),
+                    Row.of(3, 5L, 4, "ABC", 2L),
+                    Row.of(3, 6L, 5, "BCD", 3L));
+    private static final List<Row> small3FieldsRowData =
+            Arrays.asList(
+                    Row.of(1, 1L, "Hi"), Row.of(2, 2L, "Hello"), Row.of(3, 2L, "Hello world"));
+
+    @Override
+    @Before
+    public void setup() throws Exception {
+        super.setup();
+        createTestValuesSourceTable("A", small3FieldsRowData, "a1 int", "a2 bigint", "a3 varchar");
+        createTestValuesSourceTable(
+                "B",
+                small5FieldsRowData,
+                "b1 int",
+                "b2 bigint",
+                "b3 int",
+                "b4 varchar",
+                "b5 bigint");
+    }
+
+    /** test non-window inner join. * */
+    @Test
+    public void testNonWindowInnerJoin() throws Exception {
+        List<String> dataT1 =
+                Arrays.asList(
+                        "1,1,Hi1", "1,2,Hi2", "1,2,Hi2", "1,5,Hi3", "2,7,Hi5", "1,9,Hi6", "1,8,Hi8",
+                        "3,8,Hi9");
+        List<String> dataT2 = Arrays.asList("1,1,HiHi", "2,2,HeHe", "3,2,HeHe");
+        createTestCsvSourceTable("T1", dataT1, "a int", "b bigint", "c varchar");
+        createTestCsvSourceTable("T2", dataT2, "a int", "b bigint", "c varchar");
+        File sinkPath = createTestCsvSinkTable("MySink", "a int", "c1 varchar", "c2 varchar");
+
+        String jsonPlan =
+                tableEnv.getJsonPlan(
+                        "insert into MySink "
+                                + "SELECT t2.a, t2.c, t1.c\n"
+                                + "FROM (\n"
+                                + " SELECT if(a = 3, cast(null as int), a) as a, b, c FROM T1\n"
+                                + ") as t1\n"
+                                + "JOIN (\n"
+                                + " SELECT if(a = 3, cast(null as int), a) as a, b, c FROM T2\n"
+                                + ") as t2\n"
+                                + "ON t1.a = t2.a AND t1.b > t2.b");
+        tableEnv.executeJsonPlan(jsonPlan).await();
+        List<String> expected =
+                Arrays.asList(
+                        "1,HiHi,Hi2",
+                        "1,HiHi,Hi2",
+                        "1,HiHi,Hi3",
+                        "1,HiHi,Hi6",
+                        "1,HiHi,Hi8",
+                        "2,HeHe,Hi5");
+        assertResult(expected, sinkPath);
+    }
+
+    @Test
+    public void testIsNullInnerJoinWithNullCond() throws Exception {
+        List<String> dataT1 =
+                Arrays.asList(
+                        "1,1,Hi1", "1,2,Hi2", "1,2,Hi2", "1,5,Hi3", "2,7,Hi5", "1,9,Hi6", "1,8,Hi8",
+                        "3,8,Hi9");
+        List<String> dataT2 = Arrays.asList("1,1,HiHi", "2,2,HeHe", "3,2,HeHe");
+        createTestCsvSourceTable("T1", dataT1, "a int", "b bigint", "c varchar");
+        createTestCsvSourceTable("T2", dataT2, "a int", "b bigint", "c varchar");
+        createTestValuesSinkTable("MySink", "a int", "c1 varchar", "c2 varchar");
+
+        String jsonPlan =
+                tableEnv.getJsonPlan(
+                        "insert into MySink "
+                                + "SELECT t2.a, t2.c, t1.c\n"
+                                + "FROM (\n"
+                                + " SELECT if(a = 3, cast(null as int), a) as a, b, c FROM T1\n"
+                                + ") as t1\n"
+                                + "JOIN (\n"
+                                + " SELECT if(a = 3, cast(null as int), a) as a, b, c FROM T2\n"
+                                + ") as t2\n"
+                                + "ON \n"
+                                + "  ((t1.a is null AND t2.a is null) OR\n"
+                                + "  (t1.a = t2.a))\n"
+                                + "  AND t1.b > t2.b");
+        tableEnv.executeJsonPlan(jsonPlan).await();
+        List<String> expected =
+                Arrays.asList(
+                        "+I[1, HiHi, Hi2]",
+                        "+I[1, HiHi, Hi2]",
+                        "+I[1, HiHi, Hi3]",
+                        "+I[1, HiHi, Hi6]",
+                        "+I[1, HiHi, Hi8]",
+                        "+I[2, HeHe, Hi5]",
+                        "+I[null, HeHe, Hi9]");
+        assertResult(
+                expected.stream().sorted().collect(Collectors.toList()),
+                TestValuesTableFactory.getResults("MySink").stream()
+                        .sorted()
+                        .collect(Collectors.toList()));

Review comment:
       The `assertResult` method already does the sorting, so the explicit `sorted()` calls here are unnecessary.
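
       With that, the check could be reduced to something like:

           assertResult(expected, TestValuesTableFactory.getResults("MySink"));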

##########
File path: 
flink-table/flink-table-planner-blink/src/test/java/org/apache/flink/table/planner/runtime/stream/jsonplan/JoinJsonPlanITCase.java
##########
@@ -0,0 +1,204 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.planner.runtime.stream.jsonplan;
+
+import org.apache.flink.table.planner.factories.TestValuesTableFactory;
+import org.apache.flink.table.planner.utils.JsonPlanTestBase;
+import org.apache.flink.types.Row;
+
+import org.junit.Before;
+import org.junit.Test;
+
+import java.io.File;
+import java.util.Arrays;
+import java.util.List;
+import java.util.stream.Collectors;
+
+/** Test for join json plan. */
+public class JoinJsonPlanITCase extends JsonPlanTestBase {
+    private static final List<Row> small5FieldsRowData =
+            Arrays.asList(
+                    Row.of(1, 1L, 0, "Hallo", 1L),
+                    Row.of(2, 2L, 1, "Hallo Welt", 2L),
+                    Row.of(2, 3L, 2, "Hallo Welt wie", 1L),
+                    Row.of(3, 4L, 3, "Hallo Welt wie gehts?", 2L),
+                    Row.of(3, 5L, 4, "ABC", 2L),
+                    Row.of(3, 6L, 5, "BCD", 3L));
+    private static final List<Row> small3FieldsRowData =
+            Arrays.asList(
+                    Row.of(1, 1L, "Hi"), Row.of(2, 2L, "Hello"), Row.of(3, 2L, "Hello world"));
+
+    @Override
+    @Before
+    public void setup() throws Exception {
+        super.setup();
+        createTestValuesSourceTable("A", small3FieldsRowData, "a1 int", "a2 bigint", "a3 varchar");

Review comment:
       Use `JavaScalaConversionUtil.toJava(TestData.smallData3())` to replace `small3FieldsRowData`.
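
       For illustration, the setup could then look roughly like this (assuming `createTestValuesSourceTable` accepts the converted list as-is):

           createTestValuesSourceTable(
                   "A",
                   JavaScalaConversionUtil.toJava(TestData.smallData3()),
                   "a1 int",
                   "a2 bigint",
                   "a3 varchar");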

##########
File path: 
flink-table/flink-table-planner-blink/src/test/java/org/apache/flink/table/planner/runtime/stream/jsonplan/JoinJsonPlanITCase.java
##########
@@ -0,0 +1,204 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.planner.runtime.stream.jsonplan;
+
+import org.apache.flink.table.planner.factories.TestValuesTableFactory;
+import org.apache.flink.table.planner.utils.JsonPlanTestBase;
+import org.apache.flink.types.Row;
+
+import org.junit.Before;
+import org.junit.Test;
+
+import java.io.File;
+import java.util.Arrays;
+import java.util.List;
+import java.util.stream.Collectors;
+
+/** Test for join json plan. */
+public class JoinJsonPlanITCase extends JsonPlanTestBase {
+    private static final List<Row> small5FieldsRowData =
+            Arrays.asList(
+                    Row.of(1, 1L, 0, "Hallo", 1L),
+                    Row.of(2, 2L, 1, "Hallo Welt", 2L),
+                    Row.of(2, 3L, 2, "Hallo Welt wie", 1L),
+                    Row.of(3, 4L, 3, "Hallo Welt wie gehts?", 2L),
+                    Row.of(3, 5L, 4, "ABC", 2L),
+                    Row.of(3, 6L, 5, "BCD", 3L));
+    private static final List<Row> small3FieldsRowData =

Review comment:
       We can use `TestData.smallData3` here as well.

##########
File path: 
flink-table/flink-table-planner-blink/src/test/java/org/apache/flink/table/planner/runtime/stream/jsonplan/JoinJsonPlanITCase.java
##########
@@ -0,0 +1,204 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.planner.runtime.stream.jsonplan;
+
+import org.apache.flink.table.planner.factories.TestValuesTableFactory;
+import org.apache.flink.table.planner.utils.JsonPlanTestBase;
+import org.apache.flink.types.Row;
+
+import org.junit.Before;
+import org.junit.Test;
+
+import java.io.File;
+import java.util.Arrays;
+import java.util.List;
+import java.util.stream.Collectors;
+
+/** Test for join json plan. */
+public class JoinJsonPlanITCase extends JsonPlanTestBase {
+    private static final List<Row> small5FieldsRowData =
+            Arrays.asList(
+                    Row.of(1, 1L, 0, "Hallo", 1L),
+                    Row.of(2, 2L, 1, "Hallo Welt", 2L),
+                    Row.of(2, 3L, 2, "Hallo Welt wie", 1L),
+                    Row.of(3, 4L, 3, "Hallo Welt wie gehts?", 2L),
+                    Row.of(3, 5L, 4, "ABC", 2L),
+                    Row.of(3, 6L, 5, "BCD", 3L));
+    private static final List<Row> small3FieldsRowData =
+            Arrays.asList(
+                    Row.of(1, 1L, "Hi"), Row.of(2, 2L, "Hello"), Row.of(3, 2L, "Hello world"));
+
+    @Override
+    @Before
+    public void setup() throws Exception {
+        super.setup();
+        createTestValuesSourceTable("A", small3FieldsRowData, "a1 int", "a2 bigint", "a3 varchar");
+        createTestValuesSourceTable(
+                "B",
+                small5FieldsRowData,
+                "b1 int",
+                "b2 bigint",
+                "b3 int",
+                "b4 varchar",
+                "b5 bigint");
+    }
+
+    /** test non-window inner join. * */
+    @Test
+    public void testNonWindowInnerJoin() throws Exception {
+        List<String> dataT1 =
+                Arrays.asList(
+                        "1,1,Hi1", "1,2,Hi2", "1,2,Hi2", "1,5,Hi3", "2,7,Hi5", "1,9,Hi6", "1,8,Hi8",
+                        "3,8,Hi9");
+        List<String> dataT2 = Arrays.asList("1,1,HiHi", "2,2,HeHe", "3,2,HeHe");
+        createTestCsvSourceTable("T1", dataT1, "a int", "b bigint", "c varchar");
+        createTestCsvSourceTable("T2", dataT2, "a int", "b bigint", "c varchar");
+        File sinkPath = createTestCsvSinkTable("MySink", "a int", "c1 varchar", "c2 varchar");
+
+        String jsonPlan =
+                tableEnv.getJsonPlan(
+                        "insert into MySink "
+                                + "SELECT t2.a, t2.c, t1.c\n"
+                                + "FROM (\n"
+                                + " SELECT if(a = 3, cast(null as int), a) as a, b, c FROM T1\n"
+                                + ") as t1\n"
+                                + "JOIN (\n"
+                                + " SELECT if(a = 3, cast(null as int), a) as a, b, c FROM T2\n"
+                                + ") as t2\n"
+                                + "ON t1.a = t2.a AND t1.b > t2.b");
+        tableEnv.executeJsonPlan(jsonPlan).await();
+        List<String> expected =
+                Arrays.asList(
+                        "1,HiHi,Hi2",
+                        "1,HiHi,Hi2",
+                        "1,HiHi,Hi3",
+                        "1,HiHi,Hi6",
+                        "1,HiHi,Hi8",
+                        "2,HeHe,Hi5");
+        assertResult(expected, sinkPath);
+    }
+
+    @Test
+    public void testIsNullInnerJoinWithNullCond() throws Exception {
+        List<String> dataT1 =
+                Arrays.asList(
+                        "1,1,Hi1", "1,2,Hi2", "1,2,Hi2", "1,5,Hi3", "2,7,Hi5", "1,9,Hi6", "1,8,Hi8",
+                        "3,8,Hi9");
+        List<String> dataT2 = Arrays.asList("1,1,HiHi", "2,2,HeHe", "3,2,HeHe");
+        createTestCsvSourceTable("T1", dataT1, "a int", "b bigint", "c varchar");
+        createTestCsvSourceTable("T2", dataT2, "a int", "b bigint", "c varchar");
+        createTestValuesSinkTable("MySink", "a int", "c1 varchar", "c2 varchar");
+
+        String jsonPlan =
+                tableEnv.getJsonPlan(
+                        "insert into MySink "
+                                + "SELECT t2.a, t2.c, t1.c\n"
+                                + "FROM (\n"
+                                + " SELECT if(a = 3, cast(null as int), a) as a, b, c FROM T1\n"
+                                + ") as t1\n"
+                                + "JOIN (\n"
+                                + " SELECT if(a = 3, cast(null as int), a) as a, b, c FROM T2\n"
+                                + ") as t2\n"
+                                + "ON \n"
+                                + "  ((t1.a is null AND t2.a is null) OR\n"
+                                + "  (t1.a = t2.a))\n"
+                                + "  AND t1.b > t2.b");
+        tableEnv.executeJsonPlan(jsonPlan).await();
+        List<String> expected =
+                Arrays.asList(
+                        "+I[1, HiHi, Hi2]",
+                        "+I[1, HiHi, Hi2]",
+                        "+I[1, HiHi, Hi3]",
+                        "+I[1, HiHi, Hi6]",
+                        "+I[1, HiHi, Hi8]",
+                        "+I[2, HeHe, Hi5]",
+                        "+I[null, HeHe, Hi9]");
+        assertResult(
+                expected.stream().sorted().collect(Collectors.toList()),
+                TestValuesTableFactory.getResults("MySink").stream()
+                        .sorted()
+                        .collect(Collectors.toList()));
+    }
+
+    @Test
+    public void testJoin() throws Exception {
+        createTestValuesSinkTable("MySink", "a3 varchar", "b4 varchar");
+        String jsonPlan =
+                tableEnv.getJsonPlan(
+                        "insert into MySink \n" + "SELECT a3, b4 FROM A, B WHERE a2 = b2");
+        tableEnv.executeJsonPlan(jsonPlan).await();
+        List<String> expected =
+                Arrays.asList(
+                        "+I[Hello world, Hallo Welt]", "+I[Hello, Hallo Welt]", "+I[Hi, Hallo]");
+        assertResult(

Review comment:
       ditto

##########
File path: 
flink-table/flink-table-planner-blink/src/main/java/org/apache/flink/table/planner/plan/nodes/exec/spec/JoinSpec.java
##########
@@ -21,35 +21,55 @@
 import org.apache.flink.table.runtime.operators.join.FlinkJoinType;
 import org.apache.flink.util.Preconditions;
 
+import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.annotation.JsonCreator;
+import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.annotation.JsonIgnore;
+import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.annotation.JsonProperty;
+
 import org.apache.calcite.rex.RexNode;
 
 import javax.annotation.Nullable;
 
+import java.util.Arrays;
+import java.util.Objects;
 import java.util.Optional;
 
 /**
  * JoinSpec describes how two tables will be joined.
  *
  * <p>This class corresponds to {@link org.apache.calcite.rel.core.Join} rel node.
  */
+@JsonIgnoreProperties(ignoreUnknown = true)
 public class JoinSpec {
+    public static final String FIELD_NAME_JOIN_TYPE = "joinType";
+    public static final String FIELD_NAME_LEFT_KEYS = "leftKeys";
+    public static final String FIELD_NAME_RIGHT_KEYS = "rightKeys";
+    public static final String FIELD_NAME_FILTER_NULLS = "filterNulls";
+    public static final String FIELD_NAME_NON_EQUI_CONDITION = "nonEquiCondition";
+
     /** {@link FlinkJoinType} of the join. */
+    @JsonProperty(FIELD_NAME_JOIN_TYPE)
     private final FlinkJoinType joinType;
     /** 0-based index of join keys in left side. */
+    @JsonProperty(FIELD_NAME_LEFT_KEYS)
     private final int[] leftKeys;
     /** 0-based index of join keys in right side. */
+    @JsonProperty(FIELD_NAME_RIGHT_KEYS)
     private final int[] rightKeys;
     /** whether to filter null values or not for each corresponding index join key. */
+    @JsonProperty(FIELD_NAME_FILTER_NULLS)
     private final boolean[] filterNulls;
     /** Non Equi join conditions. */
+    @JsonProperty(FIELD_NAME_NON_EQUI_CONDITION)
     private final @Nullable RexNode nonEquiCondition;
 
+    @JsonCreator
     public JoinSpec(
-            FlinkJoinType joinType,
-            int[] leftKeys,
-            int[] rightKeys,
-            boolean[] filterNulls,
-            @Nullable RexNode nonEquiCondition) {
+            @JsonProperty(FIELD_NAME_JOIN_TYPE) FlinkJoinType joinType,
+            @JsonProperty(FIELD_NAME_LEFT_KEYS) int[] leftKeys,
+            @JsonProperty(FIELD_NAME_RIGHT_KEYS) int[] rightKeys,
+            @JsonProperty(FIELD_NAME_FILTER_NULLS) boolean[] filterNulls,
+            @JsonProperty(FIELD_NAME_NON_EQUI_CONDITION) @Nullable RexNode nonEquiCondition) {
         this.joinType = Preconditions.checkNotNull(joinType);

Review comment:
       nit: It would be better to add some validation of the given arguments (null checks, and a check that the key and filter arrays have matching lengths), because instances are created from JSON, and the JSON may be edited by users.
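
       As a sketch of the kind of checks meant here (the exact checks and messages are up to the author):

           this.joinType = Preconditions.checkNotNull(joinType);
           this.leftKeys = Preconditions.checkNotNull(leftKeys);
           this.rightKeys = Preconditions.checkNotNull(rightKeys);
           this.filterNulls = Preconditions.checkNotNull(filterNulls);
           // the three arrays describe the same join keys, so their lengths must match
           Preconditions.checkArgument(
                   leftKeys.length == rightKeys.length && leftKeys.length == filterNulls.length,
                   "leftKeys, rightKeys and filterNulls must have the same length");
           this.nonEquiCondition = nonEquiCondition;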

##########
File path: 
flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/planner/plan/nodes/exec/stream/JoinJsonPlanTest_jsonplan/testFullJoin.out
##########
@@ -0,0 +1,185 @@
+{
+  "flinkVersion" : "",
+  "nodes" : [ {
+    "class" : "org.apache.flink.table.planner.plan.nodes.exec.stream.StreamExecTableSourceScan",
+    "scanTableSource" : {
+      "identifier" : {
+        "catalogName" : "default_catalog",
+        "databaseName" : "default_database",
+        "tableName" : "A"
+      },
+      "catalogTable" : {
+        "schema.2.data-type" : "BIGINT",
+        "connector" : "values",
+        "schema.0.data-type" : "INT",
+        "schema.2.name" : "a3",
+        "schema.1.name" : "a2",
+        "bounded" : "false",
+        "schema.0.name" : "a1",
+        "schema.1.data-type" : "BIGINT"
+      },
+      "sourceAbilitySpecs" : [ {
+        "type" : "ProjectPushDown",
+        "projectedFields" : [ [ 0 ] ],
+        "producedType" : "ROW<`a1` INT> NOT NULL"
+      } ]
+    },
+    "id" : 296,
+    "outputType" : "ROW<`a1` INT>",
+    "description" : "TableSourceScan(table=[[default_catalog, default_database, A, project=[a1]]], fields=[a1])",
+    "inputProperties" : [ ]
+  }, {
+    "class" : "org.apache.flink.table.planner.plan.nodes.exec.stream.StreamExecExchange",
+    "id" : 297,
+    "inputProperties" : [ {
+      "requiredDistribution" : {
+        "type" : "HASH",
+        "keys" : [ 0 ]
+      },
+      "damBehavior" : "PIPELINED",
+      "priority" : 0
+    } ],
+    "outputType" : "ROW<`a1` INT>",
+    "description" : "Exchange(distribution=[hash[a1]])"
+  }, {
+    "class" : "org.apache.flink.table.planner.plan.nodes.exec.stream.StreamExecTableSourceScan",
+    "scanTableSource" : {
+      "identifier" : {
+        "catalogName" : "default_catalog",
+        "databaseName" : "default_database",
+        "tableName" : "B"
+      },
+      "catalogTable" : {
+        "schema.2.data-type" : "BIGINT",
+        "connector" : "values",
+        "schema.0.data-type" : "INT",
+        "schema.2.name" : "b3",
+        "schema.1.name" : "b2",
+        "bounded" : "false",
+        "schema.0.name" : "b1",
+        "schema.1.data-type" : "BIGINT"
+      },
+      "sourceAbilitySpecs" : [ {
+        "type" : "ProjectPushDown",
+        "projectedFields" : [ [ 0 ] ],
+        "producedType" : "ROW<`b1` INT> NOT NULL"
+      } ]
+    },
+    "id" : 298,
+    "outputType" : "ROW<`b1` INT>",
+    "description" : "TableSourceScan(table=[[default_catalog, default_database, B, project=[b1]]], fields=[b1])",
+    "inputProperties" : [ ]
+  }, {
+    "class" : "org.apache.flink.table.planner.plan.nodes.exec.stream.StreamExecExchange",
+    "id" : 299,
+    "inputProperties" : [ {
+      "requiredDistribution" : {
+        "type" : "HASH",
+        "keys" : [ 0 ]
+      },
+      "damBehavior" : "PIPELINED",
+      "priority" : 0
+    } ],
+    "outputType" : "ROW<`b1` INT>",
+    "description" : "Exchange(distribution=[hash[b1]])"
+  }, {
+    "class" : "org.apache.flink.table.planner.plan.nodes.exec.stream.StreamExecJoin",
+    "joinSpec" : {
+      "joinType" : "FULL",
+      "leftKeys" : [ 0 ],
+      "rightKeys" : [ 0 ],
+      "filterNulls" : [ true ],
+      "nonEquiCondition" : {
+        "kind" : "LITERAL",
+        "value" : true,
+        "type" : {
+          "typeName" : "BOOLEAN",
+          "nullable" : false
+        }
+      }

Review comment:
       I think this is unnecessary; we can set `nonEquiCondition` to null when the condition is always true.
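
       As a sketch, the side that builds the spec could drop a trivially-true remainder before it is serialized (`remainingCondition` is an illustrative name for the extracted non-equi part; `RexNode#isAlwaysTrue` is existing Calcite API):

           RexNode nonEquiCondition =
                   remainingCondition.isAlwaysTrue() ? null : remainingCondition;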




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org

