twalthr commented on code in PR #27777: URL: https://github.com/apache/flink/pull/27777#discussion_r2958353702
########## flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/stream/sql/ToChangelogTest.java: ########## @@ -0,0 +1,115 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.table.planner.plan.stream.sql; + +import org.apache.flink.table.api.TableConfig; +import org.apache.flink.table.planner.utils.TableTestBase; +import org.apache.flink.table.planner.utils.TableTestUtil; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import static org.apache.flink.core.testutils.FlinkAssertions.anyCauseMatches; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +/** Tests for the TO_CHANGELOG built-in process table function. */ +public class ToChangelogTest extends TableTestBase { Review Comment: > Testing argument validation errors feels correct here. This we can also do with semantic tests nowadays. > show the changelog mode coming out of each node This is a valid argument for me. In general, my goal is to keep the code base lean while having sufficient test coverage. Testing the same via different test bases should be avoided as much as possible. 
Otherwise people (and, in the future, AI) will add tests to multiple classes instead of only to the semantic tests. ########## flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/stream/sql/ToChangelogTest.java: ########## @@ -18,66 +18,94 @@ package org.apache.flink.table.planner.plan.stream.sql; +import org.apache.flink.table.api.ExplainDetail; import org.apache.flink.table.api.TableConfig; +import org.apache.flink.table.planner.utils.JavaScalaConversionUtil; import org.apache.flink.table.planner.utils.TableTestBase; import org.apache.flink.table.planner.utils.TableTestUtil; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; +import java.util.Collections; + import static org.apache.flink.core.testutils.FlinkAssertions.anyCauseMatches; import static org.assertj.core.api.Assertions.assertThatThrownBy; -/** Tests for the TO_CHANGELOG built-in process table function. */ +/** + * Plan tests for the TO_CHANGELOG built-in process table function. Uses {@link + * ExplainDetail#CHANGELOG_MODE} to verify changelog mode propagation through the plan. 
+ */ public class ToChangelogTest extends TableTestBase { private TableTestUtil util; @BeforeEach void setup() { util = streamTestUtil(TableConfig.getDefault()); + } + + @Test + void testRetractSource() { util.tableEnv() .executeSql( - "CREATE TABLE t (" + "CREATE TABLE retract_source (" + " id INT," + " name STRING," - + " val BIGINT" - + ") WITH ('connector' = 'values')"); util.tableEnv() .executeSql( - "CREATE VIEW t_updating AS SELECT id, name, COUNT(*) AS cnt FROM t GROUP BY id, name"); - } - - @Test - void testDefaultCall() { - util.verifyRelPlan("SELECT * FROM TO_CHANGELOG(input => TABLE t_updating PARTITION BY id)"); - } - - @Test - void testCustomOpName() { + + " PRIMARY KEY (id) NOT ENFORCED" + + ") WITH (" + + " 'connector' = 'values'," + + " 'changelog-mode' = 'I,UB,UA,D'" + + ")"); util.verifyRelPlan( - "SELECT * FROM TO_CHANGELOG(" - + "input => TABLE t_updating PARTITION BY id, " - + "op => DESCRIPTOR(op_code))"); + "SELECT * FROM TO_CHANGELOG(input => TABLE retract_source PARTITION BY id)", + JavaScalaConversionUtil.toScala( Review Comment: When a Java class has to do things like conversion, feel free to update the base class — e.g. by adding an overload — so that this is no longer required. ########## flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/functions/ptf/ToChangelogFunction.java: ########## @@ -108,18 +121,13 @@ public void eval( final RowData input, @Nullable final ColumnList op, @Nullable final Map<String, String> opMapping) { Review Comment: Also this should be MapData. Ideally our built-in functions do not need any conversion of data structures; otherwise the runtime needs to convert on every invocation. -- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. To unsubscribe, e-mail: [email protected] For queries about this service, please contact Infrastructure at: [email protected]
