Airblader commented on a change in pull request #17204:
URL: https://github.com/apache/flink/pull/17204#discussion_r705966840



##########
File path: flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/calcite/InsertValidationTest.java
##########
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.planner.calcite;
+
+import org.apache.flink.table.api.DataTypes;
+import org.apache.flink.table.api.Schema;
+import org.apache.flink.table.api.ValidationException;
+import org.apache.flink.table.catalog.CatalogTable;
+import org.apache.flink.table.catalog.ObjectPath;
+import org.apache.flink.table.planner.utils.PlannerMocks;
+
+import org.apache.calcite.sql.SqlInsert;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+/** Test for {@link FlinkCalciteSqlValidator#validateInsert(SqlInsert)}. */
+public class InsertValidationTest {

Review comment:
       I would make this a `FlinkCalciteSqlValidatorTest` instead. That's the class
someone looking at the validator would expect to find, and for the moment we only
have one test here anyway.

##########
File path: flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/calcite/InsertValidationTest.java
##########
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.planner.calcite;
+
+import org.apache.flink.table.api.DataTypes;
+import org.apache.flink.table.api.Schema;
+import org.apache.flink.table.api.ValidationException;
+import org.apache.flink.table.catalog.CatalogTable;
+import org.apache.flink.table.catalog.ObjectPath;
+import org.apache.flink.table.planner.utils.PlannerMocks;
+
+import org.apache.calcite.sql.SqlInsert;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+/** Test for {@link FlinkCalciteSqlValidator#validateInsert(SqlInsert)}. */
+public class InsertValidationTest {
+
+    private final PlannerMocks plannerMocks = PlannerMocks.createDefault();
+
+    @Before
+    public void setUp() throws Exception {
+        final ObjectPath path1 =
+                new ObjectPath(plannerMocks.getCatalogManager().getCurrentDatabase(), "t1");
+        final Schema tableSchema = Schema.newBuilder().column("a", DataTypes.INT()).build();
+        Map<String, String> options = new HashMap<>();
+        options.put("connector", "COLLECTION");

Review comment:
       I don't think we actually need to define a connector here, do we? If it isn't
needed, I'd drop it, both to avoid unnecessary setup code and to avoid relying on
these old test connectors. Then we can also just use `Collections.emptyMap()`.
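
   Roughly, the setup would then shrink to something like this (untested sketch using
the imports already present in the test; I'm assuming the validator never needs to
resolve a table factory here):

```java
// Sketch: no connector option needed for a pure validation test.
final Schema tableSchema = Schema.newBuilder().column("a", DataTypes.INT()).build();
final CatalogTable table =
        CatalogTable.of(tableSchema, null, Collections.emptyList(), Collections.emptyMap());
```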

##########
File path: flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/calcite/InsertValidationTest.java
##########
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.planner.calcite;
+
+import org.apache.flink.table.api.DataTypes;
+import org.apache.flink.table.api.Schema;
+import org.apache.flink.table.api.ValidationException;
+import org.apache.flink.table.catalog.CatalogTable;
+import org.apache.flink.table.catalog.ObjectPath;
+import org.apache.flink.table.planner.utils.PlannerMocks;
+
+import org.apache.calcite.sql.SqlInsert;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+/** Test for {@link FlinkCalciteSqlValidator#validateInsert(SqlInsert)}. */
+public class InsertValidationTest {
+
+    private final PlannerMocks plannerMocks = PlannerMocks.createDefault();
+
+    @Before
+    public void setUp() throws Exception {
+        final ObjectPath path1 =

Review comment:
       nit: there's only one object path, so we don't need to enumerate it.

##########
File path: flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/calcite/InsertValidationTest.java
##########
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.planner.calcite;
+
+import org.apache.flink.table.api.DataTypes;
+import org.apache.flink.table.api.Schema;
+import org.apache.flink.table.api.ValidationException;
+import org.apache.flink.table.catalog.CatalogTable;
+import org.apache.flink.table.catalog.ObjectPath;
+import org.apache.flink.table.planner.utils.PlannerMocks;
+
+import org.apache.calcite.sql.SqlInsert;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+/** Test for {@link FlinkCalciteSqlValidator#validateInsert(SqlInsert)}. */
+public class InsertValidationTest {
+
+    private final PlannerMocks plannerMocks = PlannerMocks.createDefault();
+
+    @Before
+    public void setUp() throws Exception {
+        final ObjectPath path1 =
+                new ObjectPath(plannerMocks.getCatalogManager().getCurrentDatabase(), "t1");
+        final Schema tableSchema = Schema.newBuilder().column("a", DataTypes.INT()).build();
+        Map<String, String> options = new HashMap<>();
+        options.put("connector", "COLLECTION");
+
+        CatalogTable table = CatalogTable.of(tableSchema, null, Collections.emptyList(), options);
+
+        plannerMocks.getCurrentCatalog().createTable(path1, table, false);

Review comment:
       (You could also register it as a temporary table instead of creating it in a
catalog, which would remove the dependency on a catalog here, but ultimately it makes
no difference.)
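
   For reference, that could look roughly like this (untested sketch; I'm assuming
`CatalogManager#createTemporaryTable` and `ObjectIdentifier` are accessible from the
test):

```java
// Sketch: register the table as a temporary table via the CatalogManager
// instead of creating it in the current catalog.
final CatalogManager catalogManager = plannerMocks.getCatalogManager();
final ObjectIdentifier identifier =
        ObjectIdentifier.of(
                catalogManager.getCurrentCatalog(),
                catalogManager.getCurrentDatabase(),
                "t1");
catalogManager.createTemporaryTable(table, identifier, false);
```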

##########
File path: flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/utils/PlannerMocks.java
##########
@@ -69,8 +77,25 @@ public static FlinkPlannerImpl createDefaultPlanner() {
                         functionCatalog.asLookup(parser::parseIdentifier),
                         catalogManager.getDataTypeFactory(),
                         parser::parseSqlExpression));
+    }
+
+    public FlinkPlannerImpl getPlanner() {
         return planner;
     }
 
-    private PlannerMocks() {}
+    public ParserImpl getParser() {
+        return parser;
+    }
+
+    public CatalogManager getCatalogManager() {
+        return catalogManager;
+    }
+
+    public Catalog getCurrentCatalog() {

Review comment:
       This method seems redundant; we already have access to the catalog manager.
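
   Call sites can then resolve the catalog themselves, e.g. (sketch):

```java
// Sketch: resolve the current catalog through the CatalogManager directly,
// so PlannerMocks does not need a dedicated accessor for it.
final CatalogManager catalogManager = plannerMocks.getCatalogManager();
final Catalog currentCatalog =
        catalogManager.getCatalog(catalogManager.getCurrentCatalog()).get();
```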

##########
File path: flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/calcite/InsertValidationTest.java
##########
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.planner.calcite;
+
+import org.apache.flink.table.api.DataTypes;
+import org.apache.flink.table.api.Schema;
+import org.apache.flink.table.api.ValidationException;
+import org.apache.flink.table.catalog.CatalogTable;
+import org.apache.flink.table.catalog.ObjectPath;
+import org.apache.flink.table.planner.utils.PlannerMocks;
+
+import org.apache.calcite.sql.SqlInsert;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+/** Test for {@link FlinkCalciteSqlValidator#validateInsert(SqlInsert)}. */
+public class InsertValidationTest {
+
+    private final PlannerMocks plannerMocks = PlannerMocks.createDefault();
+
+    @Before
+    public void setUp() throws Exception {

Review comment:
       In general I prefer doing this setup in each test, because different tests
typically need different tables, and shared setup creates a dependency between them
(it becomes hard to tell whether some setup code is actually still needed). However,
we would then have to make sure the state is reset between tests (or use a separate
instance of the mock per test). I'm OK with leaving this as-is, just wanted to share
this thought.
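
   For illustration, the per-test variant could go through a small helper along these
lines (untested sketch, the helper name is made up):

```java
// Sketch: each test registers exactly the tables it needs via a helper,
// so no shared @Before state is required.
private void createTestTable(String name, Schema schema) throws Exception {
    final ObjectPath path =
            new ObjectPath(plannerMocks.getCatalogManager().getCurrentDatabase(), name);
    final CatalogTable table =
            CatalogTable.of(schema, null, Collections.emptyList(), Collections.emptyMap());
    plannerMocks.getCurrentCatalog().createTable(path, table, false);
}
```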

##########
File path: flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/calcite/FlinkCalciteSqlValidator.java
##########
@@ -125,4 +126,14 @@ public void validateColumnListParams(
         // this makes it possible to ignore them in the validator and fall back to regular row types
         // see also SqlFunction#deriveType
     }
+
+    @Override
+    public void validateInsert(SqlInsert insert) {
+        super.validateInsert(insert);
+
+        // We don't support UPSERT INTO semantics (see FLINK-24225).
+        if (insert.isUpsert()) {
+            throw new ValidationException("UPSERT INTO statement is not supported");

Review comment:
       Maybe we can make this message a bit more helpful?
   
   > UPSERT INTO is not yet supported. Please use INSERT INTO instead.
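
   i.e. something along these lines (sketch of the wording only):

```java
// Sketch: suggested, more actionable error message.
if (insert.isUpsert()) {
    throw new ValidationException(
            "UPSERT INTO is not yet supported. Please use INSERT INTO instead.");
}
```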

##########
File path: flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/utils/PlannerMocks.java
##########
@@ -69,8 +77,25 @@ public static FlinkPlannerImpl createDefaultPlanner() {
                         functionCatalog.asLookup(parser::parseIdentifier),
                         catalogManager.getDataTypeFactory(),
                         parser::parseSqlExpression));
+    }
+
+    public FlinkPlannerImpl getPlanner() {
         return planner;
     }
 
-    private PlannerMocks() {}
+    public ParserImpl getParser() {
+        return parser;
+    }
+
+    public CatalogManager getCatalogManager() {
+        return catalogManager;
+    }
+
+    public Catalog getCurrentCatalog() {
+        return catalogManager.getCatalog(catalogManager.getCurrentCatalog()).get();
+    }
+
+    public static PlannerMocks createDefault() {

Review comment:
       nit: there is only the default and nothing else, so we could also just name
this `create`.




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: issues-unsubscr...@flink.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org

