dejankrak-db commented on code in PR #49103:
URL: https://github.com/apache/spark/pull/49103#discussion_r1899000501


##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CollationTypeCoercion.scala:
##########
@@ -461,6 +421,66 @@ object CollationTypeCoercion {
         else right
     }
   }
+
+  /**
+   * Throws an analysis exception if the new data type has indeterminate 
collation,
+   * and the expression is not allowed to have inputs with indeterminate 
collations.
+   */
+  private def checkIndeterminateCollation(expression: Expression, newDataType: 
DataType): Unit = {
+    if (shouldFailWithIndeterminateCollation(expression, newDataType)) {
+      expression.failAnalysis(
+        errorClass = "INDETERMINATE_COLLATION_IN_EXPRESSION",
+        messageParameters = Map("expr" -> toSQLExpr(expression)))
+    }
+  }
+
+  /**
+   * Returns whether the given expression which isn't allowed to have inputs 
with indeterminate

Review Comment:
   Consider rewording the comment to:
   Returns whether the given expression has indeterminate collation in case it 
isn't allowed to have inputs with indeterminate collations, and thus should 
fail.
   
   In the original wording, it was a bit ambiguous whether the given expression
was implicitly assumed not to be allowed to have inputs with indeterminate
collation, or whether this was also evaluated as part of the method (with the
latter being the case, judging by the underlying implementation).



##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CollationTypeCoercion.scala:
##########
@@ -461,6 +421,66 @@ object CollationTypeCoercion {
         else right
     }
   }
+
+  /**
+   * Throws an analysis exception if the new data type has indeterminate 
collation,
+   * and the expression is not allowed to have inputs with indeterminate 
collations.
+   */
+  private def checkIndeterminateCollation(expression: Expression, newDataType: 
DataType): Unit = {
+    if (shouldFailWithIndeterminateCollation(expression, newDataType)) {
+      expression.failAnalysis(
+        errorClass = "INDETERMINATE_COLLATION_IN_EXPRESSION",
+        messageParameters = Map("expr" -> toSQLExpr(expression)))
+    }
+  }
+
+  /**
+   * Returns whether the given expression which isn't allowed to have inputs 
with indeterminate
+   * collations has indeterminate collation.
+   */
+  private def shouldFailWithIndeterminateCollation(expression: Expression): 
Boolean = {
+    def getDataTypeSafe(e: Expression): DataType = try {
+      e.dataType
+    } catch {
+      case _: Throwable => NullType
+    }
+
+    expression.children.exists(child => expression.resolved &&

Review Comment:
   Nit: no need to check expression.resolved for every child, i.e. it can be 
moved outside in front of expression.children.exists(), right?



##########
common/unsafe/src/main/java/org/apache/spark/sql/catalyst/util/CollationFactory.java:
##########
@@ -200,7 +203,11 @@ public Collation(
       // No Collation can simultaneously support binary equality and lowercase 
equality
       assert(!supportsBinaryEquality || !supportsLowercaseEquality);
 

Review Comment:
   Please add a comment explaining why PROVIDER_NULL is now acceptable, in
addition to SUPPORTED_PROVIDERS.



##########
common/unsafe/src/main/java/org/apache/spark/sql/catalyst/util/CollationFactory.java:
##########
@@ -1082,6 +1096,43 @@ static CollationMeta 
loadCollationMeta(CollationIdentifier collationIdentifier)
       }
     }
 
+    /**
+     * Collation that is a result of two different non-explicit collation.

Review Comment:
   nit: collation -> collations at the end



##########
common/unsafe/src/main/java/org/apache/spark/sql/catalyst/util/CollationFactory.java:
##########
@@ -1082,6 +1096,43 @@ static CollationMeta 
loadCollationMeta(CollationIdentifier collationIdentifier)
       }
     }
 
+    /**
+     * Collation that is a result of two different non-explicit collation.
+     */
+    private static class IndeterminateCollation extends Collation {
+
+      IndeterminateCollation() {
+        super(
+          "null",

Review Comment:
   Observation: Using named parameters here and below would improve 
readability, though I acknowledge that it may be a thing of personal preference.



##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CollationTypeCoercion.scala:
##########
@@ -461,6 +421,66 @@ object CollationTypeCoercion {
         else right
     }
   }
+
+  /**
+   * Throws an analysis exception if the new data type has indeterminate 
collation,
+   * and the expression is not allowed to have inputs with indeterminate 
collations.
+   */
+  private def checkIndeterminateCollation(expression: Expression, newDataType: 
DataType): Unit = {
+    if (shouldFailWithIndeterminateCollation(expression, newDataType)) {
+      expression.failAnalysis(
+        errorClass = "INDETERMINATE_COLLATION_IN_EXPRESSION",
+        messageParameters = Map("expr" -> toSQLExpr(expression)))
+    }
+  }
+
+  /**
+   * Returns whether the given expression which isn't allowed to have inputs 
with indeterminate
+   * collations has indeterminate collation.
+   */
+  private def shouldFailWithIndeterminateCollation(expression: Expression): 
Boolean = {
+    def getDataTypeSafe(e: Expression): DataType = try {
+      e.dataType
+    } catch {
+      case _: Throwable => NullType
+    }
+
+    expression.children.exists(child => expression.resolved &&
+      shouldFailWithIndeterminateCollation(expression, getDataTypeSafe(child)))
+  }
+
+  /**
+   * Returns whether the given expression should fail with indeterminate 
collation if it is cast
+   * to the given data type.
+   */
+  private def shouldFailWithIndeterminateCollation(
+      expression: Expression,
+      dataType: DataType): Boolean = {
+    !canContainIndeterminateCollation(expression) && 
hasIndeterminateCollation(dataType)
+  }
+
+  /**
+   * Returns whether the given data type has indeterminate collation.
+   */
+  private def hasIndeterminateCollation(dataType: DataType): Boolean = {
+    dataType.existsRecursively {
+      case IndeterminateStringType | StringTypeWithContext(_, Indeterminate) 
=> true
+      case _ => false
+    }
+  }
+
+  /**
+   * Returns whether the given expression can contain indeterminate collation.
+   */
+  private def canContainIndeterminateCollation(expr: Expression): Boolean = 
expr match {
+    // This is not an exhaustive list, and it's fine to miss some expressions. 
The only difference

Review Comment:
   Perhaps a comment with a guideline for engineers adding further expressions
in the future would be helpful: if a new expression can contain indeterminate
collation, it should be added to the list here, to the best of one's knowledge.
Even if an expression is missed, the runtime handling will still ensure that it
fails accordingly (though the analysis-time path is preferable, as it saves
burning some extra cycles).



##########
sql/core/src/test/scala/org/apache/spark/sql/collation/IndeterminateCollationTestSuite.scala:
##########
@@ -0,0 +1,218 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.collation
+
+import org.apache.spark.{SparkRuntimeException, SparkThrowable}
+import org.apache.spark.sql.{AnalysisException, DataFrame, QueryTest, Row}
+import org.apache.spark.sql.test.SharedSparkSession
+import org.apache.spark.sql.types.StringType
+
+class IndeterminateCollationTestSuite extends QueryTest with 
SharedSparkSession {
+
+  val testTableName = "tst_table"
+  val dataSource = "parquet"
+
+  def withTestTable(testCode: => Unit): Unit = {
+    withTable(testTableName) {
+      sql(s"""
+           |CREATE TABLE $testTableName (
+           |  c1 STRING COLLATE UTF8_LCASE,

Review Comment:
   Is there additional value in also parameterizing the two explicit collations 
used here and mixing them up throughout the test cases below, as opposed to 
always using UTF8_LCASE and UTF8_BINARY?



##########
sql/core/src/test/scala/org/apache/spark/sql/collation/IndeterminateCollationTestSuite.scala:
##########
@@ -0,0 +1,218 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.collation
+
+import org.apache.spark.{SparkRuntimeException, SparkThrowable}
+import org.apache.spark.sql.{AnalysisException, DataFrame, QueryTest, Row}
+import org.apache.spark.sql.test.SharedSparkSession
+import org.apache.spark.sql.types.StringType
+
+class IndeterminateCollationTestSuite extends QueryTest with 
SharedSparkSession {
+
+  val testTableName = "tst_table"
+  val dataSource = "parquet"
+
+  def withTestTable(testCode: => Unit): Unit = {
+    withTable(testTableName) {
+      sql(s"""
+           |CREATE TABLE $testTableName (
+           |  c1 STRING COLLATE UTF8_LCASE,
+           |  c2 STRING COLLATE UTF8_BINARY
+           |) USING $dataSource
+           |""".stripMargin)
+      testCode
+    }
+  }
+
+  def assertIndeterminateCollationInExpressionError(query: => DataFrame): Unit 
= {
+    val exception = intercept[AnalysisException] {
+      query
+    }
+    assert(exception.getCondition === "INDETERMINATE_COLLATION_IN_EXPRESSION")
+  }
+
+  def assertRuntimeIndeterminateCollationError(query: => DataFrame): Unit = {
+    val exception = intercept[SparkRuntimeException] {
+      query.collect()
+    }
+    assert(exception.getCondition === "INDETERMINATE_COLLATION")
+  }
+
+  def assertIndeterminateCollationInSchemaError(columnPaths: String*)(
+      query: => DataFrame): Unit = {
+    checkError(
+      exception = intercept[AnalysisException] {
+        query.collect()
+      },
+      condition = "INDETERMINATE_COLLATION_IN_SCHEMA",
+      parameters = Map("columnPaths" -> columnPaths.mkString(", ")))
+  }
+
+  test("cannot use indeterminate collation name") {
+    checkError(
+      intercept[SparkThrowable] {
+        sql("SELECT 'a' COLLATE NULL")
+      },
+      "COLLATION_INVALID_NAME",
+      parameters = Map("proposals" -> "nl", "collationName" -> "NULL"))
+
+    checkError(
+      intercept[SparkThrowable] {
+        sql("SELECT CAST('a' AS STRING COLLATE NULL)")
+      },
+      "COLLATION_INVALID_NAME",
+      parameters = Map("proposals" -> "nl", "collationName" -> "NULL"))
+
+    intercept[SparkThrowable] {
+      StringType("NULL")
+    }
+  }
+
+  test("various expressions that support indeterminate collation") {
+    withTestTable {
+      sql(s"INSERT INTO $testTableName VALUES ('a', 'b')")
+
+      val expressions = Seq(
+        "c1 || c2",
+        "concat(c1, c2)",
+        "concat_ws(' ', c1, c2)",
+        "length(c1 || c2)",
+        "array(c1 || c2)",
+        "map('a', c1 || c2)",
+        "named_struct('f1', c1 || c2, 'f2', c2)",
+        "repeat(c1 || c2, 2)",
+        "elt(1, c1 || c2, c2)",
+        "coalesce(c1 || c2, c2)")
+
+      expressions.foreach { expr =>
+        sql(s"SELECT $expr FROM $testTableName").collect()
+      }
+
+      checkAnswer(sql(s"SELECT COLLATION(c1 || c2) FROM $testTableName"), 
Seq(Row("null")))
+    }
+  }
+
+  test("expressions that don't support indeterminate collations and fail in 
analyzer") {
+    withTestTable {
+      sql(s"INSERT INTO $testTableName VALUES ('a', 'b')")
+
+      val expressions = Seq(
+        "c1 = c2",
+        "c1 != c2",
+        "c1 > c2",
+        "STARTSWITH(c1 || c2, c1)",
+        "ENDSWITH(c1 || c2, c2)",
+        "UPPER(c1 || c2) = 'AB'",
+        "INITCAP(c1 || c2) = 'Ab'",
+        "FIND_IN_SET(c1 || c2, 'a,b')",
+        "INSTR(c1 || c2, c1)",
+        "LOCATE(c1, c1 || c2)")
+
+      expressions.foreach { expr =>
+        assertIndeterminateCollationInExpressionError {
+          sql(s"SELECT $expr FROM $testTableName")
+        }
+      }
+    }
+  }
+
+  test("expressions that don't support indeterminate collation and fail in 
runtime") {
+    withTestTable {
+      sql(s"INSERT INTO $testTableName VALUES ('a', 'b')")
+
+      val expressions = Seq("str_to_map(c1 || c2, 'a', 'b')")
+
+      expressions.foreach { expr =>
+        assertRuntimeIndeterminateCollationError {
+          sql(s"SELECT $expr FROM $testTableName")
+        }
+      }
+    }
+  }
+
+  test("insert works with indeterminate collation") {
+    withTestTable {
+      sql(s"""
+           |INSERT INTO $testTableName
+           |SELECT c1 || c2, c2 || c1
+           |FROM VALUES ('a', 'b') AS t(c1, c2)
+           |""".stripMargin)
+
+      checkAnswer(sql(s"SELECT * FROM $testTableName"), Seq(Row("ab", "ba")))
+    }
+  }
+
+  test("create table as select fails with indeterminate collation") {
+    withTestTable {
+      assertIndeterminateCollationInSchemaError("concat(c1, c2)") {
+        sql(s"""
+             |CREATE TABLE t AS
+             |SELECT c1 || c2 FROM $testTableName
+             |""".stripMargin)
+      }
+
+      assertIndeterminateCollationInSchemaError("col") {
+        sql(s"""
+             |CREATE TABLE t AS
+             |SELECT concat_ws(', ', c1, c2) as col FROM $testTableName
+             |""".stripMargin)
+      }
+
+      assertIndeterminateCollationInSchemaError("arr.element", "map.value", 
"struct.f1")(sql(s"""
+             |CREATE TABLE t
+             |USING $dataSource
+             |AS SELECT
+             |  array(c1 || c2) AS arr,
+             |  map('a', c1 || c2) AS map,
+             |  named_struct('f1', c1 || c2, 'f2', c2) AS struct
+             |FROM $testTableName
+             |""".stripMargin))
+    }
+  }
+
+  test("can create a view with indeterminate collation") {
+    withTestTable {
+      sql(s"INSERT INTO $testTableName VALUES ('a', 'b')")
+      sql(s"INSERT INTO $testTableName VALUES ('c', 'd')")
+
+      withView("v") {
+        sql(s"""
+             |CREATE VIEW v AS
+             |SELECT c1 || c2 as col FROM $testTableName
+             |""".stripMargin)
+
+        checkAnswer(sql("SELECT * FROM v"), Seq(Row("ab"), Row("cd")))
+        checkAnswer(sql("SELECT DISTINCT COLLATION(col) FROM v"), 
Seq(Row("null")))
+
+        // group by should fail in runtime when fetching the collator
+        assertRuntimeIndeterminateCollationError {
+          sql(s"SELECT * FROM v GROUP BY col")
+        }

Review Comment:
   Should we also, as part of this test case, cover the happy path of fixing
the underlying indeterminate collation issue by altering one column of the
table to have the same collation as the other column, and then validating that
the GROUP BY query on top of the view succeeds (though I'm not sure whether the
view will also need to be altered to pick up the table changes)?



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org

Reply via email to