andrej-db commented on code in PR #48621:
URL: https://github.com/apache/spark/pull/48621#discussion_r1816953306


##########
sql/core/src/main/scala/org/apache/spark/sql/jdbc/MsSqlServerDialect.scala:
##########
@@ -88,11 +88,31 @@ private case class MsSqlServerDialect() extends JdbcDialect with NoLegacyJDBCErr
         case e: Predicate => e.name() match {
           case "=" | "<>" | "<=>" | "<" | "<=" | ">" | ">=" =>
             val Array(l, r) = e.children().map {
-              case p: Predicate => s"CASE WHEN ${inputToSQL(p)} THEN 1 ELSE 0 END"
+              case p: Predicate => inputToCaseWhenSQL(p)
               case o => inputToSQL(o)
             }
             visitBinaryComparison(e.name(), l, r)
-          case "CASE_WHEN" => visitCaseWhen(expressionsToStringArray(e.children())) + " = 1"
+          case "CASE_WHEN" =>
+            // Since MsSqlServer cannot handle boolean expressions inside
+            // a CASE WHEN, it is necessary to convert those to another
+            // CASE WHEN expression that will return 1 or 0 depending on
+            // the result. Exceptions are TRUE and FALSE, which already
+            // get translated to 1 and 0.
+            // Example:
+            // In:  ... CASE WHEN a = b THEN c = d ... END
+            // Out: ... CASE WHEN a = b THEN CASE WHEN c = d THEN 1 ELSE 0 END ... END = 1
+
+            // grouped(2) splits the Array[Expression] into groups of at most
+            // two elements (condition, value); the final group will contain
+            // only the ELSE expression.
+            val stringArray = e.children().grouped(2).flatMap { arr =>
+              arr.dropRight(1).map(inputToSQL) :+

Review Comment:
   done.
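
For context, here is a minimal standalone sketch of the rewrite described above, operating on already-rendered SQL strings instead of Spark's Expression tree. The names (CaseWhenRewriteSketch, wrap, rewrite) are hypothetical and not part of the PR; the sketch also assumes every branch value is a boolean expression that needs the 1/0 wrapping, whereas the dialect code only wraps Predicate children and, as noted in the comment above, leaves TRUE and FALSE alone since they already translate to 1 and 0.

    object CaseWhenRewriteSketch {
      // Wrap a boolean-producing SQL fragment so SQL Server, which has no
      // boolean value type in expressions, sees 1 or 0 instead.
      private def wrap(boolSql: String): String =
        s"CASE WHEN $boolSql THEN 1 ELSE 0 END"

      // children alternates condition, value, condition, value, ..., [else].
      // Conditions stay as-is (booleans are legal in a WHEN clause); branch
      // values and the ELSE value get wrapped, and the whole expression is
      // compared with 1 so the result is boolean again.
      def rewrite(children: Array[String]): String = {
        val parts = children.grouped(2).map {
          case Array(cond, value) => s"WHEN $cond THEN ${wrap(value)}"
          case Array(elseValue)   => s"ELSE ${wrap(elseValue)}"
        }
        parts.mkString("CASE ", " ", " END") + " = 1"
      }

      def main(args: Array[String]): Unit = {
        // Prints the single line (wrapped here for readability):
        // CASE WHEN a = b THEN CASE WHEN c = d THEN 1 ELSE 0 END
        //   ELSE CASE WHEN e = f THEN 1 ELSE 0 END END = 1
        println(rewrite(Array("a = b", "c = d", "e = f")))
      }
    }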



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org

