cloud-fan commented on code in PR #48621:
URL: https://github.com/apache/spark/pull/48621#discussion_r1848306730
##########
sql/core/src/main/scala/org/apache/spark/sql/jdbc/MsSqlServerDialect.scala:
##########

@@ -87,13 +87,28 @@ private case class MsSqlServerDialect() extends JdbcDialect with NoLegacyJDBCErr
       expr match {
         case e: Predicate => e.name() match {
           case "=" | "<>" | "<=>" | "<" | "<=" | ">" | ">=" =>
-            val Array(l, r) = e.children().map {
-              case p: Predicate => s"CASE WHEN ${inputToSQL(p)} THEN 1 ELSE 0 END"
-              case o => inputToSQL(o)
-            }
+            val Array(l, r) = e.children().map(inputToSQL)
             visitBinaryComparison(e.name(), l, r)
-          case "CASE_WHEN" => visitCaseWhen(expressionsToStringArray(e.children())) + " = 1"
-          case _ => super.build(expr)
+          case "CASE_WHEN" =>
+            // Since MsSqlServer cannot handle boolean expressions inside
+            // a CASE WHEN, it is necessary to convert those to another
+            // CASE WHEN expression that will return 1 or 0 depending on
+            // the result.
+            // Example:
+            // In: ... CASE WHEN a = b THEN c = d ... END
+            // Out: ... CASE WHEN a = b THEN CASE WHEN c = d THEN 1 ELSE 0 END ... END = 1
+            val stringArray = e.children().grouped(2).flatMap {
+              case Array(whenExpression, thenExpression) =>
+                Array(super.build(whenExpression), inputToSQL(thenExpression))
+              case Array(elseExpression) =>
+                Array(inputToSQL(elseExpression))
+            }.toArray
+
+            visitCaseWhen(stringArray) + " = 1"
+          // MsSqlServerDialect translates boolean literals to 1/0, no need to rewrite them.
+          case "ALWAYS_TRUE" | "ALWAYS_FALSE" =>
+            super.build(expr)
+          case _ => predicateToIntSQL(e)

Review Comment:
```suggestion
          case _ => predicateToIntSQL(super.build(e))
```
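To make the CASE_WHEN rewrite described in the diff comment concrete, here is a minimal standalone Scala sketch, not code from the PR: `CaseWhenRewriteSketch` and `renderCaseWhen` are hypothetical names, and the sketch assumes every THEN/ELSE branch is itself a boolean predicate (the real builder only wraps predicate branches when it renders children). It groups already-rendered child fragments into WHEN/THEN pairs plus an optional trailing ELSE, wraps each branch so it yields 1 or 0, and compares the outer CASE to 1.

```scala
// Standalone sketch only: illustrates the grouping/wrapping idea from the diff,
// operating on plain SQL fragment strings instead of Spark Expression objects.
object CaseWhenRewriteSketch {
  // children lay out as [when1, then1, when2, then2, ..., else?],
  // each already rendered as a SQL fragment.
  def renderCaseWhen(children: Seq[String]): String = {
    // Wrap every THEN/ELSE branch so it produces 1 or 0; WHEN conditions stay
    // as plain boolean conditions, which SQL Server accepts inside WHEN.
    // (Assumption: all branches here are boolean predicates.)
    val parts = children.grouped(2).flatMap {
      case Seq(whenSql, thenSql) =>
        Seq(whenSql, s"CASE WHEN $thenSql THEN 1 ELSE 0 END")
      case Seq(elseSql) =>
        Seq(s"CASE WHEN $elseSql THEN 1 ELSE 0 END")
    }.toSeq

    // Render the outer CASE and compare the whole expression to 1, because
    // SQL Server does not accept a bare CASE expression as a predicate.
    val sb = new StringBuilder("CASE")
    parts.grouped(2).foreach {
      case Seq(whenSql, thenSql) => sb.append(s" WHEN $whenSql THEN $thenSql")
      case Seq(elseSql)          => sb.append(s" ELSE $elseSql")
    }
    sb.append(" END = 1").toString
  }

  def main(args: Array[String]): Unit = {
    // In:  CASE WHEN a = b THEN c = d ELSE e = f END used as a filter
    println(renderCaseWhen(Seq("a = b", "c = d", "e = f")))
  }
}
```

Running the sketch prints `CASE WHEN a = b THEN CASE WHEN c = d THEN 1 ELSE 0 END ELSE CASE WHEN e = f THEN 1 ELSE 0 END END = 1`, matching the In/Out example in the diff comment.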