This is an automated email from the ASF dual-hosted git repository.

agrove pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/datafusion-comet.git


The following commit(s) were added to refs/heads/main by this push:
     new 7a874616e docs: document negative zero cast-to-string incompatibility 
(#3811)
7a874616e is described below

commit 7a874616e88e51115ca3c51ba12fa7991386759f
Author: Andy Grove <[email protected]>
AuthorDate: Fri Mar 27 13:40:57 2026 -0600

    docs: document negative zero cast-to-string incompatibility (#3811)
---
 docs/source/user-guide/latest/compatibility.md              |  7 +++++++
 .../main/scala/org/apache/comet/expressions/CometCast.scala |  1 +
 spark/src/main/scala/org/apache/comet/serde/arrays.scala    | 13 +++++++++++--
 3 files changed, 19 insertions(+), 2 deletions(-)

diff --git a/docs/source/user-guide/latest/compatibility.md 
b/docs/source/user-guide/latest/compatibility.md
index e0bc5f06e..3ec665618 100644
--- a/docs/source/user-guide/latest/compatibility.md
+++ b/docs/source/user-guide/latest/compatibility.md
@@ -153,6 +153,13 @@ Cast operations in Comet fall into three levels of support:
   Spark.
 - **N/A**: Spark does not support this cast.
 
+### Negative Zero
+
+When casting floating-point values to strings, Spark normalizes negative zero 
(`-0.0`) to `"0.0"`, but Comet
may produce `"-0.0"`. Since negative zero and positive zero compare as equal 
(`-0.0 == 0.0` is true
in IEEE 754), this difference is unlikely to affect real-world results. See
+[#1036](https://github.com/apache/datafusion-comet/issues/1036) for more 
details.
+
 ### Legacy Mode
 
 <!--BEGIN:CAST_LEGACY_TABLE-->
diff --git a/spark/src/main/scala/org/apache/comet/expressions/CometCast.scala 
b/spark/src/main/scala/org/apache/comet/expressions/CometCast.scala
index d50aa5d8d..2188f8e9a 100644
--- a/spark/src/main/scala/org/apache/comet/expressions/CometCast.scala
+++ b/spark/src/main/scala/org/apache/comet/expressions/CometCast.scala
@@ -149,6 +149,7 @@ object CometCast extends CometExpressionSerde[Cast] with 
CometExprShim {
         isSupported(dt.elementType, dt1.elementType, timeZoneId, evalMode)
       case (dt: DataType, _) if dt.typeName == "timestamp_ntz" =>
         // https://github.com/apache/datafusion-comet/issues/378
+        // https://github.com/apache/datafusion-comet/issues/3179
         toType match {
           case DataTypes.TimestampType | DataTypes.DateType | 
DataTypes.StringType =>
             Incompatible()
diff --git a/spark/src/main/scala/org/apache/comet/serde/arrays.scala 
b/spark/src/main/scala/org/apache/comet/serde/arrays.scala
index 5d10ff8a3..47a6e9142 100644
--- a/spark/src/main/scala/org/apache/comet/serde/arrays.scala
+++ b/spark/src/main/scala/org/apache/comet/serde/arrays.scala
@@ -247,7 +247,12 @@ object CometArrayMin extends 
CometExpressionSerde[ArrayMin] {
 
 object CometArraysOverlap extends CometExpressionSerde[ArraysOverlap] {
 
-  override def getSupportLevel(expr: ArraysOverlap): SupportLevel = 
Incompatible(None)
+  override def getSupportLevel(expr: ArraysOverlap): SupportLevel =
+    Incompatible(
+      Some(
+        "Inconsistent behavior with NULL values" +
+          " (https://github.com/apache/datafusion-comet/issues/3645)" +
+          " (https://github.com/apache/datafusion-comet/issues/2036)"))
 
   override def convert(
       expr: ArraysOverlap,
@@ -446,7 +451,11 @@ object CometArrayInsert extends 
CometExpressionSerde[ArrayInsert] {
 
 object CometArrayUnion extends CometExpressionSerde[ArrayUnion] {
 
-  override def getSupportLevel(expr: ArrayUnion): SupportLevel = 
Incompatible(None)
+  override def getSupportLevel(expr: ArrayUnion): SupportLevel =
+    Incompatible(
+      Some(
+        "Correctness issue" +
+          " (https://github.com/apache/datafusion-comet/issues/3644)"))
 
   override def convert(
       expr: ArrayUnion,


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to