comphead commented on code in PR #18205:
URL: https://github.com/apache/datafusion/pull/18205#discussion_r2450024930


##########
datafusion/sqllogictest/test_files/spark/math/abs.slt:
##########
@@ -23,10 +23,103 @@
 
 ## Original Query: SELECT abs(-1);
 ## PySpark 3.5.5 Result: {'abs(-1)': 1, 'typeof(abs(-1))': 'int', 'typeof(-1)': 'int'}
-#query
-#SELECT abs(-1::int);
+query I
+SELECT abs(-1::int);
+----
+1
+
+statement ok
+CREATE TABLE test_nullable_integer(
+    c1 TINYINT,
+    c2 SMALLINT,
+    c3 INT,
+    c4 BIGINT,
+    dataset TEXT
+    )
+    AS VALUES
+    (NULL, NULL, NULL, NULL, 'nulls'),
+    (0, 0, 0, 0, 'zeros'),
+    (1, 1, 1, 1, 'ones');
+
+query I
+INSERT into test_nullable_integer values(-128, -32768, -2147483648, -9223372036854775808, 'mins');
+----
+1
+
+# abs: signed int minimal values
+query IIII
+select abs(c1), abs(c2), abs(c3), abs(c4) from test_nullable_integer where dataset = 'mins'
+----
+-128 -32768 -2147483648 -9223372036854775808
+
+statement ok
+drop table test_nullable_integer
+
+statement ok
+CREATE TABLE test_nullable_float(
+    c1 float,
+    c2 double
+    ) AS VALUES
+    (-1.0, -1.0),
+    (1.0, 1.0),
+    (NULL, NULL),
+    (0., 0.),
+    ('NaN'::double, 'NaN'::double);
+
+# abs: floats
+query RR rowsort
+SELECT abs(c1), abs(c2) from test_nullable_float
+----
+0 0
+1 1
+1 1
+NULL NULL
+NaN NaN
+
+statement ok
+drop table test_nullable_float
+
+statement ok
+CREATE TABLE test_nullable_decimal(
+    c1 DECIMAL(10, 2),    /* Decimal128 */
+    c2 DECIMAL(38, 10),   /* Decimal128 with max precision */
+    c3 DECIMAL(40, 2),    /* Decimal256 */
+    c4 DECIMAL(76, 10)    /* Decimal256 with max precision */
+ ) AS VALUES
+    (0, 0, 0, 0),
+    (NULL, NULL, NULL, NULL);
+
+query I
+INSERT into test_nullable_decimal values
+    (
+        -99999999.99,
+        '-9999999999999999999999999999.9999999999',
+        '-99999999999999999999999999999999999999.99',
+        '-999999999999999999999999999999999999999999999999999999999999999999.9999999999'
+    ),
+    (
+        99999999.99,
+        '9999999999999999999999999999.9999999999',
+        '99999999999999999999999999999999999999.99',
+        '999999999999999999999999999999999999999999999999999999999999999999.9999999999'
+    )
+----
+2
+
+# abs: decimals
+query RRRR rowsort
+SELECT abs(c1), abs(c2), abs(c3), abs(c4) FROM test_nullable_decimal
+----
+0 0 0 0
+99999999.99 9999999999999999999999999999.9999999999 99999999999999999999999999999999999999.99 999999999999999999999999999999999999999999999999999999999999999999.9999999999
+99999999.99 9999999999999999999999999999.9999999999 99999999999999999999999999999999999999.99 999999999999999999999999999999999999999999999999999999999999999999.9999999999
+NULL NULL NULL NULL
+
+
+statement ok
+drop table test_nullable_decimal
 
 ## Original Query: SELECT abs(INTERVAL -'1-1' YEAR TO MONTH);
 ## PySpark 3.5.5 Result: {"abs(INTERVAL '-1-1' YEAR TO MONTH)": 13, "typeof(abs(INTERVAL '-1-1' YEAR TO MONTH))": 'interval year to month', "typeof(INTERVAL '-1-1' YEAR TO MONTH)": 'interval year to month'}
-#query
-#SELECT abs(INTERVAL '-1-1' YEAR TO MONTH::interval year to month);
+query error DataFusion error: This feature is not implemented: Unsupported SQL type INTERVAL YEAR TO MONTH

Review Comment:
   Let's create a GitHub ticket to fix this and reference it in a comment in the test file, in addition to the error.
   
   It looks like abs works on intervals in Spark only.
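   For illustration, once a tracking ticket exists, the interval case in abs.slt could carry a reference comment along these lines; this is just a sketch, and the issue URL below is a placeholder, not a real ticket:
   
       ## Original Query: SELECT abs(INTERVAL -'1-1' YEAR TO MONTH);
       ## PySpark 3.5.5 Result: {"abs(INTERVAL '-1-1' YEAR TO MONTH)": 13, ...}
       ## TODO: INTERVAL YEAR TO MONTH is unsupported in DataFusion; tracked in
       ## https://github.com/apache/datafusion/issues/NNNNN (placeholder)
       query error DataFusion error: This feature is not implemented: Unsupported SQL type INTERVAL YEAR TO MONTH
       SELECT abs(INTERVAL '-1-1' YEAR TO MONTH::interval year to month);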



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

