This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/datafusion.git


The following commit(s) were added to refs/heads/main by this push:
     new c1352360bd Improve error messages with nicer formatting of Date and 
Time types (#19954)
c1352360bd is described below

commit c1352360bd6b7f2373e5bdd37da85f2c9ca95e76
Author: Emil Ernerfeldt <[email protected]>
AuthorDate: Sat Jan 24 13:16:59 2026 +0100

    Improve error messages with nicer formatting of Date and Time types (#19954)
    
    * Follow-up to: https://github.com/apache/datafusion/pull/17565
    * Related: https://github.com/apache/arrow-rs/pull/8290
    
    ## Rationale for this change
    I believe that error messages should be as readable as possible. Aim for
    `rustc` more than `gcc`.
    
    `Display` is the nice, user-facing formatter. `Debug` is for… well,
    debugging.
    
    ## What changes are included in this PR?
    Change a bunch of `{:?}` format strings to `{}`. I'm sure I missed a lot
    of them, because I know of no way to enforce this without
    * https://github.com/rust-lang/rust-clippy/issues/8581
    
    ## Are these changes tested?
    I assume CI runs `cargo test` :)
    
    ## Are there any user-facing changes?
    Yes! Error messages should be a bit more readable now.
    
    ---------
    
    Co-authored-by: Tim Saucer <[email protected]>
---
 datafusion/common/src/types/native.rs              | 52 +++++++++++++-
 datafusion/core/src/datasource/file_format/avro.rs |  4 +-
 .../core/src/datasource/file_format/parquet.rs     |  6 +-
 datafusion/core/src/physical_planner.rs            |  2 +-
 datafusion/core/tests/dataframe/mod.rs             |  2 +-
 datafusion/datasource/src/sink.rs                  |  2 +-
 datafusion/expr/src/logical_plan/display.rs        |  8 +--
 datafusion/expr/src/type_coercion/functions.rs     |  2 +-
 datafusion/functions-nested/src/array_has.rs       |  2 +-
 datafusion/functions-nested/src/flatten.rs         |  2 +-
 datafusion/functions-nested/src/map.rs             |  2 +-
 datafusion/functions-nested/src/utils.rs           |  2 +-
 datafusion/functions/src/core/union_tag.rs         |  2 +-
 datafusion/functions/src/datetime/to_char.rs       |  4 +-
 .../src/decorrelate_predicate_subquery.rs          |  4 +-
 .../src/equivalence/properties/mod.rs              |  2 +-
 datafusion/physical-expr/src/expressions/cast.rs   | 12 +---
 .../physical-expr/src/expressions/cast_column.rs   |  2 +-
 .../physical-expr/src/expressions/try_cast.rs      |  6 +-
 .../spark/src/function/string/format_string.rs     |  2 +-
 datafusion/spark/src/function/url/parse_url.rs     | 13 +++-
 datafusion/sqllogictest/test_files/aggregate.slt   | 10 +--
 datafusion/sqllogictest/test_files/array.slt       |  4 +-
 .../sqllogictest/test_files/arrow_typeof.slt       | 16 ++---
 .../sqllogictest/test_files/datetime/date_part.slt | 30 ++++----
 .../test_files/datetime/timestamps.slt             | 84 +++++++++++-----------
 datafusion/sqllogictest/test_files/dictionary.slt  |  4 +-
 datafusion/sqllogictest/test_files/explain.slt     | 12 ++--
 datafusion/sqllogictest/test_files/group_by.slt    |  2 +-
 .../sqllogictest/test_files/information_schema.slt | 24 +++----
 datafusion/sqllogictest/test_files/joins.slt       |  8 +--
 datafusion/sqllogictest/test_files/scalar.slt      |  4 +-
 .../test_files/spark/string/format_string.slt      | 32 ++++-----
 33 files changed, 206 insertions(+), 157 deletions(-)

diff --git a/datafusion/common/src/types/native.rs 
b/datafusion/common/src/types/native.rs
index 766c504416..5ef90b7209 100644
--- a/datafusion/common/src/types/native.rs
+++ b/datafusion/common/src/types/native.rs
@@ -186,7 +186,57 @@ pub enum NativeType {
 
 impl Display for NativeType {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        write!(f, "{self:?}") // TODO: nicer formatting
+        // Match the format used by arrow::datatypes::DataType's Display impl
+        match self {
+            Self::Null => write!(f, "Null"),
+            Self::Boolean => write!(f, "Boolean"),
+            Self::Int8 => write!(f, "Int8"),
+            Self::Int16 => write!(f, "Int16"),
+            Self::Int32 => write!(f, "Int32"),
+            Self::Int64 => write!(f, "Int64"),
+            Self::UInt8 => write!(f, "UInt8"),
+            Self::UInt16 => write!(f, "UInt16"),
+            Self::UInt32 => write!(f, "UInt32"),
+            Self::UInt64 => write!(f, "UInt64"),
+            Self::Float16 => write!(f, "Float16"),
+            Self::Float32 => write!(f, "Float32"),
+            Self::Float64 => write!(f, "Float64"),
+            Self::Timestamp(unit, Some(tz)) => write!(f, "Timestamp({unit}, 
{tz:?})"),
+            Self::Timestamp(unit, None) => write!(f, "Timestamp({unit})"),
+            Self::Date => write!(f, "Date"),
+            Self::Time(unit) => write!(f, "Time({unit})"),
+            Self::Duration(unit) => write!(f, "Duration({unit})"),
+            Self::Interval(unit) => write!(f, "Interval({unit:?})"),
+            Self::Binary => write!(f, "Binary"),
+            Self::FixedSizeBinary(size) => write!(f, 
"FixedSizeBinary({size})"),
+            Self::String => write!(f, "String"),
+            Self::List(field) => write!(f, "List({})", field.logical_type),
+            Self::FixedSizeList(field, size) => {
+                write!(f, "FixedSizeList({size} x {})", field.logical_type)
+            }
+            Self::Struct(fields) => {
+                write!(f, "Struct(")?;
+                for (i, field) in fields.iter().enumerate() {
+                    if i > 0 {
+                        write!(f, ", ")?;
+                    }
+                    write!(f, "{:?}: {}", field.name, field.logical_type)?;
+                }
+                write!(f, ")")
+            }
+            Self::Union(fields) => {
+                write!(f, "Union(")?;
+                for (i, (type_id, field)) in fields.iter().enumerate() {
+                    if i > 0 {
+                        write!(f, ", ")?;
+                    }
+                    write!(f, "{type_id}: ({:?}: {})", field.name, 
field.logical_type)?;
+                }
+                write!(f, ")")
+            }
+            Self::Decimal(precision, scale) => write!(f, "Decimal({precision}, 
{scale})"),
+            Self::Map(field) => write!(f, "Map({})", field.logical_type),
+        }
     }
 }
 
diff --git a/datafusion/core/src/datasource/file_format/avro.rs 
b/datafusion/core/src/datasource/file_format/avro.rs
index cad35d43db..7cf23ee294 100644
--- a/datafusion/core/src/datasource/file_format/avro.rs
+++ b/datafusion/core/src/datasource/file_format/avro.rs
@@ -95,7 +95,7 @@ mod tests {
             .schema()
             .fields()
             .iter()
-            .map(|f| format!("{}: {:?}", f.name(), f.data_type()))
+            .map(|f| format!("{}: {}", f.name(), f.data_type()))
             .collect();
         assert_eq!(
             vec![
@@ -109,7 +109,7 @@ mod tests {
                 "double_col: Float64",
                 "date_string_col: Binary",
                 "string_col: Binary",
-                "timestamp_col: Timestamp(Microsecond, None)",
+                "timestamp_col: Timestamp(µs)",
             ],
             x
         );
diff --git a/datafusion/core/src/datasource/file_format/parquet.rs 
b/datafusion/core/src/datasource/file_format/parquet.rs
index 47ce519f01..a6db617907 100644
--- a/datafusion/core/src/datasource/file_format/parquet.rs
+++ b/datafusion/core/src/datasource/file_format/parquet.rs
@@ -815,7 +815,7 @@ mod tests {
             .schema()
             .fields()
             .iter()
-            .map(|f| format!("{}: {:?}", f.name(), f.data_type()))
+            .map(|f| format!("{}: {}", f.name(), f.data_type()))
             .collect();
         let y = x.join("\n");
         assert_eq!(expected, y);
@@ -841,7 +841,7 @@ mod tests {
              double_col: Float64\n\
              date_string_col: Binary\n\
              string_col: Binary\n\
-             timestamp_col: Timestamp(Nanosecond, None)";
+             timestamp_col: Timestamp(ns)";
         _run_read_alltypes_plain_parquet(ForceViews::No, no_views).await?;
 
         let with_views = "id: Int32\n\
@@ -854,7 +854,7 @@ mod tests {
              double_col: Float64\n\
              date_string_col: BinaryView\n\
              string_col: BinaryView\n\
-             timestamp_col: Timestamp(Nanosecond, None)";
+             timestamp_col: Timestamp(ns)";
         _run_read_alltypes_plain_parquet(ForceViews::Yes, with_views).await?;
 
         Ok(())
diff --git a/datafusion/core/src/physical_planner.rs 
b/datafusion/core/src/physical_planner.rs
index 94c8fd510a..e7035910de 100644
--- a/datafusion/core/src/physical_planner.rs
+++ b/datafusion/core/src/physical_planner.rs
@@ -2753,7 +2753,7 @@ impl<'a> OptimizationInvariantChecker<'a> {
             && !is_allowed_schema_change(previous_schema.as_ref(), 
plan.schema().as_ref())
         {
             internal_err!(
-                "PhysicalOptimizer rule '{}' failed. Schema mismatch. Expected 
original schema: {:?}, got new schema: {:?}",
+                "PhysicalOptimizer rule '{}' failed. Schema mismatch. Expected 
original schema: {}, got new schema: {}",
                 self.rule.name(),
                 previous_schema,
                 plan.schema()
diff --git a/datafusion/core/tests/dataframe/mod.rs 
b/datafusion/core/tests/dataframe/mod.rs
index 1747f9386b..bab00ced1c 100644
--- a/datafusion/core/tests/dataframe/mod.rs
+++ b/datafusion/core/tests/dataframe/mod.rs
@@ -4802,7 +4802,7 @@ async fn unnest_with_redundant_columns() -> Result<()> {
         @r"
     Projection: shapes.shape_id [shape_id:UInt32]
       Unnest: lists[shape_id2|depth=1] structs[] [shape_id:UInt32, 
shape_id2:UInt32;N]
-        Aggregate: groupBy=[[shapes.shape_id]], 
aggr=[[array_agg(shapes.shape_id) AS shape_id2]] [shape_id:UInt32, 
shape_id2:List(Field { data_type: UInt32, nullable: true });N]
+        Aggregate: groupBy=[[shapes.shape_id]], 
aggr=[[array_agg(shapes.shape_id) AS shape_id2]] [shape_id:UInt32, 
shape_id2:List(UInt32);N]
           TableScan: shapes projection=[shape_id] [shape_id:UInt32]
     "
     );
diff --git a/datafusion/datasource/src/sink.rs 
b/datafusion/datasource/src/sink.rs
index 5460a0ffdc..5acc89722b 100644
--- a/datafusion/datasource/src/sink.rs
+++ b/datafusion/datasource/src/sink.rs
@@ -94,7 +94,7 @@ pub struct DataSinkExec {
 
 impl Debug for DataSinkExec {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        write!(f, "DataSinkExec schema: {:?}", self.count_schema)
+        write!(f, "DataSinkExec schema: {}", self.count_schema)
     }
 }
 
diff --git a/datafusion/expr/src/logical_plan/display.rs 
b/datafusion/expr/src/logical_plan/display.rs
index 480974b055..58c7feb616 100644
--- a/datafusion/expr/src/logical_plan/display.rs
+++ b/datafusion/expr/src/logical_plan/display.rs
@@ -117,13 +117,7 @@ pub fn display_schema(schema: &Schema) -> impl 
fmt::Display + '_ {
                     write!(f, ", ")?;
                 }
                 let nullable_str = if field.is_nullable() { ";N" } else { "" };
-                write!(
-                    f,
-                    "{}:{:?}{}",
-                    field.name(),
-                    field.data_type(),
-                    nullable_str
-                )?;
+                write!(f, "{}:{}{}", field.name(), field.data_type(), 
nullable_str)?;
             }
             write!(f, "]")
         }
diff --git a/datafusion/expr/src/type_coercion/functions.rs 
b/datafusion/expr/src/type_coercion/functions.rs
index 34147d1973..e6a1b53418 100644
--- a/datafusion/expr/src/type_coercion/functions.rs
+++ b/datafusion/expr/src/type_coercion/functions.rs
@@ -1068,7 +1068,7 @@ mod tests {
         .unwrap_err();
         assert_contains!(
             got.to_string(),
-            "Function 'test' expects NativeType::Numeric but received 
NativeType::Timestamp(Second, None)"
+            "Function 'test' expects NativeType::Numeric but received 
NativeType::Timestamp(s)"
         );
 
         Ok(())
diff --git a/datafusion/functions-nested/src/array_has.rs 
b/datafusion/functions-nested/src/array_has.rs
index 97671d4a95..abc0e7406b 100644
--- a/datafusion/functions-nested/src/array_has.rs
+++ b/datafusion/functions-nested/src/array_has.rs
@@ -262,7 +262,7 @@ impl<'a> TryFrom<&'a dyn Array> for ArrayWrapper<'a> {
             DataType::FixedSizeList(_, _) => Ok(ArrayWrapper::FixedSizeList(
                 as_fixed_size_list_array(value)?,
             )),
-            _ => exec_err!("array_has does not support type '{:?}'.", 
value.data_type()),
+            _ => exec_err!("array_has does not support type '{}'.", 
value.data_type()),
         }
     }
 }
diff --git a/datafusion/functions-nested/src/flatten.rs 
b/datafusion/functions-nested/src/flatten.rs
index 33b3e102ae..8c21348507 100644
--- a/datafusion/functions-nested/src/flatten.rs
+++ b/datafusion/functions-nested/src/flatten.rs
@@ -208,7 +208,7 @@ fn flatten_inner(args: &[ArrayRef]) -> Result<ArrayRef> {
         }
         Null => Ok(Arc::clone(array)),
         _ => {
-            exec_err!("flatten does not support type '{:?}'", 
array.data_type())
+            exec_err!("flatten does not support type '{}'", array.data_type())
         }
     }
 }
diff --git a/datafusion/functions-nested/src/map.rs 
b/datafusion/functions-nested/src/map.rs
index a96bbc0589..7df131cf5e 100644
--- a/datafusion/functions-nested/src/map.rs
+++ b/datafusion/functions-nested/src/map.rs
@@ -119,7 +119,7 @@ fn get_first_array_ref(columnar_value: &ColumnarValue) -> 
Result<ArrayRef> {
             ScalarValue::List(array) => Ok(array.value(0)),
             ScalarValue::LargeList(array) => Ok(array.value(0)),
             ScalarValue::FixedSizeList(array) => Ok(array.value(0)),
-            _ => exec_err!("Expected array, got {:?}", value),
+            _ => exec_err!("Expected array, got {}", value),
         },
         ColumnarValue::Array(array) => Ok(array.to_owned()),
     }
diff --git a/datafusion/functions-nested/src/utils.rs 
b/datafusion/functions-nested/src/utils.rs
index d2a69c010e..4a9fffa7d1 100644
--- a/datafusion/functions-nested/src/utils.rs
+++ b/datafusion/functions-nested/src/utils.rs
@@ -260,7 +260,7 @@ pub(crate) fn get_map_entry_field(data_type: &DataType) -> 
Result<&Fields> {
             match field_data_type {
                 DataType::Struct(fields) => Ok(fields),
                 _ => {
-                    internal_err!("Expected a Struct type, got {:?}", 
field_data_type)
+                    internal_err!("Expected a Struct type, got {}", 
field_data_type)
                 }
             }
         }
diff --git a/datafusion/functions/src/core/union_tag.rs 
b/datafusion/functions/src/core/union_tag.rs
index 809679dea6..fac5c82691 100644
--- a/datafusion/functions/src/core/union_tag.rs
+++ b/datafusion/functions/src/core/union_tag.rs
@@ -143,7 +143,7 @@ impl ScalarUDFImpl for UnionTagFunc {
                     args.return_field.data_type(),
                 )?)),
             },
-            v => exec_err!("union_tag only support unions, got {:?}", 
v.data_type()),
+            v => exec_err!("union_tag only support unions, got {}", 
v.data_type()),
         }
     }
 
diff --git a/datafusion/functions/src/datetime/to_char.rs 
b/datafusion/functions/src/datetime/to_char.rs
index 8d0c47cfe6..2c6f823545 100644
--- a/datafusion/functions/src/datetime/to_char.rs
+++ b/datafusion/functions/src/datetime/to_char.rs
@@ -153,7 +153,7 @@ impl ScalarUDFImpl for ToCharFunc {
             ColumnarValue::Array(_) => to_char_array(&args),
             _ => {
                 exec_err!(
-                    "Format for `to_char` must be non-null Utf8, received 
{:?}",
+                    "Format for `to_char` must be non-null Utf8, received {}",
                     format.data_type()
                 )
             }
@@ -814,7 +814,7 @@ mod tests {
         let result = ToCharFunc::new().invoke_with_args(args);
         assert_eq!(
             result.err().unwrap().strip_backtrace(),
-            "Execution error: Format for `to_char` must be non-null Utf8, 
received Timestamp(Nanosecond, None)"
+            "Execution error: Format for `to_char` must be non-null Utf8, 
received Timestamp(ns)"
         );
     }
 }
diff --git a/datafusion/optimizer/src/decorrelate_predicate_subquery.rs 
b/datafusion/optimizer/src/decorrelate_predicate_subquery.rs
index b2742719cb..b9d160d555 100644
--- a/datafusion/optimizer/src/decorrelate_predicate_subquery.rs
+++ b/datafusion/optimizer/src/decorrelate_predicate_subquery.rs
@@ -2041,7 +2041,7 @@ mod tests {
             TableScan: test [a:UInt32, b:UInt32, c:UInt32]
             SubqueryAlias: __correlated_sq_1 [arr:Int32;N]
               Unnest: lists[sq.arr|depth=1] structs[] [arr:Int32;N]
-                TableScan: sq [arr:List(Field { data_type: Int32, nullable: 
true });N]
+                TableScan: sq [arr:List(Int32);N]
         "
         )
     }
@@ -2076,7 +2076,7 @@ mod tests {
             TableScan: test [a:UInt32, b:UInt32, c:UInt32]
             SubqueryAlias: __correlated_sq_1 [a:UInt32;N]
               Unnest: lists[sq.a|depth=1] structs[] [a:UInt32;N]
-                TableScan: sq [a:List(Field { data_type: UInt32, nullable: 
true });N]
+                TableScan: sq [a:List(UInt32);N]
         "
         )
     }
diff --git a/datafusion/physical-expr/src/equivalence/properties/mod.rs 
b/datafusion/physical-expr/src/equivalence/properties/mod.rs
index 70f97139f8..996bc4b08f 100644
--- a/datafusion/physical-expr/src/equivalence/properties/mod.rs
+++ b/datafusion/physical-expr/src/equivalence/properties/mod.rs
@@ -1277,7 +1277,7 @@ impl EquivalenceProperties {
             // Rewriting equivalence properties in terms of new schema is not
             // safe when schemas are not aligned:
             return plan_err!(
-                "Schemas have to be aligned to rewrite equivalences:\n Old 
schema: {:?}\n New schema: {:?}",
+                "Schemas have to be aligned to rewrite equivalences:\n Old 
schema: {}\n New schema: {}",
                 self.schema,
                 schema
             );
diff --git a/datafusion/physical-expr/src/expressions/cast.rs 
b/datafusion/physical-expr/src/expressions/cast.rs
index f679a9587c..6fced231f3 100644
--- a/datafusion/physical-expr/src/expressions/cast.rs
+++ b/datafusion/physical-expr/src/expressions/cast.rs
@@ -146,7 +146,7 @@ impl CastExpr {
 
 impl fmt::Display for CastExpr {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        write!(f, "CAST({} AS {:?})", self.expr, self.cast_type)
+        write!(f, "CAST({} AS {})", self.expr, self.cast_type)
     }
 }
 
@@ -312,10 +312,7 @@ mod tests {
                 cast_with_options(col("a", &schema)?, &schema, $TYPE, 
$CAST_OPTIONS)?;
 
             // verify that its display is correct
-            assert_eq!(
-                format!("CAST(a@0 AS {:?})", $TYPE),
-                format!("{}", expression)
-            );
+            assert_eq!(format!("CAST(a@0 AS {})", $TYPE), format!("{}", 
expression));
 
             // verify that the expression's type is correct
             assert_eq!(expression.data_type(&schema)?, $TYPE);
@@ -364,10 +361,7 @@ mod tests {
                 cast_with_options(col("a", &schema)?, &schema, $TYPE, 
$CAST_OPTIONS)?;
 
             // verify that its display is correct
-            assert_eq!(
-                format!("CAST(a@0 AS {:?})", $TYPE),
-                format!("{}", expression)
-            );
+            assert_eq!(format!("CAST(a@0 AS {})", $TYPE), format!("{}", 
expression));
 
             // verify that the expression's type is correct
             assert_eq!(expression.data_type(&schema)?, $TYPE);
diff --git a/datafusion/physical-expr/src/expressions/cast_column.rs 
b/datafusion/physical-expr/src/expressions/cast_column.rs
index 3dc0293da8..d80b6f4a58 100644
--- a/datafusion/physical-expr/src/expressions/cast_column.rs
+++ b/datafusion/physical-expr/src/expressions/cast_column.rs
@@ -114,7 +114,7 @@ impl Display for CastColumnExpr {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(
             f,
-            "CAST_COLUMN({} AS {:?})",
+            "CAST_COLUMN({} AS {})",
             self.expr,
             self.target_field.data_type()
         )
diff --git a/datafusion/physical-expr/src/expressions/try_cast.rs 
b/datafusion/physical-expr/src/expressions/try_cast.rs
index c9ace3239c..c63550f430 100644
--- a/datafusion/physical-expr/src/expressions/try_cast.rs
+++ b/datafusion/physical-expr/src/expressions/try_cast.rs
@@ -72,7 +72,7 @@ impl TryCastExpr {
 
 impl fmt::Display for TryCastExpr {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        write!(f, "TRY_CAST({} AS {:?})", self.expr, self.cast_type)
+        write!(f, "TRY_CAST({} AS {})", self.expr, self.cast_type)
     }
 }
 
@@ -180,7 +180,7 @@ mod tests {
 
             // verify that its display is correct
             assert_eq!(
-                format!("TRY_CAST(a@0 AS {:?})", $TYPE),
+                format!("TRY_CAST(a@0 AS {})", $TYPE),
                 format!("{}", expression)
             );
 
@@ -231,7 +231,7 @@ mod tests {
 
             // verify that its display is correct
             assert_eq!(
-                format!("TRY_CAST(a@0 AS {:?})", $TYPE),
+                format!("TRY_CAST(a@0 AS {})", $TYPE),
                 format!("{}", expression)
             );
 
diff --git a/datafusion/spark/src/function/string/format_string.rs 
b/datafusion/spark/src/function/string/format_string.rs
index 73de985109..8ab87196fd 100644
--- a/datafusion/spark/src/function/string/format_string.rs
+++ b/datafusion/spark/src/function/string/format_string.rs
@@ -1431,7 +1431,7 @@ impl ConversionSpecifier {
                 let value = "null".to_string();
                 self.format_string(string, &value)
             }
-            _ => exec_err!("Invalid scalar value: {:?}", value),
+            _ => exec_err!("Invalid scalar value: {value}"),
         }
     }
 
diff --git a/datafusion/spark/src/function/url/parse_url.rs 
b/datafusion/spark/src/function/url/parse_url.rs
index e82ef28045..7beb02f775 100644
--- a/datafusion/spark/src/function/url/parse_url.rs
+++ b/datafusion/spark/src/function/url/parse_url.rs
@@ -217,7 +217,12 @@ pub fn spark_handled_parse_url(
                     handler_err,
                 )
             }
-            _ => exec_err!("{} expects STRING arguments, got {:?}", 
"`parse_url`", args),
+            _ => exec_err!(
+                "`parse_url` expects STRING arguments, got ({}, {}, {})",
+                url.data_type(),
+                part.data_type(),
+                key.data_type()
+            ),
         }
     } else {
         // The 'key' argument is omitted, assume all values are null
@@ -253,7 +258,11 @@ pub fn spark_handled_parse_url(
                     handler_err,
                 )
             }
-            _ => exec_err!("{} expects STRING arguments, got {:?}", 
"`parse_url`", args),
+            _ => exec_err!(
+                "`parse_url` expects STRING arguments, got ({}, {})",
+                url.data_type(),
+                part.data_type()
+            ),
         }
     }
 }
diff --git a/datafusion/sqllogictest/test_files/aggregate.slt 
b/datafusion/sqllogictest/test_files/aggregate.slt
index e911a16be7..036bb93283 100644
--- a/datafusion/sqllogictest/test_files/aggregate.slt
+++ b/datafusion/sqllogictest/test_files/aggregate.slt
@@ -5501,10 +5501,10 @@ as values
 statement ok
 create table t as
 select
-  arrow_cast(column1, 'Timestamp(Nanosecond, None)') as nanos,
-  arrow_cast(column1, 'Timestamp(Microsecond, None)') as micros,
-  arrow_cast(column1, 'Timestamp(Millisecond, None)') as millis,
-  arrow_cast(column1, 'Timestamp(Second, None)') as secs,
+  arrow_cast(column1, 'Timestamp(ns)') as nanos,
+  arrow_cast(column1, 'Timestamp(µs)') as micros,
+  arrow_cast(column1, 'Timestamp(ms)') as millis,
+  arrow_cast(column1, 'Timestamp(s)') as secs,
   arrow_cast(column1, 'Timestamp(Nanosecond, Some("UTC"))') as nanos_utc,
   arrow_cast(column1, 'Timestamp(Microsecond, Some("UTC"))') as micros_utc,
   arrow_cast(column1, 'Timestamp(Millisecond, Some("UTC"))') as millis_utc,
@@ -5587,7 +5587,7 @@ SELECT tag, avg(nanos), avg(micros), avg(millis), 
avg(secs) FROM t GROUP BY tag
 
 # aggregate_duration_array_agg
 query T?
-SELECT tag, array_agg(millis - arrow_cast(secs, 'Timestamp(Millisecond, 
None)')) FROM t GROUP BY tag ORDER BY tag;
+SELECT tag, array_agg(millis - arrow_cast(secs, 'Timestamp(ms)')) FROM t GROUP 
BY tag ORDER BY tag;
 ----
 X [0 days 0 hours 0 mins 0.011 secs, 0 days 0 hours 0 mins 0.123 secs]
 Y [NULL, 0 days 0 hours 0 mins 0.432 secs]
diff --git a/datafusion/sqllogictest/test_files/array.slt 
b/datafusion/sqllogictest/test_files/array.slt
index 1640cbbf7f..e0a9746793 100644
--- a/datafusion/sqllogictest/test_files/array.slt
+++ b/datafusion/sqllogictest/test_files/array.slt
@@ -7220,12 +7220,12 @@ select generate_series('2021-01-01'::timestamp, 
'2021-01-01T15:00:00'::timestamp
 
 # Other timestamp types are coerced to nanosecond
 query ?
-select generate_series(arrow_cast('2021-01-01'::timestamp, 'Timestamp(Second, 
None)'), '2021-01-01T15:00:00'::timestamp, INTERVAL '1' HOUR);
+select generate_series(arrow_cast('2021-01-01'::timestamp, 'Timestamp(s)'), 
'2021-01-01T15:00:00'::timestamp, INTERVAL '1' HOUR);
 ----
 [2021-01-01T00:00:00, 2021-01-01T01:00:00, 2021-01-01T02:00:00, 
2021-01-01T03:00:00, 2021-01-01T04:00:00, 2021-01-01T05:00:00, 
2021-01-01T06:00:00, 2021-01-01T07:00:00, 2021-01-01T08:00:00, 
2021-01-01T09:00:00, 2021-01-01T10:00:00, 2021-01-01T11:00:00, 
2021-01-01T12:00:00, 2021-01-01T13:00:00, 2021-01-01T14:00:00, 
2021-01-01T15:00:00]
 
 query ?
-select generate_series('2021-01-01'::timestamp, 
arrow_cast('2021-01-01T15:00:00'::timestamp, 'Timestamp(Microsecond, None)'), 
INTERVAL '1' HOUR);
+select generate_series('2021-01-01'::timestamp, 
arrow_cast('2021-01-01T15:00:00'::timestamp, 'Timestamp(µs)'), INTERVAL '1' 
HOUR);
 ----
 [2021-01-01T00:00:00, 2021-01-01T01:00:00, 2021-01-01T02:00:00, 
2021-01-01T03:00:00, 2021-01-01T04:00:00, 2021-01-01T05:00:00, 
2021-01-01T06:00:00, 2021-01-01T07:00:00, 2021-01-01T08:00:00, 
2021-01-01T09:00:00, 2021-01-01T10:00:00, 2021-01-01T11:00:00, 
2021-01-01T12:00:00, 2021-01-01T13:00:00, 2021-01-01T14:00:00, 
2021-01-01T15:00:00]
 
diff --git a/datafusion/sqllogictest/test_files/arrow_typeof.slt 
b/datafusion/sqllogictest/test_files/arrow_typeof.slt
index d6f9ffadcf..0c69e8591c 100644
--- a/datafusion/sqllogictest/test_files/arrow_typeof.slt
+++ b/datafusion/sqllogictest/test_files/arrow_typeof.slt
@@ -123,10 +123,10 @@ SELECT
   arrow_typeof(arrow_cast('foo', 'Utf8View')) as col_utf8_view,
   arrow_typeof(arrow_cast('foo', 'Binary')) as col_binary,
   arrow_typeof(arrow_cast('foo', 'LargeBinary')) as col_large_binary,
-  arrow_typeof(arrow_cast(to_timestamp('2020-01-02 01:01:11.1234567890Z'), 
'Timestamp(Second, None)')) as col_ts_s,
-  arrow_typeof(arrow_cast(to_timestamp('2020-01-02 01:01:11.1234567890Z'), 
'Timestamp(Millisecond, None)')) as col_ts_ms,
-  arrow_typeof(arrow_cast(to_timestamp('2020-01-02 01:01:11.1234567890Z'), 
'Timestamp(Microsecond, None)')) as col_ts_us,
-  arrow_typeof(arrow_cast(to_timestamp('2020-01-02 01:01:11.1234567890Z'), 
'Timestamp(Nanosecond, None)')) as col_ts_ns,
+  arrow_typeof(arrow_cast(to_timestamp('2020-01-02 01:01:11.1234567890Z'), 
'Timestamp(s)')) as col_ts_s,
+  arrow_typeof(arrow_cast(to_timestamp('2020-01-02 01:01:11.1234567890Z'), 
'Timestamp(ms)')) as col_ts_ms,
+  arrow_typeof(arrow_cast(to_timestamp('2020-01-02 01:01:11.1234567890Z'), 
'Timestamp(µs)')) as col_ts_us,
+  arrow_typeof(arrow_cast(to_timestamp('2020-01-02 01:01:11.1234567890Z'), 
'Timestamp(ns)')) as col_ts_ns,
   arrow_typeof(arrow_cast(to_timestamp('2020-01-02 01:01:11.1234567890Z'), 
'Timestamp(Second, Some("+08:00"))')) as col_tstz_s,
   arrow_typeof(arrow_cast(to_timestamp('2020-01-02 01:01:11.1234567890Z'), 
'Timestamp(Millisecond, Some("+08:00"))')) as col_tstz_ms,
   arrow_typeof(arrow_cast(to_timestamp('2020-01-02 01:01:11.1234567890Z'), 
'Timestamp(Microsecond, Some("+08:00"))')) as col_tstz_us,
@@ -242,10 +242,10 @@ drop table foo
 
 statement ok
 create table foo as select
-  arrow_cast(to_timestamp('2020-01-02 01:01:11.1234567890Z'), 
'Timestamp(Second, None)') as col_ts_s,
-  arrow_cast(to_timestamp('2020-01-02 01:01:11.1234567890Z'), 
'Timestamp(Millisecond, None)') as col_ts_ms,
-  arrow_cast(to_timestamp('2020-01-02 01:01:11.1234567890Z'), 
'Timestamp(Microsecond, None)') as col_ts_us,
-  arrow_cast(to_timestamp('2020-01-02 01:01:11.1234567890Z'), 
'Timestamp(Nanosecond, None)') as col_ts_ns
+  arrow_cast(to_timestamp('2020-01-02 01:01:11.1234567890Z'), 'Timestamp(s)') 
as col_ts_s,
+  arrow_cast(to_timestamp('2020-01-02 01:01:11.1234567890Z'), 'Timestamp(ms)') 
as col_ts_ms,
+  arrow_cast(to_timestamp('2020-01-02 01:01:11.1234567890Z'), 'Timestamp(µs)') 
as col_ts_us,
+  arrow_cast(to_timestamp('2020-01-02 01:01:11.1234567890Z'), 'Timestamp(ns)') 
as col_ts_ns
 ;
 
 ## Ensure each column in the table has the expected type
diff --git a/datafusion/sqllogictest/test_files/datetime/date_part.slt 
b/datafusion/sqllogictest/test_files/datetime/date_part.slt
index 315572eb2b..019a988a9d 100644
--- a/datafusion/sqllogictest/test_files/datetime/date_part.slt
+++ b/datafusion/sqllogictest/test_files/datetime/date_part.slt
@@ -40,30 +40,32 @@ with t as (values
 )
 SELECT
   -- nanoseconds, with no, utc, and local timezone
-  arrow_cast(column1, 'Timestamp(Nanosecond, None)') as ts_nano_no_tz,
+  arrow_cast(column1, 'Timestamp(ns)') as ts_nano_no_tz,
+  arrow_cast(column1, 'Timestamp(Nanosecond, None)') as 
ts_nano_no_tz_old_format,
   arrow_cast(column1, 'Timestamp(Nanosecond, Some("UTC"))') as ts_nano_utc,
   arrow_cast(column1, 'Timestamp(Nanosecond, Some("America/New_York"))') as 
ts_nano_eastern,
   -- milliseconds, with no, utc, and local timezone
-  arrow_cast(column1, 'Timestamp(Millisecond, None)') as ts_milli_no_tz,
+  arrow_cast(column1, 'Timestamp(ms)') as ts_milli_no_tz,
+  arrow_cast(column1, 'Timestamp(Millisecond, None)') as 
ts_milli_no_tz_old_format,
   arrow_cast(column1, 'Timestamp(Millisecond, Some("UTC"))') as ts_milli_utc,
   arrow_cast(column1, 'Timestamp(Millisecond, Some("America/New_York"))') as 
ts_milli_eastern
 FROM t;
 
 
-query PPPPPP
+query PPPPPPPP
 SELECT * FROM source_ts;
 ----
-2020-01-01T00:00:00 2020-01-01T00:00:00Z 2019-12-31T19:00:00-05:00 
2020-01-01T00:00:00 2020-01-01T00:00:00Z 2019-12-31T19:00:00-05:00
-2021-01-01T00:00:00 2021-01-01T00:00:00Z 2020-12-31T19:00:00-05:00 
2021-01-01T00:00:00 2021-01-01T00:00:00Z 2020-12-31T19:00:00-05:00
-2020-09-01T00:00:00 2020-09-01T00:00:00Z 2020-08-31T20:00:00-04:00 
2020-09-01T00:00:00 2020-09-01T00:00:00Z 2020-08-31T20:00:00-04:00
-2020-01-25T00:00:00 2020-01-25T00:00:00Z 2020-01-24T19:00:00-05:00 
2020-01-25T00:00:00 2020-01-25T00:00:00Z 2020-01-24T19:00:00-05:00
-2020-01-24T00:00:00 2020-01-24T00:00:00Z 2020-01-23T19:00:00-05:00 
2020-01-24T00:00:00 2020-01-24T00:00:00Z 2020-01-23T19:00:00-05:00
-2020-01-01T12:00:00 2020-01-01T12:00:00Z 2020-01-01T07:00:00-05:00 
2020-01-01T12:00:00 2020-01-01T12:00:00Z 2020-01-01T07:00:00-05:00
-2020-01-01T00:30:00 2020-01-01T00:30:00Z 2019-12-31T19:30:00-05:00 
2020-01-01T00:30:00 2020-01-01T00:30:00Z 2019-12-31T19:30:00-05:00
-2020-01-01T00:00:30 2020-01-01T00:00:30Z 2019-12-31T19:00:30-05:00 
2020-01-01T00:00:30 2020-01-01T00:00:30Z 2019-12-31T19:00:30-05:00
-2020-01-01T00:00:00.123 2020-01-01T00:00:00.123Z 2019-12-31T19:00:00.123-05:00 
2020-01-01T00:00:00.123 2020-01-01T00:00:00.123Z 2019-12-31T19:00:00.123-05:00
-2020-01-01T00:00:00.123456 2020-01-01T00:00:00.123456Z 
2019-12-31T19:00:00.123456-05:00 2020-01-01T00:00:00.123 
2020-01-01T00:00:00.123Z 2019-12-31T19:00:00.123-05:00
-2020-01-01T00:00:00.123456789 2020-01-01T00:00:00.123456789Z 
2019-12-31T19:00:00.123456789-05:00 2020-01-01T00:00:00.123 
2020-01-01T00:00:00.123Z 2019-12-31T19:00:00.123-05:00
+2020-01-01T00:00:00 2020-01-01T00:00:00 2020-01-01T00:00:00Z 
2019-12-31T19:00:00-05:00 2020-01-01T00:00:00 2020-01-01T00:00:00 
2020-01-01T00:00:00Z 2019-12-31T19:00:00-05:00
+2021-01-01T00:00:00 2021-01-01T00:00:00 2021-01-01T00:00:00Z 
2020-12-31T19:00:00-05:00 2021-01-01T00:00:00 2021-01-01T00:00:00 
2021-01-01T00:00:00Z 2020-12-31T19:00:00-05:00
+2020-09-01T00:00:00 2020-09-01T00:00:00 2020-09-01T00:00:00Z 
2020-08-31T20:00:00-04:00 2020-09-01T00:00:00 2020-09-01T00:00:00 
2020-09-01T00:00:00Z 2020-08-31T20:00:00-04:00
+2020-01-25T00:00:00 2020-01-25T00:00:00 2020-01-25T00:00:00Z 
2020-01-24T19:00:00-05:00 2020-01-25T00:00:00 2020-01-25T00:00:00 
2020-01-25T00:00:00Z 2020-01-24T19:00:00-05:00
+2020-01-24T00:00:00 2020-01-24T00:00:00 2020-01-24T00:00:00Z 
2020-01-23T19:00:00-05:00 2020-01-24T00:00:00 2020-01-24T00:00:00 
2020-01-24T00:00:00Z 2020-01-23T19:00:00-05:00
+2020-01-01T12:00:00 2020-01-01T12:00:00 2020-01-01T12:00:00Z 
2020-01-01T07:00:00-05:00 2020-01-01T12:00:00 2020-01-01T12:00:00 
2020-01-01T12:00:00Z 2020-01-01T07:00:00-05:00
+2020-01-01T00:30:00 2020-01-01T00:30:00 2020-01-01T00:30:00Z 
2019-12-31T19:30:00-05:00 2020-01-01T00:30:00 2020-01-01T00:30:00 
2020-01-01T00:30:00Z 2019-12-31T19:30:00-05:00
+2020-01-01T00:00:30 2020-01-01T00:00:30 2020-01-01T00:00:30Z 
2019-12-31T19:00:30-05:00 2020-01-01T00:00:30 2020-01-01T00:00:30 
2020-01-01T00:00:30Z 2019-12-31T19:00:30-05:00
+2020-01-01T00:00:00.123 2020-01-01T00:00:00.123 2020-01-01T00:00:00.123Z 
2019-12-31T19:00:00.123-05:00 2020-01-01T00:00:00.123 2020-01-01T00:00:00.123 
2020-01-01T00:00:00.123Z 2019-12-31T19:00:00.123-05:00
+2020-01-01T00:00:00.123456 2020-01-01T00:00:00.123456 
2020-01-01T00:00:00.123456Z 2019-12-31T19:00:00.123456-05:00 
2020-01-01T00:00:00.123 2020-01-01T00:00:00.123 2020-01-01T00:00:00.123Z 
2019-12-31T19:00:00.123-05:00
+2020-01-01T00:00:00.123456789 2020-01-01T00:00:00.123456789 
2020-01-01T00:00:00.123456789Z 2019-12-31T19:00:00.123456789-05:00 
2020-01-01T00:00:00.123 2020-01-01T00:00:00.123 2020-01-01T00:00:00.123Z 
2019-12-31T19:00:00.123-05:00
 
 # date_part (year) with columns and explicit timestamp
 query IIIIII
diff --git a/datafusion/sqllogictest/test_files/datetime/timestamps.slt 
b/datafusion/sqllogictest/test_files/datetime/timestamps.slt
index f0b3a480c8..fa25994ed7 100644
--- a/datafusion/sqllogictest/test_files/datetime/timestamps.slt
+++ b/datafusion/sqllogictest/test_files/datetime/timestamps.slt
@@ -19,10 +19,10 @@
 ## Common timestamp data
 #
 # ts_data:        Int64 nanoseconds
-# ts_data_nanos:  Timestamp(Nanosecond, None)
-# ts_data_micros: Timestamp(Microsecond, None)
-# ts_data_millis: Timestamp(Millisecond, None)
-# ts_data_secs:   Timestamp(Second, None)
+# ts_data_nanos:  Timestamp(ns)
+# ts_data_micros: Timestamp(µs)
+# ts_data_millis: Timestamp(ms)
+# ts_data_secs:   Timestamp(s)
 ##########
 
 # Create timestamp tables with different precisions but the same logical values
@@ -34,16 +34,16 @@ create table ts_data(ts bigint, value int) as values
   (1599565349190855123, 3);
 
 statement ok
-create table ts_data_nanos as select arrow_cast(ts, 'Timestamp(Nanosecond, 
None)') as ts, value from ts_data;
+create table ts_data_nanos as select arrow_cast(ts, 'Timestamp(ns)') as ts, 
value from ts_data;
 
 statement ok
-create table ts_data_micros as select arrow_cast(ts / 1000, 
'Timestamp(Microsecond, None)') as ts, value from ts_data;
+create table ts_data_micros as select arrow_cast(ts / 1000, 'Timestamp(µs)') 
as ts, value from ts_data;
 
 statement ok
-create table ts_data_millis as select arrow_cast(ts / 1000000, 
'Timestamp(Millisecond, None)') as ts, value from ts_data;
+create table ts_data_millis as select arrow_cast(ts / 1000000, 
'Timestamp(ms)') as ts, value from ts_data;
 
 statement ok
-create table ts_data_secs as select arrow_cast(ts / 1000000000, 
'Timestamp(Second, None)') as ts, value from ts_data;
+create table ts_data_secs as select arrow_cast(ts / 1000000000, 
'Timestamp(s)') as ts, value from ts_data;
 
 statement ok
 create table ts_data_micros_kolkata as select arrow_cast(ts / 1000, 
'Timestamp(Microsecond, Some("Asia/Kolkata"))') as ts, value from ts_data;
@@ -1579,13 +1579,13 @@ second 2020-09-08T13:42:29
 
 # test date trunc on different timestamp scalar types and ensure they are 
consistent
 query P rowsort
-SELECT DATE_TRUNC('second', arrow_cast(TIMESTAMP '2023-08-03 14:38:50Z', 
'Timestamp(Second, None)')) as ts
+SELECT DATE_TRUNC('second', arrow_cast(TIMESTAMP '2023-08-03 14:38:50Z', 
'Timestamp(s)')) as ts
   UNION ALL
-SELECT DATE_TRUNC('second', arrow_cast(TIMESTAMP '2023-08-03 14:38:50Z', 
'Timestamp(Nanosecond, None)')) as ts
+SELECT DATE_TRUNC('second', arrow_cast(TIMESTAMP '2023-08-03 14:38:50Z', 
'Timestamp(ns)')) as ts
   UNION ALL
-SELECT DATE_TRUNC('day', arrow_cast(TIMESTAMP '2023-08-03 14:38:50Z', 
'Timestamp(Microsecond, None)')) as ts
+SELECT DATE_TRUNC('day', arrow_cast(TIMESTAMP '2023-08-03 14:38:50Z', 
'Timestamp(µs)')) as ts
   UNION ALL
-SELECT DATE_TRUNC('day', arrow_cast(TIMESTAMP '2023-08-03 14:38:50Z', 
'Timestamp(Millisecond, None)')) as ts
+SELECT DATE_TRUNC('day', arrow_cast(TIMESTAMP '2023-08-03 14:38:50Z', 
'Timestamp(ms)')) as ts
 ----
 2023-08-03T00:00:00
 2023-08-03T00:00:00
@@ -2706,7 +2706,7 @@ drop table ts_utf8_data
 ##########
 
 query B
-select arrow_cast(now(), 'Date64') < arrow_cast('2022-02-02 02:02:02', 
'Timestamp(Nanosecond, None)');
+select arrow_cast(now(), 'Date64') < arrow_cast('2022-02-02 02:02:02', 
'Timestamp(ns)');
 ----
 false
 
@@ -3640,7 +3640,7 @@ select to_char(arrow_cast(12344567890000, 
'Time64(Nanosecond)'), '%H-%M-%S %f')
 03-25-44 567890000
 
 query T
-select to_char(arrow_cast(TIMESTAMP '2023-08-03 14:38:50Z', 'Timestamp(Second, 
None)'), '%d-%m-%Y %H-%M-%S')
+select to_char(arrow_cast(TIMESTAMP '2023-08-03 14:38:50Z', 'Timestamp(s)'), 
'%d-%m-%Y %H-%M-%S')
 ----
 03-08-2023 14-38-50
 
@@ -3732,7 +3732,7 @@ select 
to_unixtime(arrow_cast(to_timestamp('2023-01-14T01:01:30'), 'Timestamp(Se
 1673638290
 
 query I
-select to_unixtime(arrow_cast(to_timestamp('2023-01-14T01:01:30'), 
'Timestamp(Millisecond, None)'));
+select to_unixtime(arrow_cast(to_timestamp('2023-01-14T01:01:30'), 
'Timestamp(ms)'));
 ----
 1673658090
 
@@ -4307,58 +4307,58 @@ SELECT CAST(CAST(one AS decimal(17,2)) AS timestamp(3)) 
AS a FROM (VALUES (1)) t
 1970-01-01T00:00:00.001
 
 query P
-SELECT arrow_cast(CAST(1   AS decimal(17,2)), 'Timestamp(Nanosecond, None)') 
AS a UNION ALL
-SELECT arrow_cast(CAST(one AS decimal(17,2)), 'Timestamp(Nanosecond, None)') 
AS a FROM (VALUES (1)) t(one);
+SELECT arrow_cast(CAST(1   AS decimal(17,2)), 'Timestamp(ns)') AS a UNION ALL
+SELECT arrow_cast(CAST(one AS decimal(17,2)), 'Timestamp(ns)') AS a FROM 
(VALUES (1)) t(one);
 ----
 1970-01-01T00:00:00.000000001
 1970-01-01T00:00:00.000000001
 
 query P
-SELECT arrow_cast(CAST(1   AS decimal(17,2)), 'Timestamp(Microsecond, None)') 
AS a UNION ALL
-SELECT arrow_cast(CAST(one AS decimal(17,2)), 'Timestamp(Microsecond, None)') 
AS a FROM (VALUES (1)) t(one);
+SELECT arrow_cast(CAST(1   AS decimal(17,2)), 'Timestamp(µs)') AS a UNION ALL
+SELECT arrow_cast(CAST(one AS decimal(17,2)), 'Timestamp(µs)') AS a FROM 
(VALUES (1)) t(one);
 ----
 1970-01-01T00:00:00.000001
 1970-01-01T00:00:00.000001
 
 query P
-SELECT arrow_cast(CAST(1   AS decimal(17,2)), 'Timestamp(Millisecond, None)') 
AS a UNION ALL
-SELECT arrow_cast(CAST(one AS decimal(17,2)), 'Timestamp(Millisecond, None)') 
AS a FROM (VALUES (1)) t(one);
+SELECT arrow_cast(CAST(1   AS decimal(17,2)), 'Timestamp(ms)') AS a UNION ALL
+SELECT arrow_cast(CAST(one AS decimal(17,2)), 'Timestamp(ms)') AS a FROM 
(VALUES (1)) t(one);
 ----
 1970-01-01T00:00:00.001
 1970-01-01T00:00:00.001
 
 query P
-SELECT arrow_cast(CAST(1   AS decimal(17,2)), 'Timestamp(Second, None)') AS a 
UNION ALL
-SELECT arrow_cast(CAST(one AS decimal(17,2)), 'Timestamp(Second, None)') AS a 
FROM (VALUES (1)) t(one);
+SELECT arrow_cast(CAST(1   AS decimal(17,2)), 'Timestamp(s)') AS a UNION ALL
+SELECT arrow_cast(CAST(one AS decimal(17,2)), 'Timestamp(s)') AS a FROM 
(VALUES (1)) t(one);
 ----
 1970-01-01T00:00:01
 1970-01-01T00:00:01
 
 
 query P
-SELECT arrow_cast(CAST(1.123 AS decimal(17,3)), 'Timestamp(Nanosecond, None)') 
AS a UNION ALL
-SELECT arrow_cast(CAST(one AS decimal(17,3)), 'Timestamp(Nanosecond, None)') 
AS a FROM (VALUES (1.123)) t(one);
+SELECT arrow_cast(CAST(1.123 AS decimal(17,3)), 'Timestamp(ns)') AS a UNION ALL
+SELECT arrow_cast(CAST(one AS decimal(17,3)), 'Timestamp(ns)') AS a FROM 
(VALUES (1.123)) t(one);
 ----
 1970-01-01T00:00:00.000000001
 1970-01-01T00:00:00.000000001
 
 query P
-SELECT arrow_cast(CAST(1.123 AS decimal(17,3)), 'Timestamp(Microsecond, 
None)') AS a UNION ALL
-SELECT arrow_cast(CAST(one AS decimal(17,3)), 'Timestamp(Microsecond, None)') 
AS a FROM (VALUES (1.123)) t(one);
+SELECT arrow_cast(CAST(1.123 AS decimal(17,3)), 'Timestamp(µs)') AS a UNION ALL
+SELECT arrow_cast(CAST(one AS decimal(17,3)), 'Timestamp(µs)') AS a FROM 
(VALUES (1.123)) t(one);
 ----
 1970-01-01T00:00:00.000001
 1970-01-01T00:00:00.000001
 
 query P
-SELECT arrow_cast(CAST(1.123 AS decimal(17,3)), 'Timestamp(Millisecond, 
None)') AS a UNION ALL
-SELECT arrow_cast(CAST(one AS decimal(17,3)), 'Timestamp(Millisecond, None)') 
AS a FROM (VALUES (1.123)) t(one);
+SELECT arrow_cast(CAST(1.123 AS decimal(17,3)), 'Timestamp(ms)') AS a UNION ALL
+SELECT arrow_cast(CAST(one AS decimal(17,3)), 'Timestamp(ms)') AS a FROM 
(VALUES (1.123)) t(one);
 ----
 1970-01-01T00:00:00.001
 1970-01-01T00:00:00.001
 
 query P
-SELECT arrow_cast(CAST(1.123 AS decimal(17,3)), 'Timestamp(Second, None)') AS 
a UNION ALL
-SELECT arrow_cast(CAST(one AS decimal(17,3)), 'Timestamp(Second, None)') AS a 
FROM (VALUES (1.123)) t(one);
+SELECT arrow_cast(CAST(1.123 AS decimal(17,3)), 'Timestamp(s)') AS a UNION ALL
+SELECT arrow_cast(CAST(one AS decimal(17,3)), 'Timestamp(s)') AS a FROM 
(VALUES (1.123)) t(one);
 ----
 1970-01-01T00:00:01
 1970-01-01T00:00:01
@@ -4410,7 +4410,7 @@ FROM ts_data_micros_kolkata
 ## Casting between timestamp with and without timezone
 ##########
 
-# Test casting from Timestamp(Nanosecond, Some("UTC")) to 
Timestamp(Nanosecond, None)
+# Test casting from Timestamp(Nanosecond, Some("UTC")) to Timestamp(ns)
 # Verifies that the underlying nanosecond values are preserved when removing 
timezone
 
 # Verify input type
@@ -4421,13 +4421,13 @@ Timestamp(ns, "UTC")
 
 # Verify output type after casting
 query T
-SELECT arrow_typeof(arrow_cast(arrow_cast(1, 'Timestamp(Nanosecond, 
Some("UTC"))'), 'Timestamp(Nanosecond, None)'));
+SELECT arrow_typeof(arrow_cast(arrow_cast(1, 'Timestamp(Nanosecond, 
Some("UTC"))'), 'Timestamp(ns)'));
 ----
 Timestamp(ns)
 
 # Verify values are preserved when casting from timestamp with timezone to 
timestamp without timezone
 query P rowsort
-SELECT arrow_cast(column1, 'Timestamp(Nanosecond, None)')
+SELECT arrow_cast(column1, 'Timestamp(ns)')
 FROM (VALUES
   (arrow_cast(1, 'Timestamp(Nanosecond, Some("UTC"))')),
   (arrow_cast(2, 'Timestamp(Nanosecond, Some("UTC"))')),
@@ -4442,18 +4442,18 @@ FROM (VALUES
 1970-01-01T00:00:00.000000004
 1970-01-01T00:00:00.000000005
 
-# Test casting from Timestamp(Nanosecond, None) to Timestamp(Nanosecond, 
Some("UTC"))
+# Test casting from Timestamp(ns) to Timestamp(Nanosecond, Some("UTC"))
 # Verifies that the underlying nanosecond values are preserved when adding 
timezone
 
 # Verify input type
 query T
-SELECT arrow_typeof(arrow_cast(1, 'Timestamp(Nanosecond, None)'));
+SELECT arrow_typeof(arrow_cast(1, 'Timestamp(ns)'));
 ----
 Timestamp(ns)
 
 # Verify output type after casting
 query T
-SELECT arrow_typeof(arrow_cast(arrow_cast(1, 'Timestamp(Nanosecond, None)'), 
'Timestamp(Nanosecond, Some("UTC"))'));
+SELECT arrow_typeof(arrow_cast(arrow_cast(1, 'Timestamp(ns)'), 
'Timestamp(Nanosecond, Some("UTC"))'));
 ----
 Timestamp(ns, "UTC")
 
@@ -4461,11 +4461,11 @@ Timestamp(ns, "UTC")
 query P rowsort
 SELECT arrow_cast(column1, 'Timestamp(Nanosecond, Some("UTC"))')
 FROM (VALUES
-  (arrow_cast(1, 'Timestamp(Nanosecond, None)')),
-  (arrow_cast(2, 'Timestamp(Nanosecond, None)')),
-  (arrow_cast(3, 'Timestamp(Nanosecond, None)')),
-  (arrow_cast(4, 'Timestamp(Nanosecond, None)')),
-  (arrow_cast(5, 'Timestamp(Nanosecond, None)'))
+  (arrow_cast(1, 'Timestamp(ns)')),
+  (arrow_cast(2, 'Timestamp(ns)')),
+  (arrow_cast(3, 'Timestamp(ns)')),
+  (arrow_cast(4, 'Timestamp(ns)')),
+  (arrow_cast(5, 'Timestamp(ns)'))
 ) t;
 ----
 1970-01-01T00:00:00.000000001Z
diff --git a/datafusion/sqllogictest/test_files/dictionary.slt 
b/datafusion/sqllogictest/test_files/dictionary.slt
index b6098758a9..511061cf82 100644
--- a/datafusion/sqllogictest/test_files/dictionary.slt
+++ b/datafusion/sqllogictest/test_files/dictionary.slt
@@ -36,7 +36,7 @@ SELECT
     arrow_cast(column3, 'Utf8') as f2,
     arrow_cast(column4, 'Utf8') as f3,
     arrow_cast(column5, 'Float64') as f4,
-    arrow_cast(column6, 'Timestamp(Nanosecond, None)') as time
+    arrow_cast(column6, 'Timestamp(ns)') as time
 FROM (
     VALUES
     -- equivalent to the following line protocol data
@@ -111,7 +111,7 @@ SELECT
     arrow_cast(column1, 'Dictionary(Int32, Utf8)') as type,
     arrow_cast(column2, 'Dictionary(Int32, Utf8)') as tag_id,
     arrow_cast(column3, 'Float64') as f5,
-    arrow_cast(column4, 'Timestamp(Nanosecond, None)') as time
+    arrow_cast(column4, 'Timestamp(ns)') as time
 FROM (
     VALUES
     -- equivalent to the following line protocol data
diff --git a/datafusion/sqllogictest/test_files/explain.slt 
b/datafusion/sqllogictest/test_files/explain.slt
index 4d6df4530f..6f615ec391 100644
--- a/datafusion/sqllogictest/test_files/explain.slt
+++ b/datafusion/sqllogictest/test_files/explain.slt
@@ -299,8 +299,8 @@ initial_physical_plan
 01)GlobalLimitExec: skip=0, fetch=10, statistics=[Rows=Exact(8), Bytes=Absent, 
[(Col[0]: ScanBytes=Exact(32)),(Col[1]: ScanBytes=Inexact(24)),(Col[2]: 
ScanBytes=Exact(32)),(Col[3]: ScanBytes=Exact(32)),(Col[4]: 
ScanBytes=Exact(32)),(Col[5]: ScanBytes=Exact(64)),(Col[6]: 
ScanBytes=Exact(32)),(Col[7]: ScanBytes=Exact(64)),(Col[8]: 
ScanBytes=Inexact(88)),(Col[9]: ScanBytes=Inexact(49)),(Col[10]: 
ScanBytes=Exact(64))]]
 02)--DataSourceExec: file_groups={1 group: 
[[WORKSPACE_ROOT/parquet-testing/data/alltypes_plain.parquet]]}, 
projection=[id, bool_col, tinyint_col, smallint_col, int_col, bigint_col, 
float_col, double_col, date_string_col, string_col, timestamp_col], limit=10, 
file_type=parquet, statistics=[Rows=Exact(8), Bytes=Absent, [(Col[0]: 
ScanBytes=Exact(32)),(Col[1]: ScanBytes=Inexact(24)),(Col[2]: 
ScanBytes=Exact(32)),(Col[3]: ScanBytes=Exact(32)),(Col[4]: 
ScanBytes=Exact(32)),(Col[5]: ScanBytes= [...]
 initial_physical_plan_with_schema
-01)GlobalLimitExec: skip=0, fetch=10, schema=[id:Int32;N, bool_col:Boolean;N, 
tinyint_col:Int32;N, smallint_col:Int32;N, int_col:Int32;N, bigint_col:Int64;N, 
float_col:Float32;N, double_col:Float64;N, date_string_col:BinaryView;N, 
string_col:BinaryView;N, timestamp_col:Timestamp(Nanosecond, None);N]
-02)--DataSourceExec: file_groups={1 group: 
[[WORKSPACE_ROOT/parquet-testing/data/alltypes_plain.parquet]]}, 
projection=[id, bool_col, tinyint_col, smallint_col, int_col, bigint_col, 
float_col, double_col, date_string_col, string_col, timestamp_col], limit=10, 
file_type=parquet, schema=[id:Int32;N, bool_col:Boolean;N, tinyint_col:Int32;N, 
smallint_col:Int32;N, int_col:Int32;N, bigint_col:Int64;N, float_col:Float32;N, 
double_col:Float64;N, date_string_col:BinaryView;N, string_col:BinaryVie [...]
+01)GlobalLimitExec: skip=0, fetch=10, schema=[id:Int32;N, bool_col:Boolean;N, 
tinyint_col:Int32;N, smallint_col:Int32;N, int_col:Int32;N, bigint_col:Int64;N, 
float_col:Float32;N, double_col:Float64;N, date_string_col:BinaryView;N, 
string_col:BinaryView;N, timestamp_col:Timestamp(ns);N]
+02)--DataSourceExec: file_groups={1 group: 
[[WORKSPACE_ROOT/parquet-testing/data/alltypes_plain.parquet]]}, 
projection=[id, bool_col, tinyint_col, smallint_col, int_col, bigint_col, 
float_col, double_col, date_string_col, string_col, timestamp_col], limit=10, 
file_type=parquet, schema=[id:Int32;N, bool_col:Boolean;N, tinyint_col:Int32;N, 
smallint_col:Int32;N, int_col:Int32;N, bigint_col:Int64;N, float_col:Float32;N, 
double_col:Float64;N, date_string_col:BinaryView;N, string_col:BinaryVie [...]
 physical_plan after OutputRequirements
 01)OutputRequirementExec: order_by=[], dist_by=Unspecified, 
statistics=[Rows=Exact(8), Bytes=Absent, [(Col[0]: 
ScanBytes=Exact(32)),(Col[1]: ScanBytes=Inexact(24)),(Col[2]: 
ScanBytes=Exact(32)),(Col[3]: ScanBytes=Exact(32)),(Col[4]: 
ScanBytes=Exact(32)),(Col[5]: ScanBytes=Exact(64)),(Col[6]: 
ScanBytes=Exact(32)),(Col[7]: ScanBytes=Exact(64)),(Col[8]: 
ScanBytes=Inexact(88)),(Col[9]: ScanBytes=Inexact(49)),(Col[10]: 
ScanBytes=Exact(64))]]
 02)--GlobalLimitExec: skip=0, fetch=10, statistics=[Rows=Exact(8), 
Bytes=Absent, [(Col[0]: ScanBytes=Exact(32)),(Col[1]: 
ScanBytes=Inexact(24)),(Col[2]: ScanBytes=Exact(32)),(Col[3]: 
ScanBytes=Exact(32)),(Col[4]: ScanBytes=Exact(32)),(Col[5]: 
ScanBytes=Exact(64)),(Col[6]: ScanBytes=Exact(32)),(Col[7]: 
ScanBytes=Exact(64)),(Col[8]: ScanBytes=Inexact(88)),(Col[9]: 
ScanBytes=Inexact(49)),(Col[10]: ScanBytes=Exact(64))]]
@@ -326,7 +326,7 @@ physical_plan after EnsureCooperative SAME TEXT AS ABOVE
 physical_plan after FilterPushdown(Post) SAME TEXT AS ABOVE
 physical_plan after SanityCheckPlan SAME TEXT AS ABOVE
 physical_plan DataSourceExec: file_groups={1 group: 
[[WORKSPACE_ROOT/parquet-testing/data/alltypes_plain.parquet]]}, 
projection=[id, bool_col, tinyint_col, smallint_col, int_col, bigint_col, 
float_col, double_col, date_string_col, string_col, timestamp_col], limit=10, 
file_type=parquet, statistics=[Rows=Exact(8), Bytes=Absent, [(Col[0]: 
ScanBytes=Exact(32)),(Col[1]: ScanBytes=Inexact(24)),(Col[2]: 
ScanBytes=Exact(32)),(Col[3]: ScanBytes=Exact(32)),(Col[4]: 
ScanBytes=Exact(32)),(Col[5]: S [...]
-physical_plan_with_schema DataSourceExec: file_groups={1 group: 
[[WORKSPACE_ROOT/parquet-testing/data/alltypes_plain.parquet]]}, 
projection=[id, bool_col, tinyint_col, smallint_col, int_col, bigint_col, 
float_col, double_col, date_string_col, string_col, timestamp_col], limit=10, 
file_type=parquet, schema=[id:Int32;N, bool_col:Boolean;N, tinyint_col:Int32;N, 
smallint_col:Int32;N, int_col:Int32;N, bigint_col:Int64;N, float_col:Float32;N, 
double_col:Float64;N, date_string_col:BinaryView;N, [...]
+physical_plan_with_schema DataSourceExec: file_groups={1 group: 
[[WORKSPACE_ROOT/parquet-testing/data/alltypes_plain.parquet]]}, 
projection=[id, bool_col, tinyint_col, smallint_col, int_col, bigint_col, 
float_col, double_col, date_string_col, string_col, timestamp_col], limit=10, 
file_type=parquet, schema=[id:Int32;N, bool_col:Boolean;N, tinyint_col:Int32;N, 
smallint_col:Int32;N, int_col:Int32;N, bigint_col:Int64;N, float_col:Float32;N, 
double_col:Float64;N, date_string_col:BinaryView;N, [...]
 
 
 statement ok
@@ -343,8 +343,8 @@ initial_physical_plan_with_stats
 01)GlobalLimitExec: skip=0, fetch=10, statistics=[Rows=Exact(8), Bytes=Absent, 
[(Col[0]: ScanBytes=Exact(32)),(Col[1]: ScanBytes=Inexact(24)),(Col[2]: 
ScanBytes=Exact(32)),(Col[3]: ScanBytes=Exact(32)),(Col[4]: 
ScanBytes=Exact(32)),(Col[5]: ScanBytes=Exact(64)),(Col[6]: 
ScanBytes=Exact(32)),(Col[7]: ScanBytes=Exact(64)),(Col[8]: 
ScanBytes=Inexact(88)),(Col[9]: ScanBytes=Inexact(49)),(Col[10]: 
ScanBytes=Exact(64))]]
 02)--DataSourceExec: file_groups={1 group: 
[[WORKSPACE_ROOT/parquet-testing/data/alltypes_plain.parquet]]}, 
projection=[id, bool_col, tinyint_col, smallint_col, int_col, bigint_col, 
float_col, double_col, date_string_col, string_col, timestamp_col], limit=10, 
file_type=parquet, statistics=[Rows=Exact(8), Bytes=Absent, [(Col[0]: 
ScanBytes=Exact(32)),(Col[1]: ScanBytes=Inexact(24)),(Col[2]: 
ScanBytes=Exact(32)),(Col[3]: ScanBytes=Exact(32)),(Col[4]: 
ScanBytes=Exact(32)),(Col[5]: ScanBytes= [...]
 initial_physical_plan_with_schema
-01)GlobalLimitExec: skip=0, fetch=10, schema=[id:Int32;N, bool_col:Boolean;N, 
tinyint_col:Int32;N, smallint_col:Int32;N, int_col:Int32;N, bigint_col:Int64;N, 
float_col:Float32;N, double_col:Float64;N, date_string_col:BinaryView;N, 
string_col:BinaryView;N, timestamp_col:Timestamp(Nanosecond, None);N]
-02)--DataSourceExec: file_groups={1 group: 
[[WORKSPACE_ROOT/parquet-testing/data/alltypes_plain.parquet]]}, 
projection=[id, bool_col, tinyint_col, smallint_col, int_col, bigint_col, 
float_col, double_col, date_string_col, string_col, timestamp_col], limit=10, 
file_type=parquet, schema=[id:Int32;N, bool_col:Boolean;N, tinyint_col:Int32;N, 
smallint_col:Int32;N, int_col:Int32;N, bigint_col:Int64;N, float_col:Float32;N, 
double_col:Float64;N, date_string_col:BinaryView;N, string_col:BinaryVie [...]
+01)GlobalLimitExec: skip=0, fetch=10, schema=[id:Int32;N, bool_col:Boolean;N, 
tinyint_col:Int32;N, smallint_col:Int32;N, int_col:Int32;N, bigint_col:Int64;N, 
float_col:Float32;N, double_col:Float64;N, date_string_col:BinaryView;N, 
string_col:BinaryView;N, timestamp_col:Timestamp(ns);N]
+02)--DataSourceExec: file_groups={1 group: 
[[WORKSPACE_ROOT/parquet-testing/data/alltypes_plain.parquet]]}, 
projection=[id, bool_col, tinyint_col, smallint_col, int_col, bigint_col, 
float_col, double_col, date_string_col, string_col, timestamp_col], limit=10, 
file_type=parquet, schema=[id:Int32;N, bool_col:Boolean;N, tinyint_col:Int32;N, 
smallint_col:Int32;N, int_col:Int32;N, bigint_col:Int64;N, float_col:Float32;N, 
double_col:Float64;N, date_string_col:BinaryView;N, string_col:BinaryVie [...]
 physical_plan after OutputRequirements
 01)OutputRequirementExec: order_by=[], dist_by=Unspecified
 02)--GlobalLimitExec: skip=0, fetch=10
@@ -371,7 +371,7 @@ physical_plan after FilterPushdown(Post) SAME TEXT AS ABOVE
 physical_plan after SanityCheckPlan SAME TEXT AS ABOVE
 physical_plan DataSourceExec: file_groups={1 group: 
[[WORKSPACE_ROOT/parquet-testing/data/alltypes_plain.parquet]]}, 
projection=[id, bool_col, tinyint_col, smallint_col, int_col, bigint_col, 
float_col, double_col, date_string_col, string_col, timestamp_col], limit=10, 
file_type=parquet
 physical_plan_with_stats DataSourceExec: file_groups={1 group: 
[[WORKSPACE_ROOT/parquet-testing/data/alltypes_plain.parquet]]}, 
projection=[id, bool_col, tinyint_col, smallint_col, int_col, bigint_col, 
float_col, double_col, date_string_col, string_col, timestamp_col], limit=10, 
file_type=parquet, statistics=[Rows=Exact(8), Bytes=Absent, [(Col[0]: 
ScanBytes=Exact(32)),(Col[1]: ScanBytes=Inexact(24)),(Col[2]: 
ScanBytes=Exact(32)),(Col[3]: ScanBytes=Exact(32)),(Col[4]: 
ScanBytes=Exact(32)) [...]
-physical_plan_with_schema DataSourceExec: file_groups={1 group: 
[[WORKSPACE_ROOT/parquet-testing/data/alltypes_plain.parquet]]}, 
projection=[id, bool_col, tinyint_col, smallint_col, int_col, bigint_col, 
float_col, double_col, date_string_col, string_col, timestamp_col], limit=10, 
file_type=parquet, schema=[id:Int32;N, bool_col:Boolean;N, tinyint_col:Int32;N, 
smallint_col:Int32;N, int_col:Int32;N, bigint_col:Int64;N, float_col:Float32;N, 
double_col:Float64;N, date_string_col:BinaryView;N, [...]
+physical_plan_with_schema DataSourceExec: file_groups={1 group: 
[[WORKSPACE_ROOT/parquet-testing/data/alltypes_plain.parquet]]}, 
projection=[id, bool_col, tinyint_col, smallint_col, int_col, bigint_col, 
float_col, double_col, date_string_col, string_col, timestamp_col], limit=10, 
file_type=parquet, schema=[id:Int32;N, bool_col:Boolean;N, tinyint_col:Int32;N, 
smallint_col:Int32;N, int_col:Int32;N, bigint_col:Int64;N, float_col:Float32;N, 
double_col:Float64;N, date_string_col:BinaryView;N, [...]
 
 
 statement ok
diff --git a/datafusion/sqllogictest/test_files/group_by.slt 
b/datafusion/sqllogictest/test_files/group_by.slt
index db4ec83f10..294841552a 100644
--- a/datafusion/sqllogictest/test_files/group_by.slt
+++ b/datafusion/sqllogictest/test_files/group_by.slt
@@ -5478,7 +5478,7 @@ create table source as values
 ;
 
 statement ok
-create view t as select column1 as a, arrow_cast(column2, 
'Timestamp(Nanosecond, None)') as b from source;
+create view t as select column1 as a, arrow_cast(column2, 'Timestamp(ns)') as 
b from source;
 
 query IPI
 select a, b, count(*) from t group by a, b order by a, b;
diff --git a/datafusion/sqllogictest/test_files/information_schema.slt 
b/datafusion/sqllogictest/test_files/information_schema.slt
index 2039ee93df..2217e7f799 100644
--- a/datafusion/sqllogictest/test_files/information_schema.slt
+++ b/datafusion/sqllogictest/test_files/information_schema.slt
@@ -799,9 +799,9 @@ select * from information_schema.routines where 
routine_name = 'date_trunc' OR r
 ----
 datafusion public date_trunc datafusion public date_trunc FUNCTION true Date 
SCALAR Truncates a timestamp or time value to a specified precision. 
date_trunc(precision, expression)
 datafusion public date_trunc datafusion public date_trunc FUNCTION true String 
SCALAR Truncates a timestamp or time value to a specified precision. 
date_trunc(precision, expression)
-datafusion public date_trunc datafusion public date_trunc FUNCTION true 
Time(Nanosecond) SCALAR Truncates a timestamp or time value to a specified 
precision. date_trunc(precision, expression)
-datafusion public date_trunc datafusion public date_trunc FUNCTION true 
Timestamp(Nanosecond, None) SCALAR Truncates a timestamp or time value to a 
specified precision. date_trunc(precision, expression)
-datafusion public date_trunc datafusion public date_trunc FUNCTION true 
Timestamp(Nanosecond, Some("+TZ")) SCALAR Truncates a timestamp or time value 
to a specified precision. date_trunc(precision, expression)
+datafusion public date_trunc datafusion public date_trunc FUNCTION true 
Time(ns) SCALAR Truncates a timestamp or time value to a specified precision. 
date_trunc(precision, expression)
+datafusion public date_trunc datafusion public date_trunc FUNCTION true 
Timestamp(ns) SCALAR Truncates a timestamp or time value to a specified 
precision. date_trunc(precision, expression)
+datafusion public date_trunc datafusion public date_trunc FUNCTION true 
Timestamp(ns, "+TZ") SCALAR Truncates a timestamp or time value to a specified 
precision. date_trunc(precision, expression)
 datafusion public rank datafusion public rank FUNCTION true NULL WINDOW 
Returns the rank of the current row within its partition, allowing gaps between 
ranks. This function provides a ranking similar to `row_number`, but skips 
ranks for identical values. rank()
 datafusion public string_agg datafusion public string_agg FUNCTION true String 
AGGREGATE Concatenates the values of string expressions and places separator 
values between them. If ordering is required, strings are concatenated in the 
specified order. This aggregation function can only mix DISTINCT and ORDER BY 
if the ordering expression is exactly the same as the first argument 
expression. string_agg([DISTINCT] expression, delimiter [ORDER BY expression])
 
@@ -821,14 +821,14 @@ datafusion public date_trunc 1 IN precision String NULL 
false 1
 datafusion public date_trunc 2 IN expression String NULL false 1
 datafusion public date_trunc 1 OUT NULL String NULL false 1
 datafusion public date_trunc 1 IN precision String NULL false 2
-datafusion public date_trunc 2 IN expression Time(Nanosecond) NULL false 2
-datafusion public date_trunc 1 OUT NULL Time(Nanosecond) NULL false 2
+datafusion public date_trunc 2 IN expression Time(ns) NULL false 2
+datafusion public date_trunc 1 OUT NULL Time(ns) NULL false 2
 datafusion public date_trunc 1 IN precision String NULL false 3
-datafusion public date_trunc 2 IN expression Timestamp(Nanosecond, None) NULL 
false 3
-datafusion public date_trunc 1 OUT NULL Timestamp(Nanosecond, None) NULL false 
3
+datafusion public date_trunc 2 IN expression Timestamp(ns) NULL false 3
+datafusion public date_trunc 1 OUT NULL Timestamp(ns) NULL false 3
 datafusion public date_trunc 1 IN precision String NULL false 4
-datafusion public date_trunc 2 IN expression Timestamp(Nanosecond, 
Some("+TZ")) NULL false 4
-datafusion public date_trunc 1 OUT NULL Timestamp(Nanosecond, Some("+TZ")) 
NULL false 4
+datafusion public date_trunc 2 IN expression Timestamp(ns, "+TZ") NULL false 4
+datafusion public date_trunc 1 OUT NULL Timestamp(ns, "+TZ") NULL false 4
 datafusion public string_agg 2 IN delimiter Null NULL false 0
 datafusion public string_agg 1 IN expression String NULL false 0
 datafusion public string_agg 1 OUT NULL String NULL false 0
@@ -856,9 +856,9 @@ show functions like 'date_trunc';
 ----
 date_trunc Date [precision, expression] [String, Date] SCALAR Truncates a 
timestamp or time value to a specified precision. date_trunc(precision, 
expression)
 date_trunc String [precision, expression] [String, String] SCALAR Truncates a 
timestamp or time value to a specified precision. date_trunc(precision, 
expression)
-date_trunc Time(Nanosecond) [precision, expression] [String, Time(Nanosecond)] 
SCALAR Truncates a timestamp or time value to a specified precision. 
date_trunc(precision, expression)
-date_trunc Timestamp(Nanosecond, None) [precision, expression] [String, 
Timestamp(Nanosecond, None)] SCALAR Truncates a timestamp or time value to a 
specified precision. date_trunc(precision, expression)
-date_trunc Timestamp(Nanosecond, Some("+TZ")) [precision, expression] [String, 
Timestamp(Nanosecond, Some("+TZ"))] SCALAR Truncates a timestamp or time value 
to a specified precision. date_trunc(precision, expression)
+date_trunc Time(ns) [precision, expression] [String, Time(ns)] SCALAR 
Truncates a timestamp or time value to a specified precision. 
date_trunc(precision, expression)
+date_trunc Timestamp(ns) [precision, expression] [String, Timestamp(ns)] 
SCALAR Truncates a timestamp or time value to a specified precision. 
date_trunc(precision, expression)
+date_trunc Timestamp(ns, "+TZ") [precision, expression] [String, Timestamp(ns, 
"+TZ")] SCALAR Truncates a timestamp or time value to a specified precision. 
date_trunc(precision, expression)
 
 statement ok
 show functions
diff --git a/datafusion/sqllogictest/test_files/joins.slt 
b/datafusion/sqllogictest/test_files/joins.slt
index ae87fd11d3..35f750f8bb 100644
--- a/datafusion/sqllogictest/test_files/joins.slt
+++ b/datafusion/sqllogictest/test_files/joins.slt
@@ -146,10 +146,10 @@ AS VALUES
 statement ok
 CREATE TABLE test_timestamps_table as
 SELECT
-  arrow_cast(ts::timestamp::bigint, 'Timestamp(Nanosecond, None)') as nanos,
-  arrow_cast(ts::timestamp::bigint / 1000, 'Timestamp(Microsecond, None)') as 
micros,
-  arrow_cast(ts::timestamp::bigint / 1000000, 'Timestamp(Millisecond, None)') 
as millis,
-  arrow_cast(ts::timestamp::bigint / 1000000000, 'Timestamp(Second, None)') as 
secs,
+  arrow_cast(ts::timestamp::bigint, 'Timestamp(ns)') as nanos,
+  arrow_cast(ts::timestamp::bigint / 1000, 'Timestamp(µs)') as micros,
+  arrow_cast(ts::timestamp::bigint / 1000000, 'Timestamp(ms)') as millis,
+  arrow_cast(ts::timestamp::bigint / 1000000000, 'Timestamp(s)') as secs,
   names
 FROM
   test_timestamps_table_source;
diff --git a/datafusion/sqllogictest/test_files/scalar.slt 
b/datafusion/sqllogictest/test_files/scalar.slt
index 39f53f865b..3e03ab00c8 100644
--- a/datafusion/sqllogictest/test_files/scalar.slt
+++ b/datafusion/sqllogictest/test_files/scalar.slt
@@ -2010,8 +2010,8 @@ D false
 # test string_temporal_coercion
 query BBBBBBBBBB
 select
-  arrow_cast(to_timestamp('2020-01-01 01:01:11.1234567890Z'), 
'Timestamp(Second, None)') == '2020-01-01T01:01:11',
-  arrow_cast(to_timestamp('2020-01-02 01:01:11.1234567890Z'), 
'Timestamp(Second, None)') == arrow_cast('2020-01-02T01:01:11', 'LargeUtf8'),
+  arrow_cast(to_timestamp('2020-01-01 01:01:11.1234567890Z'), 'Timestamp(s)') 
== '2020-01-01T01:01:11',
+  arrow_cast(to_timestamp('2020-01-02 01:01:11.1234567890Z'), 'Timestamp(s)') 
== arrow_cast('2020-01-02T01:01:11', 'LargeUtf8'),
   arrow_cast(to_timestamp('2020-01-03 01:01:11.1234567890Z'), 
'Time32(Second)') == '01:01:11',
   arrow_cast(to_timestamp('2020-01-04 01:01:11.1234567890Z'), 
'Time32(Second)') == arrow_cast('01:01:11', 'LargeUtf8'),
   arrow_cast(to_timestamp('2020-01-05 01:01:11.1234567890Z'), 
'Time64(Microsecond)') == '01:01:11.123456',
diff --git a/datafusion/sqllogictest/test_files/spark/string/format_string.slt 
b/datafusion/sqllogictest/test_files/spark/string/format_string.slt
index 048863ebfb..8ba3cfc951 100644
--- a/datafusion/sqllogictest/test_files/spark/string/format_string.slt
+++ b/datafusion/sqllogictest/test_files/spark/string/format_string.slt
@@ -931,13 +931,13 @@ Char: NULL
 
 ## NULL with timestamp format using arrow_cast
 query T
-SELECT format_string('Hour: %tH', arrow_cast(NULL, 'Timestamp(Nanosecond, 
None)'));
+SELECT format_string('Hour: %tH', arrow_cast(NULL, 'Timestamp(ns)'));
 ----
 Hour: null
 
 ## NULL with timestamp format using arrow_cast
 query T
-SELECT format_string('Month: %tB', arrow_cast(NULL, 'Timestamp(Nanosecond, 
None)'));
+SELECT format_string('Month: %tB', arrow_cast(NULL, 'Timestamp(ns)'));
 ----
 Month: null
 
@@ -967,25 +967,25 @@ Month: null
 
 ## NULL with timestamp format using arrow_cast
 query T
-SELECT format_string('Month: %tB', arrow_cast(NULL, 'Timestamp(Second, 
None)'));
+SELECT format_string('Month: %tB', arrow_cast(NULL, 'Timestamp(s)'));
 ----
 Month: null
 
 ## NULL with timestamp format using arrow_cast
 query T
-SELECT format_string('Month: %tB', arrow_cast(NULL, 'Timestamp(Millisecond, 
None)'));
+SELECT format_string('Month: %tB', arrow_cast(NULL, 'Timestamp(ms)'));
 ----
 Month: null
 
 ## NULL with timestamp format using arrow_cast
 query T
-SELECT format_string('Month: %tB', arrow_cast(NULL, 'Timestamp(Microsecond, 
None)'));
+SELECT format_string('Month: %tB', arrow_cast(NULL, 'Timestamp(µs)'));
 ----
 Month: null
 
 ## NULL with timestamp format using arrow_cast
 query T
-SELECT format_string('Month: %tB', arrow_cast(NULL, 'Timestamp(Nanosecond, 
None)'));
+SELECT format_string('Month: %tB', arrow_cast(NULL, 'Timestamp(ns)'));
 ----
 Month: null
 
@@ -1051,7 +1051,7 @@ Value: null
 
 ## NULL Timestamp with string format using arrow_cast
 query T
-SELECT format_string('Value: %s', arrow_cast(NULL, 'Timestamp(Nanosecond, 
None)'));
+SELECT format_string('Value: %s', arrow_cast(NULL, 'Timestamp(ns)'));
 ----
 Value: null
 
@@ -1717,49 +1717,49 @@ String: 52245000000000
 
 ## TimestampSecond with time formats
 query T
-SELECT format_string('Year: %tY', arrow_cast(1703512245, 'Timestamp(Second, 
None)'));
+SELECT format_string('Year: %tY', arrow_cast(1703512245, 'Timestamp(s)'));
 ----
 Year: 2023
 
 query T
-SELECT format_string('Month: %tm', arrow_cast(1703512245, 'Timestamp(Second, 
None)'));
+SELECT format_string('Month: %tm', arrow_cast(1703512245, 'Timestamp(s)'));
 ----
 Month: 12
 
 query T
-SELECT format_string('String: %s', arrow_cast(1703512245, 'Timestamp(Second, 
None)'));
+SELECT format_string('String: %s', arrow_cast(1703512245, 'Timestamp(s)'));
 ----
 String: 1703512245
 
 query T
-SELECT format_string('String: %S', arrow_cast(1703512245, 'Timestamp(Second, 
None)'));
+SELECT format_string('String: %S', arrow_cast(1703512245, 'Timestamp(s)'));
 ----
 String: 1703512245
 
 ## TimestampMillisecond with time formats
 query T
-SELECT format_string('ISO Date: %tF', arrow_cast(1703512245000, 
'Timestamp(Millisecond, None)'));
+SELECT format_string('ISO Date: %tF', arrow_cast(1703512245000, 
'Timestamp(ms)'));
 ----
 ISO Date: 2023-12-25
 
 query T
-SELECT format_string('String: %s', arrow_cast(1703512245000, 
'Timestamp(Millisecond, None)'));
+SELECT format_string('String: %s', arrow_cast(1703512245000, 'Timestamp(ms)'));
 ----
 String: 1703512245000
 
 ## TimestampMicrosecond with time formats
 query T
-SELECT format_string('Date: %tD', arrow_cast(1703512245000000, 
'Timestamp(Microsecond, None)'));
+SELECT format_string('Date: %tD', arrow_cast(1703512245000000, 
'Timestamp(µs)'));
 ----
 Date: 12/25/23
 
 query T
-SELECT format_string('String: %s', arrow_cast(1703512245000000, 
'Timestamp(Microsecond, None)'));
+SELECT format_string('String: %s', arrow_cast(1703512245000000, 
'Timestamp(µs)'));
 ----
 String: 1703512245000000
 
 query T
-SELECT format_string('String: %s', arrow_cast('2020-01-02 
01:01:11.1234567890Z', 'Timestamp(Nanosecond, None)'));
+SELECT format_string('String: %s', arrow_cast('2020-01-02 
01:01:11.1234567890Z', 'Timestamp(ns)'));
 ----
 String: 1577926871123456789
 


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to