This is an automated email from the ASF dual-hosted git repository.

stigahuang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/impala.git

commit df84e777d6f379012bd0812dad44cf2cd6cc06e7
Author: Daniel Vanko <[email protected]>
AuthorDate: Thu Dec 4 14:41:54 2025 +0100

    IMPALA-14321: Add BINARY partition transform to Iceberg tables
    
    With this change we add support for the IDENTITY, TRUNCATE and BUCKET
    partition transformation functions with a binary parameter on Iceberg
    tables.
    
    The FlatBuffers schema has changed because, when reading a string,
    FlatBuffers tries to enforce UTF-8 encoding, which fails for arbitrary
    binary data. FbIcebergDataFile's raw_partition_fields is therefore now
    an array of ubyte arrays.
    
    Testing:
    - Added TestBinary() in iceberg-functions-test.cc with truncate width
      edge cases
    - Extended iceberg-partitioned-insert-*.test files with binary_col
      partition tests
    - Verified partition pruning works correctly for BINARY predicates
      (NumFileMetadataRead metrics)
    
    Generated-by: Github Copilot (Claude Sonnet 4.5)
    Change-Id: I5fd1ef382aa064dad55445dea00fbd39caeca1d3
    Reviewed-on: http://gerrit.cloudera.org:8080/23783
    Reviewed-by: Impala Public Jenkins <[email protected]>
    Tested-by: Impala Public Jenkins <[email protected]>
---
 be/src/exec/file-metadata-utils.cc                 |  2 +-
 be/src/exprs/iceberg-functions-ir.cc               | 17 ++++-
 be/src/exprs/iceberg-functions-test.cc             | 75 ++++++++++++++++++--
 be/src/exprs/iceberg-functions.h                   |  1 +
 be/src/runtime/dml-exec-state.cc                   |  7 +-
 common/fbs/IcebergObjects.fbs                      |  6 +-
 common/function-registry/impala_functions.py       |  6 +-
 .../impala/analysis/IcebergPartitionField.java     |  4 ++
 .../org/apache/impala/analysis/ShowFilesStmt.java  |  2 +-
 .../org/apache/impala/catalog/FeIcebergTable.java  |  9 ++-
 .../impala/common/IcebergPredicateConverter.java   |  4 ++
 .../java/org/apache/impala/util/IcebergUtil.java   | 80 +++++++++++++++++++---
 .../java/org/apache/impala/util/StringUtils.java   | 10 +++
 .../queries/QueryTest/iceberg-drop-partition.test  | 45 +++++++++---
 .../iceberg-partitioned-insert-default.test        | 80 +++++++++++++---------
 .../QueryTest/iceberg-partitioned-insert-v1.test   | 80 +++++++++++++---------
 .../QueryTest/iceberg-partitioned-insert-v2.test   | 80 +++++++++++++---------
 .../QueryTest/iceberg-show-files-partition.test    | 51 ++++++++++++--
 18 files changed, 428 insertions(+), 131 deletions(-)

diff --git a/be/src/exec/file-metadata-utils.cc 
b/be/src/exec/file-metadata-utils.cc
index 11fe936fe..95fa164eb 100644
--- a/be/src/exec/file-metadata-utils.cc
+++ b/be/src/exec/file-metadata-utils.cc
@@ -133,7 +133,7 @@ void FileMetadataUtils::AddIcebergColumns(MemPool* 
mem_pool, Tuple** template_tu
       if (!text_converter.WriteSlot(slot_desc, *template_tuple,
                                     (const 
char*)transform->transform_value()->data(),
                                     transform->transform_value()->size(),
-                                    true, false,
+                                    /* copy_string = */ true, /* need_escape = 
*/ false,
                                     mem_pool)) {
         ErrorMsg error_msg(TErrorCode::GENERAL,
             Substitute("Could not parse partition value for "
diff --git a/be/src/exprs/iceberg-functions-ir.cc 
b/be/src/exprs/iceberg-functions-ir.cc
index 2029537c5..feb7e5bef 100644
--- a/be/src/exprs/iceberg-functions-ir.cc
+++ b/be/src/exprs/iceberg-functions-ir.cc
@@ -99,15 +99,26 @@ DecimalVal 
IcebergFunctions::TruncatePartitionTransformDecimalImpl(const T& deci
   return decimal_val - (((decimal_val % width) + width) % width);
 }
 
+template<bool is_binary>
 StringVal IcebergFunctions::TruncatePartitionTransform(FunctionContext* ctx,
     const StringVal& input, const IntVal& width) {
   if (!CheckInputsAndSetError(ctx, input, width)) return StringVal::null();
   if (input.len <= width.val) return input;
-  // String handled as UTF8 regardless of utf8_mode, because Iceberg spec 
states that
-  // character strings must be stored as UTF-8 encoded byte arrays.
-  return StringFunctions::Utf8Substring(ctx, input, 1, width.val);
+  if constexpr (is_binary) {
+    // String handled as binary data.
+    return StringVal::CopyFrom(ctx, input.ptr, width.val);
+  } else {
+    // String handled as UTF8 regardless of utf8_mode, because Iceberg spec 
states that
+    // character strings must be stored as UTF-8 encoded byte arrays.
+    return StringFunctions::Utf8Substring(ctx, input, 1, width.val);
+  }
 }
 
+template StringVal IcebergFunctions::TruncatePartitionTransform<true>(
+    FunctionContext*, const StringVal&, const IntVal&);
+template StringVal IcebergFunctions::TruncatePartitionTransform<false>(
+    FunctionContext*, const StringVal&, const IntVal&);
+
 template<typename T, typename W>
 T IcebergFunctions::TruncatePartitionTransformNumericImpl(FunctionContext* ctx,
     const T& input, const W& width) {
diff --git a/be/src/exprs/iceberg-functions-test.cc 
b/be/src/exprs/iceberg-functions-test.cc
index 7d7167ef5..ecd2bcf70 100644
--- a/be/src/exprs/iceberg-functions-test.cc
+++ b/be/src/exprs/iceberg-functions-test.cc
@@ -78,6 +78,8 @@ public:
   static void TestString();
 
   static void TestDecimal();
+
+  static void TestBinary();
 private:
   template<typename T>
   static void TestIntegerNumbersHelper();
@@ -158,6 +160,33 @@ void 
IcebergTruncatePartitionTransformTests::TestIntegerNumbersHelper() {
   TestIncorrectWidthParameter<T, T>(T(0));
 }
 
+template<>
+void IcebergTruncatePartitionTransformTests::TestIncorrectWidthParameter
+    <StringVal, IntVal>(const StringVal& input) {
+  // Check for error when width is zero.
+  FunctionContext* ctx = CreateFunctionContext();
+  StringVal ret_val =
+      IcebergFunctions::TruncatePartitionTransform<false>(ctx, input, 
IntVal(0));
+  EXPECT_TRUE(ret_val.is_null);
+  EXPECT_EQ(strcmp("Width parameter should be greater than zero.", 
ctx->error_msg()), 0);
+  ctx->impl()->Close();
+
+  // Check for error when width is negative.
+  ctx = CreateFunctionContext();
+  ret_val = IcebergFunctions::TruncatePartitionTransform<false>(ctx, input, 
IntVal(-1));
+  EXPECT_TRUE(ret_val.is_null);
+  EXPECT_EQ(strcmp("Width parameter should be greater than zero.", 
ctx->error_msg()), 0);
+  ctx->impl()->Close();
+
+  // Check for error when width is null.
+  ctx = CreateFunctionContext();
+  ret_val =
+      IcebergFunctions::TruncatePartitionTransform<false>(ctx, input, 
IntVal::null());
+  EXPECT_TRUE(ret_val.is_null);
+  EXPECT_EQ(strcmp("Width parameter should be greater than zero.", 
ctx->error_msg()), 0);
+  ctx->impl()->Close();
+}
+
 void IcebergTruncatePartitionTransformTests::TestString() {
   MemTracker m;
   MemPool pool(&m);
@@ -165,29 +194,29 @@ void IcebergTruncatePartitionTransformTests::TestString() 
{
   FunctionContext* ctx = CreateFunctionContext(&pool);
 
   int width = 10;
-  StringVal ret_val = IcebergFunctions::TruncatePartitionTransform(
+  StringVal ret_val = IcebergFunctions::TruncatePartitionTransform<false>(
       ctx, input, IntVal(width));
   EXPECT_EQ(ret_val.len, width);
   EXPECT_EQ(strncmp((char*)input.ptr, (char*)ret_val.ptr, width), 0);
 
   // Truncate width is longer than the input string.
-  ret_val = IcebergFunctions::TruncatePartitionTransform(ctx, input, 100);
+  ret_val = IcebergFunctions::TruncatePartitionTransform<false>(ctx, input, 
100);
   EXPECT_EQ(ret_val.len, input.len);
   EXPECT_EQ(strncmp((char*)input.ptr, (char*)ret_val.ptr, input.len), 0);
 
   // Truncate width is the same as the the input string length.
-  ret_val = IcebergFunctions::TruncatePartitionTransform(ctx, input, 
input.len);
+  ret_val = IcebergFunctions::TruncatePartitionTransform<false>(ctx, input, 
input.len);
   EXPECT_EQ(ret_val.len, input.len);
   EXPECT_EQ(strncmp((char*)input.ptr, (char*)ret_val.ptr, input.len), 0);
 
   // Test NULL input.
-  ret_val = IcebergFunctions::TruncatePartitionTransform(
+  ret_val = IcebergFunctions::TruncatePartitionTransform<false>(
       nullptr, StringVal::null(), IntVal(10));
   EXPECT_TRUE(ret_val.is_null);
 
   // Test empty string input.
   ctx = CreateFunctionContext(&pool);
-  ret_val = IcebergFunctions::TruncatePartitionTransform(ctx, "", 10);
+  ret_val = IcebergFunctions::TruncatePartitionTransform<false>(ctx, "", 10);
   EXPECT_EQ(ret_val.len, 0);
 
   TestIncorrectWidthParameter<StringVal, IntVal>(StringVal("input"));
@@ -195,6 +224,41 @@ void IcebergTruncatePartitionTransformTests::TestString() {
   pool.FreeAll();
 }
 
+void IcebergTruncatePartitionTransformTests::TestBinary() {
+  MemTracker m;
+  MemPool pool(&m);
+
+  // UTF-8 compatible bytes: "Hello " + UTF-8 encoded "世界" (world in Chinese) 
+ "!"
+  uint8_t binary_data[] = {0x48, 0x65, 0x6C, 0x6C, 0x6F, 0x20, 0xE4, 0xB8,
+                           0x96, 0xE7, 0x95, 0x8C, 0x21};
+  StringVal input(binary_data, sizeof(binary_data));
+  FunctionContext* ctx = CreateFunctionContext(&pool);
+
+  for (size_t width = 1; width <= sizeof(binary_data); ++width) {
+    StringVal ret_val = IcebergFunctions::TruncatePartitionTransform<true>(
+        ctx, input, IntVal(width));
+    EXPECT_EQ(ret_val.len, width);
+    EXPECT_EQ(memcmp((char*)input.ptr, (char*)ret_val.ptr, width), 0);
+  }
+
+  // Truncate width is longer than the input binary.
+  StringVal ret_val = IcebergFunctions::TruncatePartitionTransform<true>(ctx, 
input, 100);
+  EXPECT_EQ(ret_val.len, input.len);
+  EXPECT_EQ(memcmp((char*)input.ptr, (char*)ret_val.ptr, input.len), 0);
+
+  // Test NULL input.
+  ret_val = IcebergFunctions::TruncatePartitionTransform<true>(
+      nullptr, StringVal::null(), IntVal(10));
+  EXPECT_TRUE(ret_val.is_null);
+
+  // Test empty string input.
+  ctx = CreateFunctionContext(&pool);
+  ret_val = IcebergFunctions::TruncatePartitionTransform<true>(ctx, "", 10);
+  EXPECT_EQ(ret_val.len, 0);
+
+  pool.FreeAll();
+}
+
 void IcebergTruncatePartitionTransformTests::TestDecimal() {
   // Testing decimal in unit tests by invoking TruncatePartitionTransform seems
   // problematic as it queries ARG_TYPE_SIZE from FunctionContext. Apparently, 
it is not
@@ -580,6 +644,7 @@ TEST(TestIcebergFunctions, TruncateTransform) {
   IcebergTruncatePartitionTransformTests::TestIntegerNumbers();
   IcebergTruncatePartitionTransformTests::TestString();
   IcebergTruncatePartitionTransformTests::TestDecimal();
+  IcebergTruncatePartitionTransformTests::TestBinary();
 }
 
 TEST(TestIcebergFunctions, BucketTransform) {
diff --git a/be/src/exprs/iceberg-functions.h b/be/src/exprs/iceberg-functions.h
index 01aa0e640..7b305a239 100644
--- a/be/src/exprs/iceberg-functions.h
+++ b/be/src/exprs/iceberg-functions.h
@@ -46,6 +46,7 @@ public:
       const DecimalVal& input, const IntVal& width);
   static DecimalVal TruncatePartitionTransform(FunctionContext* ctx,
       const DecimalVal& input, const BigIntVal& width);
+  template<bool is_binary>
   static StringVal TruncatePartitionTransform(FunctionContext* ctx,
       const StringVal& input, const IntVal& width);
 
diff --git a/be/src/runtime/dml-exec-state.cc b/be/src/runtime/dml-exec-state.cc
index 6c5024338..408a760d5 100644
--- a/be/src/runtime/dml-exec-state.cc
+++ b/be/src/runtime/dml-exec-state.cc
@@ -585,9 +585,12 @@ string createIcebergDataFileString(
     ice_col_stats_vec.push_back(createIcebergColumnStats(fbb, it->first, 
it->second));
   }
 
-  vector<flatbuffers::Offset<flatbuffers::String>> raw_partition_fields;
+  vector<flatbuffers::Offset<FbIcebergPartitionField>> raw_partition_fields;
+
   for (const string& partition_name : partition.raw_partition_names) {
-    raw_partition_fields.push_back(fbb.CreateString(partition_name));
+    auto data = reinterpret_cast<const uint8_t*>(partition_name.data());
+    auto fb_vector = fbb.CreateVector(data, partition_name.size());
+    raw_partition_fields.push_back(CreateFbIcebergPartitionField(fbb, 
fb_vector));
   }
 
   flatbuffers::Offset<FbIcebergDataFile> data_file = 
CreateFbIcebergDataFile(fbb,
diff --git a/common/fbs/IcebergObjects.fbs b/common/fbs/IcebergObjects.fbs
index 6b6636769..f75c48766 100644
--- a/common/fbs/IcebergObjects.fbs
+++ b/common/fbs/IcebergObjects.fbs
@@ -60,6 +60,10 @@ table FbIcebergColumnStats {
   upper_bound: [ubyte];
 }
 
+table FbIcebergPartitionField {
+  field_value: [ubyte];
+}
+
 table FbIcebergDataFile {
   path: string;
   format: FbIcebergDataFileFormat = PARQUET;
@@ -67,7 +71,7 @@ table FbIcebergDataFile {
   file_size_in_bytes: long = 0;
   spec_id: ushort;
   partition_path: string;
-  raw_partition_fields: [string];
+  raw_partition_fields: [FbIcebergPartitionField];
   per_column_stats: [FbIcebergColumnStats];
 }
 
diff --git a/common/function-registry/impala_functions.py 
b/common/function-registry/impala_functions.py
index eb7a53197..f85fe750e 100644
--- a/common/function-registry/impala_functions.py
+++ b/common/function-registry/impala_functions.py
@@ -1169,7 +1169,9 @@ invisible_functions = [
   [['iceberg_truncate_transform'], 'DECIMAL', ['DECIMAL', 'BIGINT'],
       
'_ZN6impala16IcebergFunctions26TruncatePartitionTransformEPN10impala_udf15FunctionContextERKNS1_10DecimalValERKNS1_9BigIntValE'],
   [['iceberg_truncate_transform'], 'STRING', ['STRING', 'INT'],
-      
'_ZN6impala16IcebergFunctions26TruncatePartitionTransformEPN10impala_udf15FunctionContextERKNS1_9StringValERKNS1_6IntValE'],
+      
'_ZN6impala16IcebergFunctions26TruncatePartitionTransformILb0EEEN10impala_udf9StringValEPNS2_15FunctionContextERKS3_RKNS2_6IntValE'],
+  [['iceberg_truncate_transform'], 'BINARY', ['BINARY', 'INT'],
+      
'_ZN6impala16IcebergFunctions26TruncatePartitionTransformILb1EEEN10impala_udf9StringValEPNS2_15FunctionContextERKS3_RKNS2_6IntValE'],
 
   [['iceberg_bucket_transform'], 'INT', ['INT', 'INT'],
       
'_ZN6impala16IcebergFunctions24BucketPartitionTransformEPN10impala_udf15FunctionContextERKNS1_6IntValES6_'],
@@ -1179,6 +1181,8 @@ invisible_functions = [
       
'_ZN6impala16IcebergFunctions24BucketPartitionTransformEPN10impala_udf15FunctionContextERKNS1_10DecimalValERKNS1_6IntValE'],
   [['iceberg_bucket_transform'], 'INT', ['STRING', 'INT'],
       
'_ZN6impala16IcebergFunctions24BucketPartitionTransformEPN10impala_udf15FunctionContextERKNS1_9StringValERKNS1_6IntValE'],
+  [['iceberg_bucket_transform'], 'INT', ['BINARY', 'INT'],
+      
'_ZN6impala16IcebergFunctions24BucketPartitionTransformEPN10impala_udf15FunctionContextERKNS1_9StringValERKNS1_6IntValE'],
   [['iceberg_bucket_transform'], 'INT', ['DATE', 'INT'],
       
'_ZN6impala16IcebergFunctions24BucketPartitionTransformEPN10impala_udf15FunctionContextERKNS1_7DateValERKNS1_6IntValE'],
   [['iceberg_bucket_transform'], 'INT', ['TIMESTAMP', 'INT'],
diff --git 
a/fe/src/main/java/org/apache/impala/analysis/IcebergPartitionField.java 
b/fe/src/main/java/org/apache/impala/analysis/IcebergPartitionField.java
index 441727ec2..ec7a4c641 100644
--- a/fe/src/main/java/org/apache/impala/analysis/IcebergPartitionField.java
+++ b/fe/src/main/java/org/apache/impala/analysis/IcebergPartitionField.java
@@ -83,6 +83,10 @@ public class IcebergPartitionField extends StmtNode {
     return transform_.getTransformParam();
   }
 
+  public ScalarType getType() {
+    return type_;
+  }
+
   @Override
   public void analyze(Analyzer analyzer) throws AnalysisException {
     transform_.analyze(analyzer);
diff --git a/fe/src/main/java/org/apache/impala/analysis/ShowFilesStmt.java 
b/fe/src/main/java/org/apache/impala/analysis/ShowFilesStmt.java
index 8fa467ce0..0e37a17f1 100644
--- a/fe/src/main/java/org/apache/impala/analysis/ShowFilesStmt.java
+++ b/fe/src/main/java/org/apache/impala/analysis/ShowFilesStmt.java
@@ -151,7 +151,7 @@ public class ShowFilesStmt extends StatementBase implements 
SingleTableStmt {
         icebergPartitionExprs.add(converter.convert(expr));
       } catch (ImpalaException e) {
         throw new AnalysisException(
-            "Invalid partition filtering expression: " + expr.toSql());
+            "Invalid partition filtering expression: " + expr.toSql(), e);
       }
     }
 
diff --git a/fe/src/main/java/org/apache/impala/catalog/FeIcebergTable.java 
b/fe/src/main/java/org/apache/impala/catalog/FeIcebergTable.java
index c0f38d887..637613f74 100644
--- a/fe/src/main/java/org/apache/impala/catalog/FeIcebergTable.java
+++ b/fe/src/main/java/org/apache/impala/catalog/FeIcebergTable.java
@@ -23,6 +23,7 @@ import com.google.common.collect.Lists;
 import com.google.common.primitives.Ints;
 
 import java.io.IOException;
+import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
@@ -36,6 +37,7 @@ import java.util.TreeMap;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.stream.Collectors;
 
+import org.apache.commons.codec.binary.Hex;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.fs.FileStatus;
@@ -949,7 +951,12 @@ public interface FeIcebergTable extends FeFsTable {
         Object partValue = contentFile.partition().get(i, Object.class);
         String partValueString = null;
         if (partValue != null) {
-          partValueString = partValue.toString();
+          if (partValue instanceof ByteBuffer) {
+            // For binary value, convert to hexadecimal string according to 
Iceberg spec.
+            partValueString = Hex.encodeHexString((ByteBuffer) partValue);
+          } else {
+            partValueString = partValue.toString();
+          }
         }
         fieldNameToPartitionValue.put(spec.fields().get(i).name(), 
partValueString);
       }
diff --git 
a/fe/src/main/java/org/apache/impala/common/IcebergPredicateConverter.java 
b/fe/src/main/java/org/apache/impala/common/IcebergPredicateConverter.java
index 124d76a31..8825c8fd9 100644
--- a/fe/src/main/java/org/apache/impala/common/IcebergPredicateConverter.java
+++ b/fe/src/main/java/org/apache/impala/common/IcebergPredicateConverter.java
@@ -19,6 +19,7 @@ package org.apache.impala.common;
 
 import com.google.common.base.Preconditions;
 import java.math.BigDecimal;
+import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
@@ -168,6 +169,9 @@ public class IcebergPredicateConverter {
       case TIMESTAMP: return getIcebergTsValue(literal, column, schema_);
       case DATE: return ((DateLiteral) literal).getValue();
       case DECIMAL: return getIcebergDecimalValue(column, (NumericLiteral) 
literal);
+      // Wrapping the byte array into a ByteBuffer,
+      // so Iceberg handles it as a BinaryLiteral instead of FixedLiteral.
+      case BINARY: return ByteBuffer.wrap(((StringLiteral) 
literal).getBinValue());
       default: {
         throw new ImpalaRuntimeException(
             String.format("Unable to parse Iceberg value '%s' for type %s",
diff --git a/fe/src/main/java/org/apache/impala/util/IcebergUtil.java 
b/fe/src/main/java/org/apache/impala/util/IcebergUtil.java
index 0fa0788d3..8c8736cbd 100644
--- a/fe/src/main/java/org/apache/impala/util/IcebergUtil.java
+++ b/fe/src/main/java/org/apache/impala/util/IcebergUtil.java
@@ -27,6 +27,8 @@ import com.google.common.primitives.Longs;
 import com.google.flatbuffers.FlatBufferBuilder;
 import java.io.IOException;
 import java.nio.ByteBuffer;
+import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.time.DateTimeException;
 import java.time.Instant;
 import java.time.LocalDateTime;
@@ -35,6 +37,7 @@ import java.time.ZoneOffset;
 import java.time.temporal.ChronoUnit;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Base64;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
@@ -141,6 +144,11 @@ public class IcebergUtil {
   public static final String ICEBERG_REST_WAREHOUSE_LOCATION =
       "iceberg_rest_warehouse_location";
 
+  /**
+   * For BINARY type, we use ISO-8859-1 to preserve the byte values.
+   */
+  private static final Charset CHARSET_FOR_BINARY = 
StandardCharsets.ISO_8859_1;
+
   /**
    * Returns the corresponding catalog implementation for 'feTable'.
    */
@@ -767,9 +775,37 @@ public class IcebergUtil {
       return values.length;
     }
 
+    /**
+     * Copied from
+     * 
https://github.com/apache/iceberg/blob/ccb8bc43/core/src/main/java/org/apache/iceberg/PartitionData.java#L119
+     */
     @Override
     public <T> T get(int pos, Class<T> javaClass) {
-      return javaClass.cast(values[pos]);
+      Object value = get(pos);
+      if (value == null || javaClass.isInstance(value)) {
+        return javaClass.cast(value);
+      }
+
+      throw new IllegalArgumentException(
+          String.format(
+              "Wrong class, expected %s, but was %s, for object: %s",
+              javaClass.getName(), value.getClass().getName(), value));
+    }
+
+    /**
+     * Copied from
+     * 
https://github.com/apache/iceberg/blob/ccb8bc43/core/src/main/java/org/apache/iceberg/PartitionData.java#L132
+     */
+    public Object get(int pos) {
+      if (pos >= values.length) {
+        return null;
+      }
+
+      if (values[pos] instanceof byte[]) {
+        return ByteBuffer.wrap((byte[]) values[pos]);
+      }
+
+      return values[pos];
     }
 
     @Override
@@ -816,14 +852,22 @@ public class IcebergUtil {
     int path_i = 0;
     for (int i = 0; i < spec.getIcebergPartitionFieldsSize(); ++i) {
       IcebergPartitionField field = spec.getIcebergPartitionFields().get(i);
-      if (field.getTransformType() == TIcebergPartitionTransformType.VOID) 
continue;
+      TIcebergPartitionTransformType transformType = field.getTransformType();
+      if (transformType == TIcebergPartitionTransformType.VOID) continue;
 
       Preconditions.checkState(path_i < dataFile.rawPartitionFieldsLength());
-      String[] parts = dataFile.rawPartitionFields(path_i).split("=", 2);
+      ByteBuffer fieldByteBuffer =
+          dataFile.rawPartitionFields(path_i).fieldValueAsByteBuffer();
+
+      Charset charset = StandardCharsets.UTF_8;
+      if (field.getType() == org.apache.impala.catalog.Type.BINARY) {
+        charset = CHARSET_FOR_BINARY;
+      }
+      String partValueString = StringUtils.fromByteBuffer(fieldByteBuffer, 
charset);
+      String[] parts = partValueString.split("=", 2);
       Preconditions.checkArgument(parts.length == 2 && parts[0] != null &&
           field.getFieldName().equals(parts[0]), "Invalid partition: %s",
-          dataFile.rawPartitionFields(path_i));
-      TIcebergPartitionTransformType transformType = field.getTransformType();
+          partValueString);
       data.set(i, getPartitionValue(
           partitionType.fields().get(i).type(), transformType, parts[1]));
       ++path_i;
@@ -847,7 +891,11 @@ public class IcebergUtil {
         transformType == TIcebergPartitionTransformType.BUCKET ||
         transformType == TIcebergPartitionTransformType.DAY) {
       // These partition transforms are handled successfully by Iceberg's API.
-      return Conversions.fromPartitionString(type, stringValue);
+      if (type.typeId() == Type.TypeID.BINARY) {
+        return stringValue.getBytes(CHARSET_FOR_BINARY);
+      } else {
+        return Conversions.fromPartitionString(type, stringValue);
+      }
     }
     switch (transformType) {
       case YEAR: return parseYearToTransformYear(stringValue);
@@ -1070,7 +1118,13 @@ public class IcebergUtil {
     for (int i = 0; i < spec.fields().size(); ++i) {
       Object partValue = cf.partition().get(i, Object.class);
       if (partValue != null) {
-        partitionKeys.add(partValue.toString());
+        if (partValue instanceof ByteBuffer) {
+          String partValueString =
+              StringUtils.fromByteBuffer((ByteBuffer) partValue, 
CHARSET_FOR_BINARY);
+          partitionKeys.add(partValueString);
+        } else {
+          partitionKeys.add(partValue.toString());
+        }
       } else {
         partitionKeys.add("NULL");
       }
@@ -1161,7 +1215,17 @@ public class IcebergUtil {
       Object partValue = cf.partition().get(fieldIndex, Object.class);
       String partValueString;
       if (partValue != null) {
-        partValueString = partValue.toString();
+        if (partValue instanceof ByteBuffer) {
+          // For BINARY type, we need to encode the byte buffer to base64 
string, so
+          // TextConverter::WriteSlot in be/src/exec/text-converter.inline.h 
can decode
+          // it back correctly.
+          ByteBuffer buffer = (ByteBuffer) partValue;
+          byte[] bytes = new byte[buffer.remaining()];
+          buffer.duplicate().get(bytes);
+          partValueString = Base64.getEncoder().encodeToString(bytes);
+        } else {
+          partValueString = partValue.toString();
+        }
       } else {
         // This needs to be consistent with getPartitionValue().
         partValueString = MetaStoreUtil.DEFAULT_NULL_PARTITION_KEY_VALUE;
diff --git a/fe/src/main/java/org/apache/impala/util/StringUtils.java 
b/fe/src/main/java/org/apache/impala/util/StringUtils.java
index 2c8e49c5a..bb88d6e6e 100644
--- a/fe/src/main/java/org/apache/impala/util/StringUtils.java
+++ b/fe/src/main/java/org/apache/impala/util/StringUtils.java
@@ -20,6 +20,7 @@ package org.apache.impala.util;
 import java.nio.ByteBuffer;
 import java.nio.CharBuffer;
 import java.nio.charset.CharacterCodingException;
+import java.nio.charset.Charset;
 import java.nio.charset.StandardCharsets;
 
 import com.google.common.base.Preconditions;
@@ -52,4 +53,13 @@ public class StringUtils {
     }
   }
 
+  /**
+   * Converts a ByteBuffer to a String using the given charset.
+  */
+  public static String fromByteBuffer(ByteBuffer buf, Charset charset) {
+    byte[] bytes = new byte[buf.remaining()];
+    buf.duplicate().get(bytes);
+    return new String(bytes, charset);
+  }
+
 }
diff --git 
a/testdata/workloads/functional-query/queries/QueryTest/iceberg-drop-partition.test
 
b/testdata/workloads/functional-query/queries/QueryTest/iceberg-drop-partition.test
index a94732ea9..43d18cadc 100644
--- 
a/testdata/workloads/functional-query/queries/QueryTest/iceberg-drop-partition.test
+++ 
b/testdata/workloads/functional-query/queries/QueryTest/iceberg-drop-partition.test
@@ -4,25 +4,28 @@
 CREATE TABLE iceberg_identity_partitions
   (identity_boolean boolean, identity_int int, identity_bigint bigint,
   identity_float float, identity_double double, identity_decimal 
decimal(20,10),
-  identity_date date, identity_timestamp timestamp, identity_string string)
+  identity_date date, identity_timestamp timestamp, identity_string string,
+  identity_binary binary)
 PARTITIONED BY SPEC
   (identity(identity_boolean), identity(identity_int), 
identity(identity_bigint),
   identity(identity_float), identity(identity_double), 
identity(identity_decimal),
-  identity(identity_date), identity(identity_string))
+  identity(identity_date), identity(identity_string), 
identity(identity_binary))
 STORED AS ICEBERG;
 CREATE TABLE iceberg_bucket_partitions
   (bucket_int int, bucket_bigint bigint, bucket_decimal decimal(20,10),
-  bucket_date date, bucket_timestamp timestamp, bucket_string string)
+  bucket_date date, bucket_timestamp timestamp, bucket_string string,
+  bucket_binary binary)
 PARTITIONED BY SPEC
   (bucket(5,bucket_int), bucket(5,bucket_bigint), bucket(5,bucket_decimal),
-  bucket(5,bucket_date), bucket(5,bucket_timestamp), bucket(5,bucket_string))
+  bucket(5,bucket_date), bucket(5,bucket_timestamp), bucket(5,bucket_string),
+  bucket(5,bucket_binary))
 STORED AS ICEBERG;
 CREATE TABLE iceberg_truncate_partitions
   (truncate_int int, truncate_bigint bigint, truncate_decimal decimal(20,10),
-  truncate_string string)
+  truncate_string string, truncate_binary binary)
 PARTITIONED BY SPEC
   (truncate(5,truncate_int), truncate(5,truncate_bigint), 
truncate(5,truncate_decimal),
-  truncate(5,truncate_string))
+  truncate(5,truncate_string), truncate(5,truncate_binary))
 STORED AS ICEBERG;
 CREATE TABLE iceberg_time_partitions
   (year_date date, year_timestamp timestamp, month_date date, month_timestamp 
timestamp,
@@ -74,16 +77,20 @@ INSERT INTO iceberg_identity_partitions(identity_string) 
VALUES ("string-transfo
 INSERT INTO iceberg_identity_partitions(identity_string) VALUES 
("string-transform-set");
 INSERT INTO iceberg_identity_partitions(identity_string) VALUES ("string"), 
("another-string");
 INSERT INTO iceberg_identity_partitions(identity_string) VALUES ("string"), 
("another-string");
+INSERT INTO iceberg_identity_partitions(identity_binary) VALUES 
(CAST('你好hello' AS BINARY));
+INSERT INTO iceberg_identity_partitions(identity_binary) VALUES 
(CAST(UNHEX('FF4433221100') AS BINARY));
 INSERT INTO iceberg_bucket_partitions(bucket_int) VALUES (100), (200);
 INSERT INTO iceberg_bucket_partitions(bucket_bigint) VALUES (100);
 INSERT INTO iceberg_bucket_partitions(bucket_decimal) VALUES (10);
 INSERT INTO iceberg_bucket_partitions(bucket_date) VALUES ("1526-01-12");
 INSERT INTO iceberg_bucket_partitions(bucket_string) VALUES ("string");
 INSERT INTO iceberg_bucket_partitions(bucket_timestamp) VALUES ("1583-04-02 
03:00:00");
+INSERT INTO iceberg_bucket_partitions(bucket_binary) VALUES 
(CAST(UNHEX('FF4433221100') AS BINARY));
 INSERT INTO iceberg_truncate_partitions(truncate_int) VALUES (131072);
 INSERT INTO iceberg_truncate_partitions(truncate_bigint) VALUES (68719476736);
 INSERT INTO iceberg_truncate_partitions(truncate_decimal) VALUES 
(100000.1234567891);
 INSERT INTO iceberg_truncate_partitions(truncate_string) VALUES 
('thisisalongstring');
+INSERT INTO iceberg_truncate_partitions(truncate_binary) VALUES 
(CAST('你好hello' AS BINARY));
 INSERT INTO iceberg_time_partitions(year_date) VALUES ('2077-05-06');
 INSERT INTO iceberg_time_partitions(month_date) VALUES ('2023-12-01');
 INSERT INTO iceberg_time_partitions(day_date) VALUES ('2023-12-01');
@@ -103,7 +110,7 @@ INSERT INTO iceberg_mixed_partitions(identity_int) VALUES 
(NULL);
 # Number of partitions for iceberg_identity_partitions before DROP PARTITION 
queries
 SELECT COUNT(1) FROM $DATABASE.iceberg_identity_partitions.`partitions`
 ---- RESULTS
-11
+13
 ---- TYPES
 BIGINT
 ====
@@ -173,6 +180,16 @@ ALTER TABLE iceberg_identity_partitions DROP PARTITION 
(identity(identity_string
 'Dropped 1 partition(s)'
 ====
 ---- QUERY
+ALTER TABLE iceberg_identity_partitions DROP PARTITION 
(identity(identity_binary) = CAST('你好hello' AS BINARY));
+---- RESULTS
+'Dropped 1 partition(s)'
+====
+---- QUERY
+ALTER TABLE iceberg_identity_partitions DROP PARTITION 
(identity(identity_binary) = CAST(UNHEX('FF4433221100') AS BINARY));
+---- RESULTS
+'Dropped 1 partition(s)'
+====
+---- QUERY
 # Number of partitions for iceberg_identity_partitions after DROP PARTITION 
queries
 SELECT COUNT(1) FROM $DATABASE.iceberg_identity_partitions.`partitions`
 ---- RESULTS
@@ -184,7 +201,7 @@ BIGINT
 # Number of partitions for iceberg_bucket_partitions before DROP PARTITION 
queries
 SELECT COUNT(1) FROM $DATABASE.iceberg_bucket_partitions.`partitions`
 ---- RESULTS
-7
+8
 ---- TYPES
 BIGINT
 ====
@@ -219,6 +236,11 @@ ALTER TABLE iceberg_bucket_partitions DROP PARTITION 
(bucket(5, bucket_string) =
 'Dropped 1 partition(s)'
 ====
 ---- QUERY
+ALTER TABLE iceberg_bucket_partitions DROP PARTITION (bucket(5, bucket_binary) 
= 1);
+---- RESULTS
+'Dropped 1 partition(s)'
+====
+---- QUERY
 # Number of partitions for iceberg_bucket_partitions after DROP PARTITION 
queries
 SELECT COUNT(1) FROM $DATABASE.iceberg_bucket_partitions.`partitions`
 ---- RESULTS
@@ -230,7 +252,7 @@ BIGINT
 # Number of partitions for iceberg_truncate_partitions before DROP PARTITION 
queries
 SELECT COUNT(1) FROM $DATABASE.iceberg_truncate_partitions.`partitions`
 ---- RESULTS
-4
+5
 ---- TYPES
 BIGINT
 ====
@@ -255,6 +277,11 @@ ALTER TABLE iceberg_truncate_partitions DROP PARTITION 
(truncate(5, truncate_str
 'Dropped 1 partition(s)'
 ====
 ---- QUERY
+ALTER TABLE iceberg_truncate_partitions DROP PARTITION (truncate(5, 
truncate_binary) = CAST(UNHEX('E4BDA0E5A5') AS BINARY));
+---- RESULTS
+'Dropped 1 partition(s)'
+====
+---- QUERY
 # Number of partitions for iceberg_truncate_partitions after DROP PARTITION 
queries
 SELECT COUNT(1) FROM $DATABASE.iceberg_truncate_partitions.`partitions`
 ---- RESULTS
diff --git 
a/testdata/workloads/functional-query/queries/QueryTest/iceberg-partitioned-insert-default.test
 
b/testdata/workloads/functional-query/queries/QueryTest/iceberg-partitioned-insert-default.test
index 09690af1d..f530059ac 100644
--- 
a/testdata/workloads/functional-query/queries/QueryTest/iceberg-partitioned-insert-default.test
+++ 
b/testdata/workloads/functional-query/queries/QueryTest/iceberg-partitioned-insert-default.test
@@ -206,7 +206,8 @@ create table alltypes_part (
   double_col DOUBLE,
   date_col DATE,
   string_col STRING,
-  timestamp_col TIMESTAMP)
+  timestamp_col TIMESTAMP,
+  binary_col BINARY)
 partitioned by spec (
   id,
   bool_col,
@@ -215,7 +216,8 @@ partitioned by spec (
   float_col,
   double_col,
   date_col,
-  string_col)
+  string_col,
+  binary_col)
 stored as iceberg;
 ---- RESULTS
 'Table has been created.'
@@ -223,7 +225,8 @@ stored as iceberg;
 ---- QUERY
 insert into alltypes_part
 select id, bool_col, int_col, bigint_col, float_col, double_col,
-       CAST(date_string_col as date FORMAT 'MM/DD/YY'), string_col, 
timestamp_col
+       CAST(date_string_col as date FORMAT 'MM/DD/YY'), string_col, 
timestamp_col,
+       CAST(string_col as BINARY)
 from functional.alltypestiny;
 select count(*) from alltypes_part;
 ---- RESULTS
@@ -236,28 +239,28 @@ show files in alltypes_part;
 ---- LABELS
 Path,Size,Partition,EC Policy
 ---- RESULTS
-row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part/data/id=0/bool_col=true/int_col=0/bigint_col=0/float_col=0/double_col=0/date_col=2009-01-01/string_col=0/.*.0.parq','.*','','$ERASURECODE_POLICY'
-row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part/data/id=1/bool_col=false/int_col=1/bigint_col=10/float_col=1.100000023841858/double_col=10.1/date_col=2009-01-01/string_col=1/.*.0.parq','.*','','$ERASURECODE_POLICY'
-row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part/data/id=2/bool_col=true/int_col=0/bigint_col=0/float_col=0/double_col=0/date_col=2009-02-01/string_col=0/.*.0.parq','.*','','$ERASURECODE_POLICY'
-row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part/data/id=3/bool_col=false/int_col=1/bigint_col=10/float_col=1.100000023841858/double_col=10.1/date_col=2009-02-01/string_col=1/.*.0.parq','.*','','$ERASURECODE_POLICY'
-row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part/data/id=4/bool_col=true/int_col=0/bigint_col=0/float_col=0/double_col=0/date_col=2009-03-01/string_col=0/.*.0.parq','.*','','$ERASURECODE_POLICY'
-row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part/data/id=5/bool_col=false/int_col=1/bigint_col=10/float_col=1.100000023841858/double_col=10.1/date_col=2009-03-01/string_col=1/.*.0.parq','.*','','$ERASURECODE_POLICY'
-row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part/data/id=6/bool_col=true/int_col=0/bigint_col=0/float_col=0/double_col=0/date_col=2009-04-01/string_col=0/.*.0.parq','.*','','$ERASURECODE_POLICY'
-row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part/data/id=7/bool_col=false/int_col=1/bigint_col=10/float_col=1.100000023841858/double_col=10.1/date_col=2009-04-01/string_col=1/.*.0.parq','.*','','$ERASURECODE_POLICY'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part/data/id=0/bool_col=true/int_col=0/bigint_col=0/float_col=0/double_col=0/date_col=2009-01-01/string_col=0/binary_col=0/.*.0.parq','.*','','$ERASURECODE_POLICY'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part/data/id=1/bool_col=false/int_col=1/bigint_col=10/float_col=1.100000023841858/double_col=10.1/date_col=2009-01-01/string_col=1/binary_col=1/.*.0.parq','.*','','$ERASURECODE_POLICY'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part/data/id=2/bool_col=true/int_col=0/bigint_col=0/float_col=0/double_col=0/date_col=2009-02-01/string_col=0/binary_col=0/.*.0.parq','.*','','$ERASURECODE_POLICY'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part/data/id=3/bool_col=false/int_col=1/bigint_col=10/float_col=1.100000023841858/double_col=10.1/date_col=2009-02-01/string_col=1/binary_col=1/.*.0.parq','.*','','$ERASURECODE_POLICY'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part/data/id=4/bool_col=true/int_col=0/bigint_col=0/float_col=0/double_col=0/date_col=2009-03-01/string_col=0/binary_col=0/.*.0.parq','.*','','$ERASURECODE_POLICY'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part/data/id=5/bool_col=false/int_col=1/bigint_col=10/float_col=1.100000023841858/double_col=10.1/date_col=2009-03-01/string_col=1/binary_col=1/.*.0.parq','.*','','$ERASURECODE_POLICY'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part/data/id=6/bool_col=true/int_col=0/bigint_col=0/float_col=0/double_col=0/date_col=2009-04-01/string_col=0/binary_col=0/.*.0.parq','.*','','$ERASURECODE_POLICY'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part/data/id=7/bool_col=false/int_col=1/bigint_col=10/float_col=1.100000023841858/double_col=10.1/date_col=2009-04-01/string_col=1/binary_col=1/.*.0.parq','.*','','$ERASURECODE_POLICY'
 ---- TYPES
 STRING, STRING, STRING, STRING
 ====
 ---- QUERY
 SHOW PARTITIONS alltypes_part;
 ---- RESULTS
-'{"id":"0","bool_col":"true","int_col":"0","bigint_col":"0","float_col":"0.0","double_col":"0.0","date_col":"14245","string_col":"0"}',1,1
-'{"id":"1","bool_col":"false","int_col":"1","bigint_col":"10","float_col":"1.1","double_col":"10.1","date_col":"14245","string_col":"1"}',1,1
-'{"id":"2","bool_col":"true","int_col":"0","bigint_col":"0","float_col":"0.0","double_col":"0.0","date_col":"14276","string_col":"0"}',1,1
-'{"id":"3","bool_col":"false","int_col":"1","bigint_col":"10","float_col":"1.1","double_col":"10.1","date_col":"14276","string_col":"1"}',1,1
-'{"id":"4","bool_col":"true","int_col":"0","bigint_col":"0","float_col":"0.0","double_col":"0.0","date_col":"14304","string_col":"0"}',1,1
-'{"id":"5","bool_col":"false","int_col":"1","bigint_col":"10","float_col":"1.1","double_col":"10.1","date_col":"14304","string_col":"1"}',1,1
-'{"id":"6","bool_col":"true","int_col":"0","bigint_col":"0","float_col":"0.0","double_col":"0.0","date_col":"14335","string_col":"0"}',1,1
-'{"id":"7","bool_col":"false","int_col":"1","bigint_col":"10","float_col":"1.1","double_col":"10.1","date_col":"14335","string_col":"1"}',1,1
+'{"id":"0","bool_col":"true","int_col":"0","bigint_col":"0","float_col":"0.0","double_col":"0.0","date_col":"14245","string_col":"0","binary_col":"30"}',1,1
+'{"id":"1","bool_col":"false","int_col":"1","bigint_col":"10","float_col":"1.1","double_col":"10.1","date_col":"14245","string_col":"1","binary_col":"31"}',1,1
+'{"id":"2","bool_col":"true","int_col":"0","bigint_col":"0","float_col":"0.0","double_col":"0.0","date_col":"14276","string_col":"0","binary_col":"30"}',1,1
+'{"id":"3","bool_col":"false","int_col":"1","bigint_col":"10","float_col":"1.1","double_col":"10.1","date_col":"14276","string_col":"1","binary_col":"31"}',1,1
+'{"id":"4","bool_col":"true","int_col":"0","bigint_col":"0","float_col":"0.0","double_col":"0.0","date_col":"14304","string_col":"0","binary_col":"30"}',1,1
+'{"id":"5","bool_col":"false","int_col":"1","bigint_col":"10","float_col":"1.1","double_col":"10.1","date_col":"14304","string_col":"1","binary_col":"31"}',1,1
+'{"id":"6","bool_col":"true","int_col":"0","bigint_col":"0","float_col":"0.0","double_col":"0.0","date_col":"14335","string_col":"0","binary_col":"30"}',1,1
+'{"id":"7","bool_col":"false","int_col":"1","bigint_col":"10","float_col":"1.1","double_col":"10.1","date_col":"14335","string_col":"1","binary_col":"31"}',1,1
 ---- TYPES
 STRING, BIGINT, BIGINT
 ====
@@ -265,7 +268,8 @@ STRING, BIGINT, BIGINT
 # INSERTs with wrong value orderings are rejected.
 insert into alltypes_part
 select bool_col, id, int_col, bigint_col, float_col, double_col,
-       CAST(date_string_col as date FORMAT 'MM/DD/YY'), string_col, 
timestamp_col
+       CAST(date_string_col as date FORMAT 'MM/DD/YY'), string_col, 
timestamp_col,
+       CAST(string_col as BINARY)
 from functional.alltypestiny;
 select count(*) from alltypes_part;
 ---- CATCH
@@ -274,7 +278,8 @@ Expression 'id' (type: INT) would need to be cast to 
BOOLEAN for column 'bool_co
 ---- QUERY
 insert into alltypes_part
 select id, bool_col, int_col, bigint_col, float_col, double_col,
-       CAST(date_string_col as date FORMAT 'MM/DD/YY'), timestamp_col, 
string_col
+       CAST(date_string_col as date FORMAT 'MM/DD/YY'), timestamp_col, 
string_col,
+       CAST(string_col as BINARY)
 from functional.alltypestiny;
 select count(*) from alltypes_part;
 ---- CATCH
@@ -336,6 +341,17 @@ aggregation(SUM, NumRowGroups): 0
 aggregation(SUM, NumFileMetadataRead): 4
 ====
 ---- QUERY
+select count(*) from alltypes_part
+where binary_col = CAST('0' AS BINARY);
+---- RESULTS
+4
+---- TYPES
+BIGINT
+---- RUNTIME_PROFILE
+aggregation(SUM, NumRowGroups): 0
+aggregation(SUM, NumFileMetadataRead): 4
+====
+---- QUERY
 # 'timestamp_col' is not a partitioning column, so min/max stats will not be 
used to
 # eliminate row groups
 select count(*) from alltypes_part
@@ -362,7 +378,8 @@ insert into
     double_col,
     date_col,
     string_col,
-    timestamp_col
+    timestamp_col,
+    binary_col
   )
 select
   id,
@@ -372,7 +389,8 @@ select
   double_col,
   CAST(date_string_col as date FORMAT 'MM/DD/YY'),
   string_col,
-  timestamp_col
+  timestamp_col,
+  CAST(string_col as BINARY)
 from
   functional.alltypestiny;
 select count(*) from alltypes_part;
@@ -386,14 +404,14 @@ show files in alltypes_part_2;
 ---- LABELS
 Path,Size,Partition,EC Policy
 ---- RESULTS
-row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part_2/data/id=0/bool_col=__HIVE_DEFAULT_PARTITION__/int_col=0/bigint_col=0/float_col=0/double_col=0/date_col=2009-01-01/string_col=0/.*.0.parq','.*','','$ERASURECODE_POLICY'
-row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part_2/data/id=1/bool_col=__HIVE_DEFAULT_PARTITION__/int_col=1/bigint_col=10/float_col=1.100000023841858/double_col=10.1/date_col=2009-01-01/string_col=1/.*.0.parq','.*','','$ERASURECODE_POLICY'
-row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part_2/data/id=2/bool_col=__HIVE_DEFAULT_PARTITION__/int_col=0/bigint_col=0/float_col=0/double_col=0/date_col=2009-02-01/string_col=0/.*.0.parq','.*','','$ERASURECODE_POLICY'
-row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part_2/data/id=3/bool_col=__HIVE_DEFAULT_PARTITION__/int_col=1/bigint_col=10/float_col=1.100000023841858/double_col=10.1/date_col=2009-02-01/string_col=1/.*.0.parq','.*','','$ERASURECODE_POLICY'
-row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part_2/data/id=4/bool_col=__HIVE_DEFAULT_PARTITION__/int_col=0/bigint_col=0/float_col=0/double_col=0/date_col=2009-03-01/string_col=0/.*.0.parq','.*','','$ERASURECODE_POLICY'
-row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part_2/data/id=5/bool_col=__HIVE_DEFAULT_PARTITION__/int_col=1/bigint_col=10/float_col=1.100000023841858/double_col=10.1/date_col=2009-03-01/string_col=1/.*.0.parq','.*','','$ERASURECODE_POLICY'
-row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part_2/data/id=6/bool_col=__HIVE_DEFAULT_PARTITION__/int_col=0/bigint_col=0/float_col=0/double_col=0/date_col=2009-04-01/string_col=0/.*.0.parq','.*','','$ERASURECODE_POLICY'
-row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part_2/data/id=7/bool_col=__HIVE_DEFAULT_PARTITION__/int_col=1/bigint_col=10/float_col=1.100000023841858/double_col=10.1/date_col=2009-04-01/string_col=1/.*.0.parq','.*','','$ERASURECODE_POLICY'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part_2/data/id=0/bool_col=__HIVE_DEFAULT_PARTITION__/int_col=0/bigint_col=0/float_col=0/double_col=0/date_col=2009-01-01/string_col=0/binary_col=0/.*.0.parq','.*','','$ERASURECODE_POLICY'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part_2/data/id=1/bool_col=__HIVE_DEFAULT_PARTITION__/int_col=1/bigint_col=10/float_col=1.100000023841858/double_col=10.1/date_col=2009-01-01/string_col=1/binary_col=1/.*.0.parq','.*','','$ERASURECODE_POLICY'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part_2/data/id=2/bool_col=__HIVE_DEFAULT_PARTITION__/int_col=0/bigint_col=0/float_col=0/double_col=0/date_col=2009-02-01/string_col=0/binary_col=0/.*.0.parq','.*','','$ERASURECODE_POLICY'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part_2/data/id=3/bool_col=__HIVE_DEFAULT_PARTITION__/int_col=1/bigint_col=10/float_col=1.100000023841858/double_col=10.1/date_col=2009-02-01/string_col=1/binary_col=1/.*.0.parq','.*','','$ERASURECODE_POLICY'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part_2/data/id=4/bool_col=__HIVE_DEFAULT_PARTITION__/int_col=0/bigint_col=0/float_col=0/double_col=0/date_col=2009-03-01/string_col=0/binary_col=0/.*.0.parq','.*','','$ERASURECODE_POLICY'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part_2/data/id=5/bool_col=__HIVE_DEFAULT_PARTITION__/int_col=1/bigint_col=10/float_col=1.100000023841858/double_col=10.1/date_col=2009-03-01/string_col=1/binary_col=1/.*.0.parq','.*','','$ERASURECODE_POLICY'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part_2/data/id=6/bool_col=__HIVE_DEFAULT_PARTITION__/int_col=0/bigint_col=0/float_col=0/double_col=0/date_col=2009-04-01/string_col=0/binary_col=0/.*.0.parq','.*','','$ERASURECODE_POLICY'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part_2/data/id=7/bool_col=__HIVE_DEFAULT_PARTITION__/int_col=1/bigint_col=10/float_col=1.100000023841858/double_col=10.1/date_col=2009-04-01/string_col=1/binary_col=1/.*.0.parq','.*','','$ERASURECODE_POLICY'
 ---- TYPES
 STRING, STRING, STRING, STRING
 ====
diff --git 
a/testdata/workloads/functional-query/queries/QueryTest/iceberg-partitioned-insert-v1.test
 
b/testdata/workloads/functional-query/queries/QueryTest/iceberg-partitioned-insert-v1.test
index 6c35c4818..2a3eb6311 100644
--- 
a/testdata/workloads/functional-query/queries/QueryTest/iceberg-partitioned-insert-v1.test
+++ 
b/testdata/workloads/functional-query/queries/QueryTest/iceberg-partitioned-insert-v1.test
@@ -209,7 +209,8 @@ create table alltypes_part (
   double_col DOUBLE,
   date_col DATE,
   string_col STRING,
-  timestamp_col TIMESTAMP)
+  timestamp_col TIMESTAMP,
+  binary_col BINARY)
 partitioned by spec (
   id,
   bool_col,
@@ -218,7 +219,8 @@ partitioned by spec (
   float_col,
   double_col,
   date_col,
-  string_col)
+  string_col,
+  binary_col)
 stored as iceberg
 tblproperties ('format-version'='1');
 ---- RESULTS
@@ -227,7 +229,8 @@ tblproperties ('format-version'='1');
 ---- QUERY
 insert into alltypes_part
 select id, bool_col, int_col, bigint_col, float_col, double_col,
-       CAST(date_string_col as date FORMAT 'MM/DD/YY'), string_col, 
timestamp_col
+       CAST(date_string_col as date FORMAT 'MM/DD/YY'), string_col, 
timestamp_col,
+       CAST(string_col as BINARY)
 from functional.alltypestiny;
 select count(*) from alltypes_part;
 ---- RESULTS
@@ -240,28 +243,28 @@ show files in alltypes_part;
 ---- LABELS
 Path,Size,Partition,EC Policy
 ---- RESULTS
-row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part/data/id=0/bool_col=true/int_col=0/bigint_col=0/float_col=0/double_col=0/date_col=2009-01-01/string_col=0/.*.0.parq','.*','','$ERASURECODE_POLICY'
-row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part/data/id=1/bool_col=false/int_col=1/bigint_col=10/float_col=1.100000023841858/double_col=10.1/date_col=2009-01-01/string_col=1/.*.0.parq','.*','','$ERASURECODE_POLICY'
-row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part/data/id=2/bool_col=true/int_col=0/bigint_col=0/float_col=0/double_col=0/date_col=2009-02-01/string_col=0/.*.0.parq','.*','','$ERASURECODE_POLICY'
-row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part/data/id=3/bool_col=false/int_col=1/bigint_col=10/float_col=1.100000023841858/double_col=10.1/date_col=2009-02-01/string_col=1/.*.0.parq','.*','','$ERASURECODE_POLICY'
-row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part/data/id=4/bool_col=true/int_col=0/bigint_col=0/float_col=0/double_col=0/date_col=2009-03-01/string_col=0/.*.0.parq','.*','','$ERASURECODE_POLICY'
-row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part/data/id=5/bool_col=false/int_col=1/bigint_col=10/float_col=1.100000023841858/double_col=10.1/date_col=2009-03-01/string_col=1/.*.0.parq','.*','','$ERASURECODE_POLICY'
-row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part/data/id=6/bool_col=true/int_col=0/bigint_col=0/float_col=0/double_col=0/date_col=2009-04-01/string_col=0/.*.0.parq','.*','','$ERASURECODE_POLICY'
-row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part/data/id=7/bool_col=false/int_col=1/bigint_col=10/float_col=1.100000023841858/double_col=10.1/date_col=2009-04-01/string_col=1/.*.0.parq','.*','','$ERASURECODE_POLICY'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part/data/id=0/bool_col=true/int_col=0/bigint_col=0/float_col=0/double_col=0/date_col=2009-01-01/string_col=0/binary_col=0/.*.0.parq','.*','','$ERASURECODE_POLICY'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part/data/id=1/bool_col=false/int_col=1/bigint_col=10/float_col=1.100000023841858/double_col=10.1/date_col=2009-01-01/string_col=1/binary_col=1/.*.0.parq','.*','','$ERASURECODE_POLICY'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part/data/id=2/bool_col=true/int_col=0/bigint_col=0/float_col=0/double_col=0/date_col=2009-02-01/string_col=0/binary_col=0/.*.0.parq','.*','','$ERASURECODE_POLICY'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part/data/id=3/bool_col=false/int_col=1/bigint_col=10/float_col=1.100000023841858/double_col=10.1/date_col=2009-02-01/string_col=1/binary_col=1/.*.0.parq','.*','','$ERASURECODE_POLICY'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part/data/id=4/bool_col=true/int_col=0/bigint_col=0/float_col=0/double_col=0/date_col=2009-03-01/string_col=0/binary_col=0/.*.0.parq','.*','','$ERASURECODE_POLICY'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part/data/id=5/bool_col=false/int_col=1/bigint_col=10/float_col=1.100000023841858/double_col=10.1/date_col=2009-03-01/string_col=1/binary_col=1/.*.0.parq','.*','','$ERASURECODE_POLICY'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part/data/id=6/bool_col=true/int_col=0/bigint_col=0/float_col=0/double_col=0/date_col=2009-04-01/string_col=0/binary_col=0/.*.0.parq','.*','','$ERASURECODE_POLICY'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part/data/id=7/bool_col=false/int_col=1/bigint_col=10/float_col=1.100000023841858/double_col=10.1/date_col=2009-04-01/string_col=1/binary_col=1/.*.0.parq','.*','','$ERASURECODE_POLICY'
 ---- TYPES
 STRING, STRING, STRING, STRING
 ====
 ---- QUERY
 SHOW PARTITIONS alltypes_part;
 ---- RESULTS
-'{"id":"0","bool_col":"true","int_col":"0","bigint_col":"0","float_col":"0.0","double_col":"0.0","date_col":"14245","string_col":"0"}',1,1
-'{"id":"1","bool_col":"false","int_col":"1","bigint_col":"10","float_col":"1.1","double_col":"10.1","date_col":"14245","string_col":"1"}',1,1
-'{"id":"2","bool_col":"true","int_col":"0","bigint_col":"0","float_col":"0.0","double_col":"0.0","date_col":"14276","string_col":"0"}',1,1
-'{"id":"3","bool_col":"false","int_col":"1","bigint_col":"10","float_col":"1.1","double_col":"10.1","date_col":"14276","string_col":"1"}',1,1
-'{"id":"4","bool_col":"true","int_col":"0","bigint_col":"0","float_col":"0.0","double_col":"0.0","date_col":"14304","string_col":"0"}',1,1
-'{"id":"5","bool_col":"false","int_col":"1","bigint_col":"10","float_col":"1.1","double_col":"10.1","date_col":"14304","string_col":"1"}',1,1
-'{"id":"6","bool_col":"true","int_col":"0","bigint_col":"0","float_col":"0.0","double_col":"0.0","date_col":"14335","string_col":"0"}',1,1
-'{"id":"7","bool_col":"false","int_col":"1","bigint_col":"10","float_col":"1.1","double_col":"10.1","date_col":"14335","string_col":"1"}',1,1
+'{"id":"0","bool_col":"true","int_col":"0","bigint_col":"0","float_col":"0.0","double_col":"0.0","date_col":"14245","string_col":"0","binary_col":"30"}',1,1
+'{"id":"1","bool_col":"false","int_col":"1","bigint_col":"10","float_col":"1.1","double_col":"10.1","date_col":"14245","string_col":"1","binary_col":"31"}',1,1
+'{"id":"2","bool_col":"true","int_col":"0","bigint_col":"0","float_col":"0.0","double_col":"0.0","date_col":"14276","string_col":"0","binary_col":"30"}',1,1
+'{"id":"3","bool_col":"false","int_col":"1","bigint_col":"10","float_col":"1.1","double_col":"10.1","date_col":"14276","string_col":"1","binary_col":"31"}',1,1
+'{"id":"4","bool_col":"true","int_col":"0","bigint_col":"0","float_col":"0.0","double_col":"0.0","date_col":"14304","string_col":"0","binary_col":"30"}',1,1
+'{"id":"5","bool_col":"false","int_col":"1","bigint_col":"10","float_col":"1.1","double_col":"10.1","date_col":"14304","string_col":"1","binary_col":"31"}',1,1
+'{"id":"6","bool_col":"true","int_col":"0","bigint_col":"0","float_col":"0.0","double_col":"0.0","date_col":"14335","string_col":"0","binary_col":"30"}',1,1
+'{"id":"7","bool_col":"false","int_col":"1","bigint_col":"10","float_col":"1.1","double_col":"10.1","date_col":"14335","string_col":"1","binary_col":"31"}',1,1
 ---- TYPES
 STRING, BIGINT, BIGINT
 ====
@@ -269,7 +272,8 @@ STRING, BIGINT, BIGINT
 # INSERTs with wrong value orderings are rejected.
 insert into alltypes_part
 select bool_col, id, int_col, bigint_col, float_col, double_col,
-       CAST(date_string_col as date FORMAT 'MM/DD/YY'), string_col, 
timestamp_col
+       CAST(date_string_col as date FORMAT 'MM/DD/YY'), string_col, 
timestamp_col,
+       CAST(string_col as BINARY)
 from functional.alltypestiny;
 select count(*) from alltypes_part;
 ---- CATCH
@@ -278,7 +282,8 @@ Expression 'id' (type: INT) would need to be cast to 
BOOLEAN for column 'bool_co
 ---- QUERY
 insert into alltypes_part
 select id, bool_col, int_col, bigint_col, float_col, double_col,
-       CAST(date_string_col as date FORMAT 'MM/DD/YY'), timestamp_col, 
string_col
+       CAST(date_string_col as date FORMAT 'MM/DD/YY'), timestamp_col, 
string_col,
+       CAST(string_col as BINARY)
 from functional.alltypestiny;
 select count(*) from alltypes_part;
 ---- CATCH
@@ -340,6 +345,17 @@ aggregation(SUM, NumRowGroups): 0
 aggregation(SUM, NumFileMetadataRead): 4
 ====
 ---- QUERY
+select count(*) from alltypes_part
+where binary_col = CAST('0' AS BINARY);
+---- RESULTS
+4
+---- TYPES
+BIGINT
+---- RUNTIME_PROFILE
+aggregation(SUM, NumRowGroups): 0
+aggregation(SUM, NumFileMetadataRead): 4
+====
+---- QUERY
 # 'timestamp_col' is not a partitioning column, so min/max stats will not be 
used to
 # eliminate row groups
 select count(*) from alltypes_part
@@ -366,7 +382,8 @@ insert into
     double_col,
     date_col,
     string_col,
-    timestamp_col
+    timestamp_col,
+    binary_col
   )
 select
   id,
@@ -376,7 +393,8 @@ select
   double_col,
   CAST(date_string_col as date FORMAT 'MM/DD/YY'),
   string_col,
-  timestamp_col
+  timestamp_col,
+  CAST(string_col as BINARY)
 from
   functional.alltypestiny;
 select count(*) from alltypes_part;
@@ -390,14 +408,14 @@ show files in alltypes_part_2;
 ---- LABELS
 Path,Size,Partition,EC Policy
 ---- RESULTS
-row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part_2/data/id=0/bool_col=__HIVE_DEFAULT_PARTITION__/int_col=0/bigint_col=0/float_col=0/double_col=0/date_col=2009-01-01/string_col=0/.*.0.parq','.*','','$ERASURECODE_POLICY'
-row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part_2/data/id=1/bool_col=__HIVE_DEFAULT_PARTITION__/int_col=1/bigint_col=10/float_col=1.100000023841858/double_col=10.1/date_col=2009-01-01/string_col=1/.*.0.parq','.*','','$ERASURECODE_POLICY'
-row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part_2/data/id=2/bool_col=__HIVE_DEFAULT_PARTITION__/int_col=0/bigint_col=0/float_col=0/double_col=0/date_col=2009-02-01/string_col=0/.*.0.parq','.*','','$ERASURECODE_POLICY'
-row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part_2/data/id=3/bool_col=__HIVE_DEFAULT_PARTITION__/int_col=1/bigint_col=10/float_col=1.100000023841858/double_col=10.1/date_col=2009-02-01/string_col=1/.*.0.parq','.*','','$ERASURECODE_POLICY'
-row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part_2/data/id=4/bool_col=__HIVE_DEFAULT_PARTITION__/int_col=0/bigint_col=0/float_col=0/double_col=0/date_col=2009-03-01/string_col=0/.*.0.parq','.*','','$ERASURECODE_POLICY'
-row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part_2/data/id=5/bool_col=__HIVE_DEFAULT_PARTITION__/int_col=1/bigint_col=10/float_col=1.100000023841858/double_col=10.1/date_col=2009-03-01/string_col=1/.*.0.parq','.*','','$ERASURECODE_POLICY'
-row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part_2/data/id=6/bool_col=__HIVE_DEFAULT_PARTITION__/int_col=0/bigint_col=0/float_col=0/double_col=0/date_col=2009-04-01/string_col=0/.*.0.parq','.*','','$ERASURECODE_POLICY'
-row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part_2/data/id=7/bool_col=__HIVE_DEFAULT_PARTITION__/int_col=1/bigint_col=10/float_col=1.100000023841858/double_col=10.1/date_col=2009-04-01/string_col=1/.*.0.parq','.*','','$ERASURECODE_POLICY'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part_2/data/id=0/bool_col=__HIVE_DEFAULT_PARTITION__/int_col=0/bigint_col=0/float_col=0/double_col=0/date_col=2009-01-01/string_col=0/binary_col=0/.*.0.parq','.*','','$ERASURECODE_POLICY'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part_2/data/id=1/bool_col=__HIVE_DEFAULT_PARTITION__/int_col=1/bigint_col=10/float_col=1.100000023841858/double_col=10.1/date_col=2009-01-01/string_col=1/binary_col=1/.*.0.parq','.*','','$ERASURECODE_POLICY'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part_2/data/id=2/bool_col=__HIVE_DEFAULT_PARTITION__/int_col=0/bigint_col=0/float_col=0/double_col=0/date_col=2009-02-01/string_col=0/binary_col=0/.*.0.parq','.*','','$ERASURECODE_POLICY'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part_2/data/id=3/bool_col=__HIVE_DEFAULT_PARTITION__/int_col=1/bigint_col=10/float_col=1.100000023841858/double_col=10.1/date_col=2009-02-01/string_col=1/binary_col=1/.*.0.parq','.*','','$ERASURECODE_POLICY'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part_2/data/id=4/bool_col=__HIVE_DEFAULT_PARTITION__/int_col=0/bigint_col=0/float_col=0/double_col=0/date_col=2009-03-01/string_col=0/binary_col=0/.*.0.parq','.*','','$ERASURECODE_POLICY'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part_2/data/id=5/bool_col=__HIVE_DEFAULT_PARTITION__/int_col=1/bigint_col=10/float_col=1.100000023841858/double_col=10.1/date_col=2009-03-01/string_col=1/binary_col=1/.*.0.parq','.*','','$ERASURECODE_POLICY'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part_2/data/id=6/bool_col=__HIVE_DEFAULT_PARTITION__/int_col=0/bigint_col=0/float_col=0/double_col=0/date_col=2009-04-01/string_col=0/binary_col=0/.*.0.parq','.*','','$ERASURECODE_POLICY'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part_2/data/id=7/bool_col=__HIVE_DEFAULT_PARTITION__/int_col=1/bigint_col=10/float_col=1.100000023841858/double_col=10.1/date_col=2009-04-01/string_col=1/binary_col=1/.*.0.parq','.*','','$ERASURECODE_POLICY'
 ---- TYPES
 STRING, STRING, STRING, STRING
 ====
diff --git 
a/testdata/workloads/functional-query/queries/QueryTest/iceberg-partitioned-insert-v2.test
 
b/testdata/workloads/functional-query/queries/QueryTest/iceberg-partitioned-insert-v2.test
index c65326d17..351f4f55f 100644
--- 
a/testdata/workloads/functional-query/queries/QueryTest/iceberg-partitioned-insert-v2.test
+++ 
b/testdata/workloads/functional-query/queries/QueryTest/iceberg-partitioned-insert-v2.test
@@ -209,7 +209,8 @@ create table alltypes_part (
   double_col DOUBLE,
   date_col DATE,
   string_col STRING,
-  timestamp_col TIMESTAMP)
+  timestamp_col TIMESTAMP,
+  binary_col BINARY)
 partitioned by spec (
   id,
   bool_col,
@@ -218,7 +219,8 @@ partitioned by spec (
   float_col,
   double_col,
   date_col,
-  string_col)
+  string_col,
+  binary_col)
 stored as iceberg
 tblproperties ('format-version'='2');
 ---- RESULTS
@@ -227,7 +229,8 @@ tblproperties ('format-version'='2');
 ---- QUERY
 insert into alltypes_part
 select id, bool_col, int_col, bigint_col, float_col, double_col,
-       CAST(date_string_col as date FORMAT 'MM/DD/YY'), string_col, 
timestamp_col
+       CAST(date_string_col as date FORMAT 'MM/DD/YY'), string_col, 
timestamp_col,
+       CAST(string_col as BINARY)
 from functional.alltypestiny;
 select count(*) from alltypes_part;
 ---- RESULTS
@@ -240,28 +243,28 @@ show files in alltypes_part;
 ---- LABELS
 Path,Size,Partition,EC Policy
 ---- RESULTS
-row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part/data/id=0/bool_col=true/int_col=0/bigint_col=0/float_col=0/double_col=0/date_col=2009-01-01/string_col=0/.*.0.parq','.*','','$ERASURECODE_POLICY'
-row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part/data/id=1/bool_col=false/int_col=1/bigint_col=10/float_col=1.100000023841858/double_col=10.1/date_col=2009-01-01/string_col=1/.*.0.parq','.*','','$ERASURECODE_POLICY'
-row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part/data/id=2/bool_col=true/int_col=0/bigint_col=0/float_col=0/double_col=0/date_col=2009-02-01/string_col=0/.*.0.parq','.*','','$ERASURECODE_POLICY'
-row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part/data/id=3/bool_col=false/int_col=1/bigint_col=10/float_col=1.100000023841858/double_col=10.1/date_col=2009-02-01/string_col=1/.*.0.parq','.*','','$ERASURECODE_POLICY'
-row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part/data/id=4/bool_col=true/int_col=0/bigint_col=0/float_col=0/double_col=0/date_col=2009-03-01/string_col=0/.*.0.parq','.*','','$ERASURECODE_POLICY'
-row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part/data/id=5/bool_col=false/int_col=1/bigint_col=10/float_col=1.100000023841858/double_col=10.1/date_col=2009-03-01/string_col=1/.*.0.parq','.*','','$ERASURECODE_POLICY'
-row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part/data/id=6/bool_col=true/int_col=0/bigint_col=0/float_col=0/double_col=0/date_col=2009-04-01/string_col=0/.*.0.parq','.*','','$ERASURECODE_POLICY'
-row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part/data/id=7/bool_col=false/int_col=1/bigint_col=10/float_col=1.100000023841858/double_col=10.1/date_col=2009-04-01/string_col=1/.*.0.parq','.*','','$ERASURECODE_POLICY'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part/data/id=0/bool_col=true/int_col=0/bigint_col=0/float_col=0/double_col=0/date_col=2009-01-01/string_col=0/binary_col=0/.*.0.parq','.*','','$ERASURECODE_POLICY'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part/data/id=1/bool_col=false/int_col=1/bigint_col=10/float_col=1.100000023841858/double_col=10.1/date_col=2009-01-01/string_col=1/binary_col=1/.*.0.parq','.*','','$ERASURECODE_POLICY'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part/data/id=2/bool_col=true/int_col=0/bigint_col=0/float_col=0/double_col=0/date_col=2009-02-01/string_col=0/binary_col=0/.*.0.parq','.*','','$ERASURECODE_POLICY'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part/data/id=3/bool_col=false/int_col=1/bigint_col=10/float_col=1.100000023841858/double_col=10.1/date_col=2009-02-01/string_col=1/binary_col=1/.*.0.parq','.*','','$ERASURECODE_POLICY'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part/data/id=4/bool_col=true/int_col=0/bigint_col=0/float_col=0/double_col=0/date_col=2009-03-01/string_col=0/binary_col=0/.*.0.parq','.*','','$ERASURECODE_POLICY'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part/data/id=5/bool_col=false/int_col=1/bigint_col=10/float_col=1.100000023841858/double_col=10.1/date_col=2009-03-01/string_col=1/binary_col=1/.*.0.parq','.*','','$ERASURECODE_POLICY'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part/data/id=6/bool_col=true/int_col=0/bigint_col=0/float_col=0/double_col=0/date_col=2009-04-01/string_col=0/binary_col=0/.*.0.parq','.*','','$ERASURECODE_POLICY'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part/data/id=7/bool_col=false/int_col=1/bigint_col=10/float_col=1.100000023841858/double_col=10.1/date_col=2009-04-01/string_col=1/binary_col=1/.*.0.parq','.*','','$ERASURECODE_POLICY'
 ---- TYPES
 STRING, STRING, STRING, STRING
 ====
 ---- QUERY
 SHOW PARTITIONS alltypes_part;
 ---- RESULTS
-'{"id":"0","bool_col":"true","int_col":"0","bigint_col":"0","float_col":"0.0","double_col":"0.0","date_col":"14245","string_col":"0"}',1,1
-'{"id":"1","bool_col":"false","int_col":"1","bigint_col":"10","float_col":"1.1","double_col":"10.1","date_col":"14245","string_col":"1"}',1,1
-'{"id":"2","bool_col":"true","int_col":"0","bigint_col":"0","float_col":"0.0","double_col":"0.0","date_col":"14276","string_col":"0"}',1,1
-'{"id":"3","bool_col":"false","int_col":"1","bigint_col":"10","float_col":"1.1","double_col":"10.1","date_col":"14276","string_col":"1"}',1,1
-'{"id":"4","bool_col":"true","int_col":"0","bigint_col":"0","float_col":"0.0","double_col":"0.0","date_col":"14304","string_col":"0"}',1,1
-'{"id":"5","bool_col":"false","int_col":"1","bigint_col":"10","float_col":"1.1","double_col":"10.1","date_col":"14304","string_col":"1"}',1,1
-'{"id":"6","bool_col":"true","int_col":"0","bigint_col":"0","float_col":"0.0","double_col":"0.0","date_col":"14335","string_col":"0"}',1,1
-'{"id":"7","bool_col":"false","int_col":"1","bigint_col":"10","float_col":"1.1","double_col":"10.1","date_col":"14335","string_col":"1"}',1,1
+'{"id":"0","bool_col":"true","int_col":"0","bigint_col":"0","float_col":"0.0","double_col":"0.0","date_col":"14245","string_col":"0","binary_col":"30"}',1,1
+'{"id":"1","bool_col":"false","int_col":"1","bigint_col":"10","float_col":"1.1","double_col":"10.1","date_col":"14245","string_col":"1","binary_col":"31"}',1,1
+'{"id":"2","bool_col":"true","int_col":"0","bigint_col":"0","float_col":"0.0","double_col":"0.0","date_col":"14276","string_col":"0","binary_col":"30"}',1,1
+'{"id":"3","bool_col":"false","int_col":"1","bigint_col":"10","float_col":"1.1","double_col":"10.1","date_col":"14276","string_col":"1","binary_col":"31"}',1,1
+'{"id":"4","bool_col":"true","int_col":"0","bigint_col":"0","float_col":"0.0","double_col":"0.0","date_col":"14304","string_col":"0","binary_col":"30"}',1,1
+'{"id":"5","bool_col":"false","int_col":"1","bigint_col":"10","float_col":"1.1","double_col":"10.1","date_col":"14304","string_col":"1","binary_col":"31"}',1,1
+'{"id":"6","bool_col":"true","int_col":"0","bigint_col":"0","float_col":"0.0","double_col":"0.0","date_col":"14335","string_col":"0","binary_col":"30"}',1,1
+'{"id":"7","bool_col":"false","int_col":"1","bigint_col":"10","float_col":"1.1","double_col":"10.1","date_col":"14335","string_col":"1","binary_col":"31"}',1,1
 ---- TYPES
 STRING, BIGINT, BIGINT
 ====
@@ -269,7 +272,8 @@ STRING, BIGINT, BIGINT
 # INSERTs with wrong value orderings are rejected.
 insert into alltypes_part
 select bool_col, id, int_col, bigint_col, float_col, double_col,
-       CAST(date_string_col as date FORMAT 'MM/DD/YY'), string_col, 
timestamp_col
+       CAST(date_string_col as date FORMAT 'MM/DD/YY'), string_col, 
timestamp_col,
+       CAST(string_col as BINARY)
 from functional.alltypestiny;
 select count(*) from alltypes_part;
 ---- CATCH
@@ -278,7 +282,8 @@ Expression 'id' (type: INT) would need to be cast to 
BOOLEAN for column 'bool_co
 ---- QUERY
 insert into alltypes_part
 select id, bool_col, int_col, bigint_col, float_col, double_col,
-       CAST(date_string_col as date FORMAT 'MM/DD/YY'), timestamp_col, 
string_col
+       CAST(date_string_col as date FORMAT 'MM/DD/YY'), timestamp_col, 
string_col,
+       CAST(string_col as BINARY)
 from functional.alltypestiny;
 select count(*) from alltypes_part;
 ---- CATCH
@@ -340,6 +345,17 @@ aggregation(SUM, NumRowGroups): 0
 aggregation(SUM, NumFileMetadataRead): 4
 ====
 ---- QUERY
+select count(*) from alltypes_part
+where binary_col = CAST('0' AS BINARY);
+---- RESULTS
+4
+---- TYPES
+BIGINT
+---- RUNTIME_PROFILE
+aggregation(SUM, NumRowGroups): 0
+aggregation(SUM, NumFileMetadataRead): 4
+====
+---- QUERY
 # 'timestamp_col' is not a partitioning column, so min/max stats will not be 
used to
 # eliminate row groups
 select count(*) from alltypes_part
@@ -366,7 +382,8 @@ insert into
     double_col,
     date_col,
     string_col,
-    timestamp_col
+    timestamp_col,
+    binary_col
   )
 select
   id,
@@ -376,7 +393,8 @@ select
   double_col,
   CAST(date_string_col as date FORMAT 'MM/DD/YY'),
   string_col,
-  timestamp_col
+  timestamp_col,
+  CAST(string_col as BINARY)
 from
   functional.alltypestiny;
 select count(*) from alltypes_part;
@@ -390,14 +408,14 @@ show files in alltypes_part_2;
 ---- LABELS
 Path,Size,Partition,EC Policy
 ---- RESULTS
-row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part_2/data/id=0/bool_col=__HIVE_DEFAULT_PARTITION__/int_col=0/bigint_col=0/float_col=0/double_col=0/date_col=2009-01-01/string_col=0/.*.0.parq','.*','','$ERASURECODE_POLICY'
-row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part_2/data/id=1/bool_col=__HIVE_DEFAULT_PARTITION__/int_col=1/bigint_col=10/float_col=1.100000023841858/double_col=10.1/date_col=2009-01-01/string_col=1/.*.0.parq','.*','','$ERASURECODE_POLICY'
-row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part_2/data/id=2/bool_col=__HIVE_DEFAULT_PARTITION__/int_col=0/bigint_col=0/float_col=0/double_col=0/date_col=2009-02-01/string_col=0/.*.0.parq','.*','','$ERASURECODE_POLICY'
-row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part_2/data/id=3/bool_col=__HIVE_DEFAULT_PARTITION__/int_col=1/bigint_col=10/float_col=1.100000023841858/double_col=10.1/date_col=2009-02-01/string_col=1/.*.0.parq','.*','','$ERASURECODE_POLICY'
-row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part_2/data/id=4/bool_col=__HIVE_DEFAULT_PARTITION__/int_col=0/bigint_col=0/float_col=0/double_col=0/date_col=2009-03-01/string_col=0/.*.0.parq','.*','','$ERASURECODE_POLICY'
-row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part_2/data/id=5/bool_col=__HIVE_DEFAULT_PARTITION__/int_col=1/bigint_col=10/float_col=1.100000023841858/double_col=10.1/date_col=2009-03-01/string_col=1/.*.0.parq','.*','','$ERASURECODE_POLICY'
-row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part_2/data/id=6/bool_col=__HIVE_DEFAULT_PARTITION__/int_col=0/bigint_col=0/float_col=0/double_col=0/date_col=2009-04-01/string_col=0/.*.0.parq','.*','','$ERASURECODE_POLICY'
-row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part_2/data/id=7/bool_col=__HIVE_DEFAULT_PARTITION__/int_col=1/bigint_col=10/float_col=1.100000023841858/double_col=10.1/date_col=2009-04-01/string_col=1/.*.0.parq','.*','','$ERASURECODE_POLICY'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part_2/data/id=0/bool_col=__HIVE_DEFAULT_PARTITION__/int_col=0/bigint_col=0/float_col=0/double_col=0/date_col=2009-01-01/string_col=0/binary_col=0/.*.0.parq','.*','','$ERASURECODE_POLICY'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part_2/data/id=1/bool_col=__HIVE_DEFAULT_PARTITION__/int_col=1/bigint_col=10/float_col=1.100000023841858/double_col=10.1/date_col=2009-01-01/string_col=1/binary_col=1/.*.0.parq','.*','','$ERASURECODE_POLICY'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part_2/data/id=2/bool_col=__HIVE_DEFAULT_PARTITION__/int_col=0/bigint_col=0/float_col=0/double_col=0/date_col=2009-02-01/string_col=0/binary_col=0/.*.0.parq','.*','','$ERASURECODE_POLICY'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part_2/data/id=3/bool_col=__HIVE_DEFAULT_PARTITION__/int_col=1/bigint_col=10/float_col=1.100000023841858/double_col=10.1/date_col=2009-02-01/string_col=1/binary_col=1/.*.0.parq','.*','','$ERASURECODE_POLICY'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part_2/data/id=4/bool_col=__HIVE_DEFAULT_PARTITION__/int_col=0/bigint_col=0/float_col=0/double_col=0/date_col=2009-03-01/string_col=0/binary_col=0/.*.0.parq','.*','','$ERASURECODE_POLICY'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part_2/data/id=5/bool_col=__HIVE_DEFAULT_PARTITION__/int_col=1/bigint_col=10/float_col=1.100000023841858/double_col=10.1/date_col=2009-03-01/string_col=1/binary_col=1/.*.0.parq','.*','','$ERASURECODE_POLICY'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part_2/data/id=6/bool_col=__HIVE_DEFAULT_PARTITION__/int_col=0/bigint_col=0/float_col=0/double_col=0/date_col=2009-04-01/string_col=0/binary_col=0/.*.0.parq','.*','','$ERASURECODE_POLICY'
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/alltypes_part_2/data/id=7/bool_col=__HIVE_DEFAULT_PARTITION__/int_col=1/bigint_col=10/float_col=1.100000023841858/double_col=10.1/date_col=2009-04-01/string_col=1/binary_col=1/.*.0.parq','.*','','$ERASURECODE_POLICY'
 ---- TYPES
 STRING, STRING, STRING, STRING
 ====
diff --git 
a/testdata/workloads/functional-query/queries/QueryTest/iceberg-show-files-partition.test
 
b/testdata/workloads/functional-query/queries/QueryTest/iceberg-show-files-partition.test
index 6ceaa1a0a..4cba6a15a 100644
--- 
a/testdata/workloads/functional-query/queries/QueryTest/iceberg-show-files-partition.test
+++ 
b/testdata/workloads/functional-query/queries/QueryTest/iceberg-show-files-partition.test
@@ -4,25 +4,28 @@
 CREATE TABLE iceberg_identity_partitions_showfiles
   (identity_boolean boolean, identity_int int, identity_bigint bigint,
   identity_float float, identity_double double, identity_decimal 
decimal(20,10),
-  identity_date date, identity_timestamp timestamp, identity_string string)
+  identity_date date, identity_timestamp timestamp, identity_string string,
+  identity_binary binary)
 PARTITIONED BY SPEC
   (identity(identity_boolean), identity(identity_int), 
identity(identity_bigint),
   identity(identity_float), identity(identity_double), 
identity(identity_decimal),
-  identity(identity_date), identity(identity_string))
+  identity(identity_date), identity(identity_string), 
identity(identity_binary))
 STORED AS ICEBERG;
 CREATE TABLE iceberg_bucket_partitions_showfiles
   (bucket_int int, bucket_bigint bigint, bucket_decimal decimal(20,10),
-  bucket_date date, bucket_timestamp timestamp, bucket_string string)
+  bucket_date date, bucket_timestamp timestamp, bucket_string string,
+  bucket_binary binary)
 PARTITIONED BY SPEC
   (bucket(5,bucket_int), bucket(5,bucket_bigint), bucket(5,bucket_decimal),
-  bucket(5,bucket_date), bucket(5,bucket_timestamp), bucket(5,bucket_string))
+  bucket(5,bucket_date), bucket(5,bucket_timestamp), bucket(5,bucket_string),
+  bucket(5,bucket_binary))
 STORED AS ICEBERG;
 CREATE TABLE iceberg_truncate_partitions_showfiles
   (truncate_int int, truncate_bigint bigint, truncate_decimal decimal(20,10),
-  truncate_string string)
+  truncate_string string, truncate_binary binary)
 PARTITIONED BY SPEC
   (truncate(5,truncate_int), truncate(5,truncate_bigint), 
truncate(5,truncate_decimal),
-  truncate(5,truncate_string))
+  truncate(5,truncate_string), truncate(5,truncate_binary))
 STORED AS ICEBERG;
 CREATE TABLE iceberg_time_partitions_showfiles
   (year_date date, year_timestamp timestamp, month_date date, month_timestamp 
timestamp,
@@ -75,16 +78,20 @@ INSERT INTO 
iceberg_identity_partitions_showfiles(identity_string) VALUES ("stri
 INSERT INTO iceberg_identity_partitions_showfiles(identity_string) VALUES 
("string-transform-set");
 INSERT INTO iceberg_identity_partitions_showfiles(identity_string) VALUES 
("string"), ("another-string");
 INSERT INTO iceberg_identity_partitions_showfiles(identity_string) VALUES 
("string"), ("another-string");
+INSERT INTO iceberg_identity_partitions_showfiles(identity_binary) VALUES 
(CAST('你好hello' AS BINARY));
+INSERT INTO iceberg_identity_partitions_showfiles(identity_binary) VALUES 
(CAST(UNHEX('FF4433221100') AS BINARY));
 INSERT INTO iceberg_bucket_partitions_showfiles(bucket_int) VALUES (100), 
(200);
 INSERT INTO iceberg_bucket_partitions_showfiles(bucket_bigint) VALUES (100);
 INSERT INTO iceberg_bucket_partitions_showfiles(bucket_decimal) VALUES (10);
 INSERT INTO iceberg_bucket_partitions_showfiles(bucket_date) VALUES 
("1526-01-12");
 INSERT INTO iceberg_bucket_partitions_showfiles(bucket_string) VALUES 
("string");
 INSERT INTO iceberg_bucket_partitions_showfiles(bucket_timestamp) VALUES 
("1583-04-02 03:00:00");
+INSERT INTO iceberg_bucket_partitions_showfiles(bucket_binary) VALUES 
(CAST(UNHEX('FF4433221100') AS BINARY));
 INSERT INTO iceberg_truncate_partitions_showfiles(truncate_int) VALUES 
(131072);
 INSERT INTO iceberg_truncate_partitions_showfiles(truncate_bigint) VALUES 
(68719476736);
 INSERT INTO iceberg_truncate_partitions_showfiles(truncate_decimal) VALUES 
(100000.1234567891);
 INSERT INTO iceberg_truncate_partitions_showfiles(truncate_string) VALUES 
('thisisalongstring');
+INSERT INTO iceberg_truncate_partitions_showfiles(truncate_binary) VALUES 
(CAST('你好hello' AS BINARY));
 INSERT INTO iceberg_time_partitions_showfiles(year_date) VALUES ('2077-05-06');
 INSERT INTO iceberg_time_partitions_showfiles(month_date) VALUES 
('2023-12-01');
 INSERT INTO iceberg_time_partitions_showfiles(day_date) VALUES ('2023-12-01');
@@ -197,6 +204,22 @@ 
row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/iceberg_identity_partitions_sho
 STRING, STRING, STRING, STRING
 ====
 ---- QUERY
+# Show files for identity partitions - binary "CAST('你好hello' AS BINARY)"
+SHOW FILES IN iceberg_identity_partitions_showfiles PARTITION 
(identity(identity_binary) = CAST('你好hello' AS BINARY));
+---- RESULTS: VERIFY_IS_EQUAL_SORTED
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/iceberg_identity_partitions_showfiles/data/.*identity_binary=%E4%BD%A0%E5%A5%BDhello.*_data.*.parq','.*','','$ERASURECODE_POLICY'
+---- TYPES
+STRING, STRING, STRING, STRING
+====
+---- QUERY
+# Show files for identity partitions - binary "CAST(UNHEX('FF4433221100') AS 
BINARY)"
+SHOW FILES IN iceberg_identity_partitions_showfiles PARTITION 
(identity(identity_binary) = CAST(UNHEX('FF4433221100') AS BINARY));
+---- RESULTS: VERIFY_IS_EQUAL_SORTED
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/iceberg_identity_partitions_showfiles/data/.*identity_binary=%FFD3%22%11%00.*_data.*.parq','.*','','$ERASURECODE_POLICY'
+---- TYPES
+STRING, STRING, STRING, STRING
+====
+---- QUERY
 # Show files for bucket partitions - int bucket range
 SHOW FILES IN iceberg_bucket_partitions_showfiles PARTITION (bucket(5, 
bucket_int) in (1,2));
 ---- RESULTS: VERIFY_IS_EQUAL_SORTED
@@ -246,6 +269,14 @@ 
row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/iceberg_bucket_partitions_showf
 STRING, STRING, STRING, STRING
 ====
 ---- QUERY
+# Show files for bucket partitions - binary bucket
+SHOW FILES IN iceberg_bucket_partitions_showfiles PARTITION (bucket(5, 
bucket_binary) = 1);
+---- RESULTS
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/iceberg_bucket_partitions_showfiles/data/.*bucket_binary_bucket=1.*_data.*.parq','.*','','$ERASURECODE_POLICY'
+---- TYPES
+STRING, STRING, STRING, STRING
+====
+---- QUERY
 # Show files for truncate partitions - int truncate
 SHOW FILES IN iceberg_truncate_partitions_showfiles PARTITION (truncate(5, 
truncate_int) = 131070);
 ---- RESULTS
@@ -278,6 +309,14 @@ 
row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/iceberg_truncate_partitions_sho
 STRING, STRING, STRING, STRING
 ====
 ---- QUERY
+# Show files for truncate partitions - binary truncate
+SHOW FILES IN iceberg_truncate_partitions_showfiles PARTITION (truncate(5, 
truncate_binary) = CAST(UNHEX("E4BDA0E5A5") AS BINARY));
+---- RESULTS
+row_regex:'$NAMENODE/test-warehouse/$DATABASE.db/iceberg_truncate_partitions_showfiles/data/.*truncate_binary_trunc=%E4%BD%A0%E5%A5.*_data.*.parq','.*','','$ERASURECODE_POLICY'
+---- TYPES
+STRING, STRING, STRING, STRING
+====
+---- QUERY
 # Show files for time partitions - year from date
 SHOW FILES IN iceberg_time_partitions_showfiles PARTITION (year(year_date) = 
'2077');
 ---- RESULTS

Reply via email to