JingsongLi commented on a change in pull request #10721: [FLINK-15429][hive] HiveObjectConversion implementations need to hand…
URL: https://github.com/apache/flink/pull/10721#discussion_r361930638
########## File path: flink-connectors/flink-connector-hive/src/main/java/org/apache/flink/table/functions/hive/conversion/HiveInspectors.java
##########
@@ -146,9 +146,9 @@ public static HiveObjectConversion getConversion(ObjectInspector inspector, Logi
 		} else if (inspector instanceof TimestampObjectInspector) {
 			conversion = hiveShim::toHiveTimestamp;
 		} else if (inspector instanceof HiveCharObjectInspector) {
-			conversion = o -> new HiveChar((String) o, ((CharType) dataType).getLength());
+			conversion = o -> o == null ? null : new HiveChar((String) o, ((CharType) dataType).getLength());
 		} else if (inspector instanceof HiveVarcharObjectInspector) {
-			conversion = o -> new HiveVarchar((String) o, ((VarCharType) dataType).getLength());
+			conversion = o -> o == null ? null : new HiveVarchar((String) o, ((VarCharType) dataType).getLength());
 		} else if (inspector instanceof HiveDecimalObjectInspector) {
 			conversion = o -> o == null ? null : HiveDecimal.create((BigDecimal) o);
 		} else {
##########

Review comment:
   Do any of the cases cover `WritableHiveObjectConversion`? Is it dead code?
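For context, the change applies the same `o == null ? null : ...` guard that the decimal branch already uses to the char and varchar branches. Below is a minimal, self-contained sketch of that null-safe conversion idea in plain Java. The names `ObjectConversion`, `nullSafe`, and `NullSafeConversionSketch` are hypothetical, used only for illustration; they are not the actual `HiveObjectConversion` API in the file above, and the `toUpperCase` conversion merely stands in for constructing a `HiveChar`/`HiveVarchar`.

```java
import java.util.function.Function;

// Hypothetical stand-in for HiveObjectConversion, used only to illustrate the pattern.
@FunctionalInterface
interface ObjectConversion {
	Object toHiveObject(Object o);
}

public class NullSafeConversionSketch {

	// Wraps a conversion so that null inputs pass through untouched,
	// mirroring the `o == null ? null : ...` guards added in the diff.
	static ObjectConversion nullSafe(Function<Object, Object> conversion) {
		return o -> o == null ? null : conversion.apply(o);
	}

	public static void main(String[] args) {
		// A char-like conversion that would throw on null without the guard.
		ObjectConversion toUpper = nullSafe(o -> ((String) o).toUpperCase());

		System.out.println(toUpper.toHiveObject("abc")); // ABC
		System.out.println(toUpper.toHiveObject(null));  // null instead of a NullPointerException
	}
}
```

One possible design note: centralizing the guard in a wrapper like this would avoid repeating the ternary in every branch of `getConversion`, though the PR's per-branch guards achieve the same null handling.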