cloud-fan commented on code in PR #49340:
URL: https://github.com/apache/spark/pull/49340#discussion_r1907056048
########## sql/api/src/main/scala/org/apache/spark/sql/types/StringType.scala: ##########
@@ -129,6 +130,60 @@ case object StringType
 
 sealed trait StringConstraint
 
+case object StringHelper extends PartialOrdering[StringConstraint] {
+  override def tryCompare(x: StringConstraint, y: StringConstraint): Option[Int] = {
+    (x, y) match {
+      case (NoConstraint, NoConstraint) => Some(0)
+      case (NoConstraint, _) => Some(-1)
+      case (_, NoConstraint) => Some(1)
+      case (FixedLength(l1), FixedLength(l2)) => Some(l2.compareTo(l1))
+      case (FixedLength(l1), MaxLength(l2)) if l1 <= l2 => Some(1)
+      case (MaxLength(l1), FixedLength(l2)) if l1 >= l2 => Some(-1)
+      case (MaxLength(l1), MaxLength(l2)) => Some(l2.compareTo(l1))
+      case _ => None
+    }
+  }
+
+  override def lteq(x: StringConstraint, y: StringConstraint): Boolean = {
+    tryCompare(x, y).exists(_ <= 0)
+  }
+
+  override def gteq(x: StringConstraint, y: StringConstraint): Boolean = {
+    tryCompare(x, y).exists(_ >= 0)
+  }
+
+  override def equiv(x: StringConstraint, y: StringConstraint): Boolean = {
+    tryCompare(x, y).contains(0)
+  }
+
+  def isPlainString(s: StringType): Boolean = equiv(s.constraint, NoConstraint)
+
+  def isMoreConstrained(a: StringType, b: StringType): Boolean =
+    gteq(a.constraint, b.constraint)
+
+  def tightestCommonString(s1: StringType, s2: StringType): Option[StringType] = {
+    if (s1.collationId != s2.collationId) {
+      return None
+    }
+    if (!SqlApiConf.get.preserveCharVarcharTypeInfo) {
+      return Some(StringType(s1.collationId))

Review Comment:
   It makes more sense to return normal StringType (NoConstraint) here.
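
For readers skimming the diff, here is a minimal, self-contained sketch of how this PartialOrdering behaves: the more constrained side compares greater, and incomparable constraints yield None. The stand-in constraint classes and the ConstraintOrderingDemo object below are illustrative only (they are not the actual definitions in StringType.scala); the comparison logic itself is copied from the diff above.

// Stand-ins mirroring the constraint hierarchy shown in the diff
// (hypothetical names, for illustration only).
sealed trait StringConstraint
case object NoConstraint extends StringConstraint
case class FixedLength(length: Int) extends StringConstraint // CHAR(n)
case class MaxLength(length: Int) extends StringConstraint   // VARCHAR(n)

object ConstraintOrderingDemo extends App {
  import scala.math.PartialOrdering

  // Same comparison rules as StringHelper in the diff: a constraint is
  // "greater" when it is tighter than (subsumed by) the other one.
  val ordering: PartialOrdering[StringConstraint] = new PartialOrdering[StringConstraint] {
    override def tryCompare(x: StringConstraint, y: StringConstraint): Option[Int] = (x, y) match {
      case (NoConstraint, NoConstraint) => Some(0)
      case (NoConstraint, _) => Some(-1)
      case (_, NoConstraint) => Some(1)
      case (FixedLength(l1), FixedLength(l2)) => Some(l2.compareTo(l1))
      case (FixedLength(l1), MaxLength(l2)) if l1 <= l2 => Some(1)
      case (MaxLength(l1), FixedLength(l2)) if l1 >= l2 => Some(-1)
      case (MaxLength(l1), MaxLength(l2)) => Some(l2.compareTo(l1))
      case _ => None
    }
    override def lteq(x: StringConstraint, y: StringConstraint): Boolean =
      tryCompare(x, y).exists(_ <= 0)
  }

  // CHAR(5) always fits within VARCHAR(10), so the fixed length compares greater.
  println(ordering.tryCompare(FixedLength(5), MaxLength(10))) // Some(1)
  // VARCHAR(20) is looser than VARCHAR(10): the tighter bound compares greater.
  println(ordering.tryCompare(MaxLength(20), MaxLength(10)))  // Some(-1)
  // CHAR(8) vs VARCHAR(5): neither subsumes the other, so they are incomparable.
  println(ordering.tryCompare(FixedLength(8), MaxLength(5)))  // None
  // NoConstraint (a plain string) is the least constrained of all.
  println(ordering.lteq(NoConstraint, FixedLength(3)))        // true
}

This "tighter is greater" convention is what lets helpers such as isMoreConstrained and tightestCommonString in the diff be phrased directly in terms of gteq and tryCompare on the underlying constraints.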