ahshahid commented on code in PR #48252:
URL: https://github.com/apache/spark/pull/48252#discussion_r1844722744
##########
sql/api/src/main/scala/org/apache/spark/sql/catalyst/JavaTypeInference.scala:
##########
@@ -148,34 +163,180 @@ object JavaTypeInference {
     // TODO: we should only collect properties that have getter and setter. However, some tests
     // pass in scala case class as java bean class which doesn't have getter and setter.
     val properties = getJavaBeanReadableProperties(c)
-    // add type variables from inheritance hierarchy of the class
-    val classTV = JavaTypeUtils.getTypeArguments(c, classOf[Object]).asScala.toMap ++
-      typeVariables
-    // Note that the fields are ordered by name.
-    val fields = properties.map { property =>
-      val readMethod = property.getReadMethod
-      val encoder = encoderFor(readMethod.getGenericReturnType, seenTypeSet + c, classTV)
-      // The existence of `javax.annotation.Nonnull`, means this field is not nullable.
-      val hasNonNull = readMethod.isAnnotationPresent(classOf[Nonnull])
-      EncoderField(
-        property.getName,
-        encoder,
-        encoder.nullable && !hasNonNull,
-        Metadata.empty,
-        Option(readMethod.getName),
-        Option(property.getWriteMethod).map(_.getName))
+
+    // if the properties is empty and this is not a top level enclosing class, then we
+    // should not consider class as bean, as otherwise it will be treated as empty schema
+    // and loose the data on deser.
+    if (properties.isEmpty && seenTypeSet.nonEmpty) {
+      findBestEncoder(Seq(c), seenTypeSet, typeVariables, None, serializableEncodersOnly = true)

Review Comment:
   will revert on this.. by going through the code.
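
For readers following the thread, below is a minimal sketch of the kind of nested, property-less type the new `properties.isEmpty && seenTypeSet.nonEmpty` branch appears to target. The class names `Opaque` and `Outer` are hypothetical and not taken from the PR; the expectations in the comments only restate the intent described in the diff's own comment (avoid an empty bean schema for such a field and fall back to a serialization-based encoder).

import org.apache.spark.sql.{Encoder, Encoders}

// Hypothetical nested type with no readable bean properties (no getters):
// without the guard added in the diff, bean inference would derive an empty
// struct schema for it and `payload` would be lost on deserialization.
class Opaque(private val payload: Int) extends Serializable

// Outer bean with a regular getter/setter pair exposing the property-less type.
class Outer extends Serializable {
  private var value: Opaque = _
  def getValue: Opaque = value
  def setValue(v: Opaque): Unit = { value = v }
}

object EmptyBeanSketch {
  // Per the diff's comment, the nested Opaque field is expected to fall back to
  // a serialization-based encoder rather than being encoded as an empty schema.
  val outerEncoder: Encoder[Outer] = Encoders.bean(classOf[Outer])
}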