This is an automated email from the ASF dual-hosted git repository.
jiayu pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/sedona.git
The following commit(s) were added to refs/heads/master by this push:
     new b1dca8675e [GH-2140] make DataFrameShims usable outside of sedona (#2141)
b1dca8675e is described below
commit b1dca8675e2119cc5fb76ed5fbf1f825fc40844c
Author: James Willis <[email protected]>
AuthorDate: Tue Jul 22 23:46:35 2025 -0700
[GH-2140] make DataFrameShims usable outside of sedona (#2141)
---
.../apache/spark/sql/sedona_sql/DataFrameShims.scala | 19 +++++++++----------
1 file changed, 9 insertions(+), 10 deletions(-)
diff --git a/spark/common/src/main/scala-spark-3/org/apache/spark/sql/sedona_sql/DataFrameShims.scala b/spark/common/src/main/scala-spark-3/org/apache/spark/sql/sedona_sql/DataFrameShims.scala
index 32612d5f1a..20d38759c2 100644
--- a/spark/common/src/main/scala-spark-3/org/apache/spark/sql/sedona_sql/DataFrameShims.scala
+++ b/spark/common/src/main/scala-spark-3/org/apache/spark/sql/sedona_sql/DataFrameShims.scala
@@ -18,24 +18,23 @@
*/
package org.apache.spark.sql.sedona_sql
-import scala.reflect.ClassTag
-
import org.apache.spark.rdd.RDD
-import org.apache.spark.sql.{Column, DataFrame, SparkSession}
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.{Expression, Literal}
-import org.apache.spark.sql.expressions.UserDefinedAggregateFunction
import org.apache.spark.sql.execution.aggregate.ScalaUDAF
-import org.apache.spark.sql.functions.lit
+import org.apache.spark.sql.expressions.UserDefinedAggregateFunction
import org.apache.spark.sql.types.StructType
+import org.apache.spark.sql.{Column, DataFrame, SparkSession}
+
+import scala.reflect.ClassTag
-object DataFrameShims {
+private[sql] object DataFrameShims {
-  private[sedona_sql] def wrapExpression[E <: Expression: ClassTag](args: Any*): Column = {
+  def wrapExpression[E <: Expression: ClassTag](args: Any*): Column = {
wrapVarArgExpression[E](args)
}
-  private[sedona_sql] def wrapVarArgExpression[E <: Expression: ClassTag](arg: Seq[Any]): Column = {
+  def wrapVarArgExpression[E <: Expression: ClassTag](arg: Seq[Any]): Column = {
val runtimeClass = implicitly[ClassTag[E]].runtimeClass
val exprArgs = arg.map(_ match {
case c: Column => c.expr
@@ -49,7 +48,7 @@ object DataFrameShims {
Column(expressionInstance)
}
-  private[sedona_sql] def wrapAggregator[A <: UserDefinedAggregateFunction: ClassTag](arg: Any*): Column = {
+  def wrapAggregator[A <: UserDefinedAggregateFunction: ClassTag](arg: Any*): Column = {
val runtimeClass = implicitly[ClassTag[A]].runtimeClass
val exprArgs = arg.map(_ match {
case c: Column => c.expr
@@ -65,7 +64,7 @@ object DataFrameShims {
Column(scalaAggregator)
}
- private[sedona_sql] def createDataFrame(
+ def createDataFrame(
sparkSession: SparkSession,
rdd: RDD[InternalRow],
schema: StructType): DataFrame = {
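
For context: the net effect of this change is that the helper methods lose their
private[sedona_sql] qualifier while the enclosing object becomes private[sql], so
code compiled anywhere under the org.apache.spark.sql package tree can now reach
the shims. A minimal sketch of such a caller follows; the package name, object
name, and constructor arguments are illustrative assumptions, and ST_GeomFromWKT
stands in for whichever Sedona Catalyst expression class is on the classpath.

    package org.apache.spark.sql.myextension

    import org.apache.spark.sql.Column
    import org.apache.spark.sql.sedona_sql.DataFrameShims
    import org.apache.spark.sql.sedona_sql.expressions.ST_GeomFromWKT

    object MyGeoFunctions {
      // DataFrameShims is now private[sql] rather than private[sedona_sql],
      // so this call compiles because the caller lives under org.apache.spark.sql.
      def geomFromWKT(wkt: Column): Column =
        DataFrameShims.wrapExpression[ST_GeomFromWKT](wkt)
    }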