cloud-fan commented on code in PR #52153:
URL: https://github.com/apache/spark/pull/52153#discussion_r2312799843
##########
sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala:
##########
@@ -2785,6 +2785,41 @@ class DataFrameSuite extends QueryTest
     val df1 = df.select("a").orderBy("b").orderBy("all")
     checkAnswer(df1, Seq(Row(1), Row(4)))
   }
+
+  test("SPARK-53401: repartitionById - should partition rows to the specified " +
+    "partition ID") {
+    val numPartitions = 10
+    val df = spark.range(100).withColumn("p_id", col("id") % numPartitions)
+
+    val repartitioned = df.repartitionById(numPartitions, $"p_id")
+    val result = repartitioned.withColumn("actual_p_id", spark_partition_id())
+
+    assert(result.filter(col("p_id") =!= col("actual_p_id")).count() == 0)
+
+    assert(result.rdd.getNumPartitions == numPartitions)
+  }
+
+  test("SPARK-53401: repartitionById - should fail when partition ID is null") {
+    val df = spark.range(10).withColumn("p_id",
+      when(col("id") < 5, col("id")).otherwise(lit(null).cast("long"))
+    )
+    val repartitioned = df.repartitionById(5, $"p_id")
+
+    val e = intercept[SparkException] {
+      repartitioned.collect()
+    }
+    assert(e.getCause.isInstanceOf[IllegalArgumentException])
+    assert(e.getCause.getMessage.contains("The partition ID expression must not be null."))
+  }
+
+  test("SPARK-53401: repartitionById - should fail analysis for non-integral types") {
+    val df = spark.range(5).withColumn("s", lit("a"))
+    val e = intercept[AnalysisException] {
+      df.repartitionById(5, $"s").collect()
+    }
+    // Should fail with type error from DirectShufflePartitionID expression
+    assert(e.getMessage.contains("requires an integral type"))

Review Comment:
   where do we throw this error now?
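For context on the question, here is a minimal sketch of the usual Catalyst pattern for raising this kind of error: the expression reports a failure from `checkInputDataTypes()`, which the analyzer invokes during resolution, so the `AnalysisException` surfaces at analysis time rather than at shuffle execution. The PR's actual `DirectShufflePartitionID` is not shown in this hunk, so the class body below is an assumption reverse-engineered from the tests, not the real implementation.

```scala
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
import org.apache.spark.sql.catalyst.expressions.{Expression, UnaryExpression}
import org.apache.spark.sql.catalyst.expressions.codegen.CodegenFallback
import org.apache.spark.sql.types.{DataType, IntegerType, IntegralType}

// Hypothetical shape of the expression; the real implementation in the PR
// may differ.
case class DirectShufflePartitionID(child: Expression)
  extends UnaryExpression with CodegenFallback {

  // Analysis-time check: the analyzer calls checkInputDataTypes() while
  // resolving the plan, so a non-integral column (e.g. a string) fails here
  // with an AnalysisException containing "requires an integral type",
  // before any job is launched.
  override def checkInputDataTypes(): TypeCheckResult = child.dataType match {
    case _: IntegralType => TypeCheckResult.TypeCheckSuccess
    case other => TypeCheckResult.TypeCheckFailure(
      s"DirectShufflePartitionID requires an integral type, but got ${other.catalogString}")
  }

  override def dataType: DataType = IntegerType
  override def nullable: Boolean = false

  // Runtime check: a null cannot be mapped to a partition, matching the
  // IllegalArgumentException asserted in the null test above.
  override def eval(input: InternalRow): Any = {
    val value = child.eval(input)
    if (value == null) {
      throw new IllegalArgumentException("The partition ID expression must not be null.")
    }
    value.asInstanceOf[Number].intValue()
  }

  override protected def withNewChildInternal(newChild: Expression): DirectShufflePartitionID =
    copy(child = newChild)
}
```

Under this pattern, the third test's error comes from the analyzer's type-check pass, while the second test's error is thrown per-row during evaluation and reaches the driver wrapped in a `SparkException`.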