richardc-db commented on code in PR #50849:
URL: https://github.com/apache/spark/pull/50849#discussion_r2096557108


##########
sql/core/src/main/scala/org/apache/spark/sql/execution/ExistingRDD.scala:
##########
@@ -319,3 +319,51 @@ case class RDDScanExec(
 
   override def getStream: Option[SparkDataStream] = stream
 }
+
+/**
+ * A special case of RDDScanExec that is used to represent a scan without a `FROM` clause.
+ * For example, 'select version()'.
+ *
+ * We do not extend `RDDScanExec` in order to avoid complexity due to `TreeNode.makeCopy` and
+ * `TreeNode`'s general use of reflection.
+ */
+case class OneRowRelationExec() extends LeafExecNode
+  with StreamSourceAwareSparkPlan
+  with InputRDDCodegen {
+
+  override val nodeName: String = s"Scan OneRowRelation"
+
+  override val output: Seq[Attribute] = Nil
+
+  val rdd = session.sparkContext.parallelize(Seq(InternalRow()), 1)
+
+  override lazy val metrics = Map(
+    "numOutputRows" -> SQLMetrics.createMetric(sparkContext, "number of output rows"))
+
+  protected override def doExecute(): RDD[InternalRow] = {
+    val numOutputRows = longMetric("numOutputRows")
+    rdd.mapPartitionsWithIndexInternal { (index, iter) =>
+      val proj = UnsafeProjection.create(schema)
+      proj.initialize(index)
+      iter.map { r =>
+        numOutputRows += 1
+        proj(r)
+      }
+    }
+  }
+
+  override def simpleString(maxFields: Int): String = {
+    s"$nodeName${truncatedString(output, "[", ",", "]", maxFields)}"

Review Comment:
   The default implementation returns `Scan OneRowRelation`, while the existing implementation (using RDDScan) returns `Scan OneRowRelation[]`. I figured we shouldn't change this, on the off chance that someone is relying on it.
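
   For illustration, here is a minimal standalone Scala sketch of why the override keeps the trailing `[]` for an empty output. It uses a simplified stand-in for catalyst's `truncatedString`, not the real implementation, so treat it as an approximation of the behavior rather than Spark code:

   // Standalone sketch: shows why the overridden simpleString keeps the trailing "[]"
   // that the previous RDDScanExec-based plan string produced.
   object SimpleStringSketch {
     // Simplified stand-in for catalyst's truncatedString: mkString with basic truncation.
     def truncatedString(
         fields: Seq[String], start: String, sep: String, end: String, maxFields: Int): String =
       if (fields.length > maxFields) (fields.take(maxFields) :+ "...").mkString(start, sep, end)
       else fields.mkString(start, sep, end)

     def main(args: Array[String]): Unit = {
       val nodeName = "Scan OneRowRelation"
       val output = Seq.empty[String] // OneRowRelationExec has no output attributes

       // Overridden simpleString: node name plus the (empty) attribute list in brackets.
       println(s"$nodeName${truncatedString(output, "[", ",", "]", 25)}") // Scan OneRowRelation[]

       // The default simpleString for this node would print only the node name.
       println(nodeName)                                                  // Scan OneRowRelation
     }
   }

   With no output attributes, the bracketed list collapses to `[]`, which is exactly the suffix the old RDDScanExec-based plan string carried.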
########## sql/core/src/main/scala/org/apache/spark/sql/execution/ExistingRDD.scala: ########## @@ -319,3 +319,51 @@ case class RDDScanExec( override def getStream: Option[SparkDataStream] = stream } + +/** + * A special case of RDDScanExec that is used to represent a scan without a `FROM` clause. + * For example, 'select version()'. + * + * We do not extend `RDDScanExec` in order to avoid complexity due to `TreeNode.makeCopy` and + * `TreeNode`'s general use of reflection. + */ +case class OneRowRelationExec() extends LeafExecNode + with StreamSourceAwareSparkPlan + with InputRDDCodegen { + + override val nodeName: String = s"Scan OneRowRelation" + + override val output: Seq[Attribute] = Nil + + val rdd = session.sparkContext.parallelize(Seq(InternalRow()), 1) + + override lazy val metrics = Map( + "numOutputRows" -> SQLMetrics.createMetric(sparkContext, "number of output rows")) + + protected override def doExecute(): RDD[InternalRow] = { + val numOutputRows = longMetric("numOutputRows") + rdd.mapPartitionsWithIndexInternal { (index, iter) => + val proj = UnsafeProjection.create(schema) + proj.initialize(index) + iter.map { r => + numOutputRows += 1 + proj(r) + } + } + } + + override def simpleString(maxFields: Int): String = { + s"$nodeName${truncatedString(output, "[", ",", "]", maxFields)}" Review Comment: the default implementation returns `Scan OneRowRelation`, while the existing implementation (using RDDScan) returns `Scan OneRowRelation[]`. I figured we shouldn't change this in the off chance that someone is relying on it. -- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org For queries about this service, please contact Infrastructure at: us...@infra.apache.org --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org For additional commands, e-mail: reviews-h...@spark.apache.org