beliefer commented on code in PR #49961:
URL: https://github.com/apache/spark/pull/49961#discussion_r1990448785


##########
sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/python/PythonScan.scala:
##########
@@ -16,26 +16,44 @@
  */
 package org.apache.spark.sql.execution.datasources.v2.python
 
+import org.apache.commons.lang3.StringUtils
+
 import org.apache.spark.JobArtifactSet
+import org.apache.spark.sql.SparkSession
 import org.apache.spark.sql.connector.metric.CustomMetric
 import org.apache.spark.sql.connector.read._
 import org.apache.spark.sql.connector.read.streaming.MicroBatchStream
+import org.apache.spark.sql.internal.connector.SupportsMetadata
+import org.apache.spark.sql.sources.Filter
 import org.apache.spark.sql.types.StructType
 import org.apache.spark.sql.util.CaseInsensitiveStringMap
-
+import org.apache.spark.util.Utils
 
 class PythonScan(
-     ds: PythonDataSourceV2,
-     shortName: String,
-     outputSchema: StructType,
-     options: CaseInsensitiveStringMap) extends Scan {
+    ds: PythonDataSourceV2,
+    shortName: String,
+    outputSchema: StructType,
+    options: CaseInsensitiveStringMap,
+    supportedFilters: Array[Filter]
+) extends Scan
+    with SupportsMetadata {
+  private lazy val sparkSession = SparkSession.active
 
   override def toBatch: Batch = new PythonBatch(ds, shortName, outputSchema, options)
 
   override def toMicroBatchStream(checkpointLocation: String): MicroBatchStream =
     new PythonMicroBatchStream(ds, shortName, outputSchema, options)
 
-  override def description: String = "(Python)"
+  override def description: String = {
+    val metadataStr = getMetaData().toSeq.sorted.map {
+      case (key, value) =>
+        val maxMetadataValueLength = sparkSession.sessionState.conf.maxMetadataStringLength
+        val redactedValue =
+          Utils.redact(sparkSession.sessionState.conf.stringRedactionPattern, value)
+        key + ": " + StringUtils.abbreviate(redactedValue, maxMetadataValueLength)

Review Comment:
   ```suggestion
           val redactedValue =
             Utils.redact(sparkSession.sessionState.conf.stringRedactionPattern, value)
           key + ": " + StringUtils.abbreviate(
             redactedValue, sparkSession.sessionState.conf.maxMetadataStringLength)
   ```
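
   A side note for readers of this thread (not part of the review itself): the suggestion drops the intermediate `maxMetadataValueLength` val and reads `maxMetadataStringLength` directly at the call site. As a rough sketch of what the abbreviation step does, here is a standalone Scala snippet using only `StringUtils.abbreviate`; the sample value and the width of 25 are made up for illustration, and the redaction step (`Utils.redact` with the pattern from `spark.sql.redaction.string.regex`) that runs first in the PR is omitted:

   ```scala
   import org.apache.commons.lang3.StringUtils

   object AbbreviateSketch {
     def main(args: Array[String]): Unit = {
       // A made-up metadata value of the kind description() renders,
       // e.g. pushed filters serialized as a string.
       val value = "PushedFilters: [IsNotNull(id), GreaterThan(id, 1)]"

       // StringUtils.abbreviate keeps at most maxWidth characters and replaces
       // the tail with "..." when the input is longer. In the PR, maxWidth comes
       // from spark.sql.maxMetadataStringLength; 25 is used here only so the
       // truncation is visible.
       println(StringUtils.abbreviate(value, 25))
       // prints: PushedFilters: [IsNotN...
     }
   }
   ```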



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org

