allisonwang-db commented on code in PR #49818:
URL: https://github.com/apache/spark/pull/49818#discussion_r1946908178


##########
core/src/main/scala/org/apache/spark/api/python/PythonRunner.scala:
##########
@@ -90,13 +91,44 @@ private[spark] object PythonEvalType {
   }
 }
 
-private[spark] object BasePythonRunner {
+private[spark] object BasePythonRunner extends Logging {
 
   private[spark] lazy val faultHandlerLogDir = Utils.createTempDir(namePrefix = "faulthandler")
 
   private[spark] def faultHandlerLogPath(pid: Int): Path = {
     new File(faultHandlerLogDir, pid.toString).toPath
   }
+
+  private[spark] def pythonWorkerStatusMessageWithContext(
+      handle: Option[ProcessHandle],
+      worker: PythonWorker,
+      hasInputs: Boolean): MessageWithContext = {
+    log"handle.map(_.isAlive) = " +

Review Comment:
   nit: Instead of outputting the raw `handle.map(_.isAlive)` expression, can we explain a bit what it means? E.g., what might the issue be when the handle is not alive, etc.
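   
   For illustration, a minimal sketch of a message that explains liveness instead of echoing the expression. `WorkerStatus.describe` and its wording are hypothetical, not the PR's actual helper or Spark's `MessageWithContext` logging API; only `java.lang.ProcessHandle` is a real API here:
   
   ```scala
   // Hypothetical sketch in plain Scala showing how the liveness check
   // could be spelled out for the reader of the log.
   object WorkerStatus {
     def describe(handle: Option[ProcessHandle], hasInputs: Boolean): String = {
       val liveness = handle match {
         case Some(h) if h.isAlive =>
           s"worker process ${h.pid} is alive"
         case Some(h) =>
           // A dead handle typically means the worker crashed or was killed
           // before finishing -- the interesting case when debugging.
           s"worker process ${h.pid} has exited unexpectedly"
         case None =>
           // No handle at all, e.g. when no direct process handle was
           // captured at launch time.
           "no process handle available for the worker"
       }
       s"$liveness; inputs written to worker = $hasInputs"
     }
   }
   ```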



##########
core/src/main/scala/org/apache/spark/internal/config/Python.scala:
##########
@@ -69,4 +69,18 @@ private[spark] object Python {
     .version("3.2.0")
     .booleanConf
     .createWithDefault(false)
+
+  private val PYTHON_WORKER_IDLE_TIMEOUT_SECONDS_KEY = "spark.python.worker.idleTimeoutSeconds"
+
+  val PYTHON_WORKER_IDLE_TIMEOUT_SECONDS = ConfigBuilder(PYTHON_WORKER_IDLE_TIMEOUT_SECONDS_KEY)

Review Comment:
   Just curious: why do we need both a static conf and a SQL conf for this?
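   
   For context on the question: a common Spark pattern is a core conf plus a SQL conf that falls back to it, so the value can also be set per-session. A hedged sketch of that pattern follows; the doc text, versions, default, and the SQL-side names are assumptions, not the PR's actual values:
   
   ```scala
   // In core/.../internal/config/Python.scala (relies on the file's
   // existing imports, e.g. java.util.concurrent.TimeUnit):
   val PYTHON_WORKER_IDLE_TIMEOUT_SECONDS = ConfigBuilder(PYTHON_WORKER_IDLE_TIMEOUT_SECONDS_KEY)
     .doc("Seconds an idle Python worker waits for new input before it exits; " +
       "0 means it never times out.")
     .version("4.0.0")
     .timeConf(TimeUnit.SECONDS)
     .createWithDefault(0L)
   
   // In sql/.../SQLConf.scala, a session-level conf can reuse the core conf
   // as its default via fallbackConf, which is the usual reason both exist:
   val PYSPARK_WORKER_IDLE_TIMEOUT_SECONDS =
     buildConf("spark.sql.execution.pyspark.idleTimeoutSeconds")
       .version("4.0.0")
       .fallbackConf(Python.PYTHON_WORKER_IDLE_TIMEOUT_SECONDS)
   ```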


