anishshri-db commented on code in PR #50595: URL: https://github.com/apache/spark/pull/50595#discussion_r2064680618
########## sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/state/StateStore.scala: ########## @@ -1111,60 +1149,52 @@ object StateStore extends Logging { } } + // Block until we can process this partition + private def awaitProcessThisPartition( + id: StateStoreProviderId, + storeConf: StateStoreConf): Boolean = { + maintenanceThreadPoolLock.synchronized { + val timeoutMs = storeConf.stateStoreMaintenanceProcessingTimeout * 1000 + val endTime = System.currentTimeMillis() + timeoutMs + + // Try to process immediately first + if (processThisPartition(id)) return true + + // Wait with timeout and process after notification + def timeRemaining: Long = endTime - System.currentTimeMillis() + + while (timeRemaining > 0) { + maintenanceThreadPoolLock.wait(Math.min(timeRemaining, 10000)) + if (processThisPartition(id)) return true + } + + // Timeout reached without successfully processing the partition + return false + } + } + + private def doMaintenance(): Unit = doMaintenance(StateStoreConf.empty) + /** * Execute background maintenance task in all the loaded store providers if they are still * the active instances according to the coordinator. */ - private def doMaintenance(): Unit = { + private def doMaintenance( + storeConf: StateStoreConf + ): Unit = { logDebug("Doing maintenance") if (SparkEnv.get == null) { throw new IllegalStateException("SparkEnv not active, cannot do maintenance on StateStores") } + + // Process queued providers first + processQueuedProviders(storeConf) Review Comment: Let's read from the to-close provider list first and then submit a task for each provider? -- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. 
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org For queries about this service, please contact Infrastructure at: us...@infra.apache.org --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org For additional commands, e-mail: reviews-h...@spark.apache.org