steven-aerts commented on code in PR #49163:
URL: https://github.com/apache/spark/pull/49163#discussion_r2084495288


##########
core/src/main/scala/org/apache/spark/internal/config/package.scala:
##########
@@ -283,6 +283,13 @@ package object config {
       .booleanConf
       .createWithDefault(true)
 
+  private[spark] val EVENT_LOG_READER_MAX_STRING_LENGTH =

Review Comment:
   Done



##########
core/src/main/scala/org/apache/spark/util/JsonProtocol.scala:
##########
@@ -1693,3 +1638,46 @@ private[spark] object JsonProtocol extends JsonUtils {
     }
   }
 }
+
+@deprecated("use new JsonProtocol(sparkConf) instead", "4.1.0")
+private[spark] object JsonProtocol extends JsonProtocol(new SparkConf()) {

Review Comment:
   Done



##########
core/src/test/scala/org/apache/spark/util/JsonProtocolSuite.scala:
##########
@@ -984,25 +993,40 @@ class JsonProtocolSuite extends SparkFunSuite {
     val gettingResult = SparkListenerTaskGettingResult(taskInfo)
 
     assert(
-      
stageSubmittedFromJson(sparkEventToJsonString(stageSubmitted)).stageInfo.accumulables.isEmpty)
+      jsonProtocol.stageSubmittedFromJson(
+        
jsonProtocol.sparkEventToJsonString(stageSubmitted)).stageInfo.accumulables.isEmpty)
     assert(
-      
taskStartFromJson(sparkEventToJsonString(taskStart)).taskInfo.accumulables.isEmpty)
+      jsonProtocol.taskStartFromJson(
+        
jsonProtocol.sparkEventToJsonString(taskStart)).taskInfo.accumulables.isEmpty)
     assert(
-      taskGettingResultFromJson(sparkEventToJsonString(gettingResult))
+      jsonProtocol.taskGettingResultFromJson(
+          jsonProtocol.sparkEventToJsonString(gettingResult))
         .taskInfo.accumulables.isEmpty)
 
     // Deliberately not fixed for job starts because a job might legitimately 
reference
     // stages that have completed even before the job start event is emitted.
-    testEvent(jobStart, sparkEventToJsonString(jobStart))
+    testEvent(jobStart, jsonProtocol.sparkEventToJsonString(jobStart))
+  }
+
+  test("SPARK-49872: allow to limit json reader string sizes") {
+    val bigStringEvent = SparkListenerExecutorUnexcluded(
+      executorUnexcludedTime, "a".repeat(10_000))
+    val jsonString = jsonProtocol.sparkEventToJsonString(bigStringEvent)
+    assert(jsonProtocol.sparkEventFromJson(jsonString) == bigStringEvent)
+    val jsonProtocolWithLimit = new JsonProtocol(new SparkConf()
+      .set(EVENT_LOG_READER_MAX_STRING_LENGTH, 1_000))
+    assertThrows[com.fasterxml.jackson.core.JsonProcessingException] {

Review Comment:
   Done



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org

For queries about this service, please contact Infrastructure at:
users@infra.apache.org


---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org

Reply via email to