This is an automated email from the ASF dual-hosted git repository.
dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 8b3e3458346a [SPARK-55881][SQL][UI] Add queryId, errorMessage, and
rootExecutionId to SQL execution REST API
8b3e3458346a is described below
commit 8b3e3458346abb457288f9d6e6bed7ec25ed1f1e
Author: Kent Yao <[email protected]>
AuthorDate: Sat Mar 7 10:31:03 2026 -0800
[SPARK-55881][SQL][UI] Add queryId, errorMessage, and rootExecutionId to
SQL execution REST API
### What changes were proposed in this pull request?
Extends the SQL execution REST API (`/api/v1/applications/{appId}/sql/`)
with 3 fields that were available in the internal `SQLExecutionUIData` but not
exposed:
| Field | Type | Description |
|-------|------|-------------|
| `queryId` | String | User-facing query identifier (UUID, null for old
event logs) |
| `errorMessage` | String | Error details for failed executions (null if
not failed) |
| `rootExecutionId` | Long | Parent execution ID for sub-execution
hierarchy (-1 if root) |
**Files changed (3, +30/-2):**
- `api.scala`: Added 3 fields to `ExecutionData` class with
backward-compatible defaults
- `SqlResource.scala`: Populates fields from `SQLExecutionUIData`
- `SqlResourceSuite.scala`: Added assertions and a new test covering the
new fields
### Why are the changes needed?
The client-side SQL tab DataTables rendering (SPARK-55875) needs these
fields to achieve feature parity with the original server-rendered listing
page. Without them, the REST API cannot provide Query ID, Error Message, or Sub
Execution information.
### Does this PR introduce _any_ user-facing change?
Yes — the SQL REST API response now includes `queryId`, `errorMessage`, and
`rootExecutionId` fields. Backward compatible (fields have defaults).
### How was this patch tested?
All 9 existing SQL REST API tests pass (`SqlResourceSuite` +
`SqlResourceWithActualMetricsSuite`). Additionally, a new test was added to
`SqlResourceSuite` covering the null-`queryId` backward-compatibility path,
and existing tests gained assertions for `queryId`, `errorMessage`, and
`rootExecutionId`.
### Was this patch authored or co-authored using generative AI tooling?
Yes, co-authored with GitHub Copilot.
Closes #54673 from yaooqinn/SPARK-55881.
Authored-by: Kent Yao <[email protected]>
Signed-off-by: Dongjoon Hyun <[email protected]>
---
.../spark/status/api/v1/sql/SqlResource.scala | 5 ++++-
.../org/apache/spark/status/api/v1/sql/api.scala | 5 ++++-
.../spark/status/api/v1/sql/SqlResourceSuite.scala | 22 ++++++++++++++++++++++
3 files changed, 30 insertions(+), 2 deletions(-)
diff --git
a/sql/core/src/main/scala/org/apache/spark/status/api/v1/sql/SqlResource.scala
b/sql/core/src/main/scala/org/apache/spark/status/api/v1/sql/SqlResource.scala
index eb74d7e7131d..06c69db2e299 100644
---
a/sql/core/src/main/scala/org/apache/spark/status/api/v1/sql/SqlResource.scala
+++
b/sql/core/src/main/scala/org/apache/spark/status/api/v1/sql/SqlResource.scala
@@ -104,7 +104,10 @@ private[v1] class SqlResource extends BaseAppResource {
completed,
failed,
nodes,
- edges)
+ edges,
+ if (exec.queryId != null) exec.queryId.toString else null,
+ exec.errorMessage.orNull,
+ exec.rootExecutionId)
}
private def printableMetrics(allNodes: collection.Seq[SparkPlanGraphNode],
diff --git
a/sql/core/src/main/scala/org/apache/spark/status/api/v1/sql/api.scala
b/sql/core/src/main/scala/org/apache/spark/status/api/v1/sql/api.scala
index c0f5c9c27ec2..723b6ba311d6 100644
--- a/sql/core/src/main/scala/org/apache/spark/status/api/v1/sql/api.scala
+++ b/sql/core/src/main/scala/org/apache/spark/status/api/v1/sql/api.scala
@@ -31,7 +31,10 @@ class ExecutionData private[spark] (
val successJobIds: Seq[Int],
val failedJobIds: Seq[Int],
val nodes: collection.Seq[Node],
- val edges: collection.Seq[SparkPlanGraphEdge])
+ val edges: collection.Seq[SparkPlanGraphEdge],
+ val queryId: String = null,
+ val errorMessage: String = null,
+ val rootExecutionId: Long = -1)
case class Node private[spark](
nodeId: Long,
diff --git
a/sql/core/src/test/scala/org/apache/spark/status/api/v1/sql/SqlResourceSuite.scala
b/sql/core/src/test/scala/org/apache/spark/status/api/v1/sql/SqlResourceSuite.scala
index ba742cc9d527..e100a74e30c8 100644
---
a/sql/core/src/test/scala/org/apache/spark/status/api/v1/sql/SqlResourceSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/status/api/v1/sql/SqlResourceSuite.scala
@@ -142,6 +142,9 @@ object SqlResourceSuite {
assert(executionData.failedJobIds == Seq.empty)
assert(executionData.nodes == nodes)
assert(executionData.edges == edges)
+ assert(executionData.queryId == "efe98ba7-1532-491e-9b4f-4be621cef37c")
+ assert(executionData.errorMessage == null)
+ assert(executionData.rootExecutionId == 1)
}
}
@@ -224,5 +227,24 @@ class SqlResourceSuite extends SparkFunSuite with
PrivateMethodTester {
d,
SparkPlanGraph(nodes, edges), true, true)
assert(executionData.status == "FAILED")
+ assert(executionData.errorMessage == "now you see me, now you don't")
+ assert(executionData.rootExecutionId == 1)
+ }
+
+ test("SPARK-55881: queryId, errorMessage, rootExecutionId in ExecutionData")
{
+ // Test with null queryId (backward compat with old event logs)
+ val d = new SQLExecutionUIData(
+ 0, -1, DESCRIPTION, details = "", PLAN_DESCRIPTION, Map.empty,
+ metrics = metrics, submissionTime = 1586768888233L,
+ completionTime = Some(new Date(1586768888999L)),
+ jobs = Map(0 -> JobExecutionStatus.SUCCEEDED),
+ stages = Set[Int](), metricValues = getMetricValues(),
+ errorMessage = None, queryId = null)
+ val executionData =
+ sqlResource invokePrivate prepareExecutionData(
+ d, SparkPlanGraph(Seq.empty, Seq.empty), false, false)
+ assert(executionData.queryId == null)
+ assert(executionData.errorMessage == null)
+ assert(executionData.rootExecutionId == -1)
}
}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]