[ https://issues.apache.org/jira/browse/SPARK-48238?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=17932777#comment-17932777 ]
Chris Nauroth commented on SPARK-48238:
---------------------------------------

Linking to SPARK-51408, which has a follow-up to get the tests passing in my network.

> Spark fails to start due to class o.a.h.yarn.server.webproxy.amfilter.AmIpFilter is not a jakarta.servlet.Filter
> ----------------------------------------------------------------------------------------------------------------
>
>                 Key: SPARK-48238
>                 URL: https://issues.apache.org/jira/browse/SPARK-48238
>             Project: Spark
>          Issue Type: Bug
>          Components: Build
>    Affects Versions: 4.0.0
>            Reporter: Cheng Pan
>            Assignee: Cheng Pan
>            Priority: Blocker
>              Labels: pull-request-available
>             Fix For: 4.0.0
>
> I tested the latest master branch; it fails to start in YARN mode:
> {code:java}
> dev/make-distribution.sh --tgz -Phive,hive-thriftserver,yarn{code}
>
> {code:java}
> $ bin/spark-sql --master yarn
> WARNING: Using incubator modules: jdk.incubator.vector
> Setting default log level to "WARN".
> To adjust logging level use sc.setLogLevel(newLevel). For SparkR, use setLogLevel(newLevel).
> 2024-05-10 17:58:17 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
> 2024-05-10 17:58:18 WARN Client: Neither spark.yarn.jars nor spark.yarn.archive is set, falling back to uploading libraries under SPARK_HOME.
> 2024-05-10 17:58:25 ERROR SparkContext: Error initializing SparkContext.
> org.sparkproject.jetty.util.MultiException: Multiple exceptions
>     at org.sparkproject.jetty.util.MultiException.ifExceptionThrow(MultiException.java:117) ~[spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>     at org.sparkproject.jetty.servlet.ServletHandler.initialize(ServletHandler.java:751) ~[spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>     at org.sparkproject.jetty.servlet.ServletContextHandler.startContext(ServletContextHandler.java:392) ~[spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>     at org.sparkproject.jetty.server.handler.ContextHandler.doStart(ContextHandler.java:902) ~[spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>     at org.sparkproject.jetty.servlet.ServletContextHandler.doStart(ServletContextHandler.java:306) ~[spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>     at org.sparkproject.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:93) ~[spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>     at org.apache.spark.ui.ServerInfo.addHandler(JettyUtils.scala:514) ~[spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>     at org.apache.spark.ui.SparkUI.$anonfun$attachAllHandlers$2(SparkUI.scala:81) ~[spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>     at org.apache.spark.ui.SparkUI.$anonfun$attachAllHandlers$2$adapted(SparkUI.scala:81) ~[spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>     at scala.collection.IterableOnceOps.foreach(IterableOnce.scala:619) ~[scala-library-2.13.13.jar:?]
>     at scala.collection.IterableOnceOps.foreach$(IterableOnce.scala:617) ~[scala-library-2.13.13.jar:?]
>     at scala.collection.AbstractIterable.foreach(Iterable.scala:935) ~[scala-library-2.13.13.jar:?]
>     at org.apache.spark.ui.SparkUI.$anonfun$attachAllHandlers$1(SparkUI.scala:81) ~[spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>     at org.apache.spark.ui.SparkUI.$anonfun$attachAllHandlers$1$adapted(SparkUI.scala:79) ~[spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>     at scala.Option.foreach(Option.scala:437) ~[scala-library-2.13.13.jar:?]
>     at org.apache.spark.ui.SparkUI.attachAllHandlers(SparkUI.scala:79) ~[spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>     at org.apache.spark.SparkContext.$anonfun$new$31(SparkContext.scala:690) ~[spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>     at org.apache.spark.SparkContext.$anonfun$new$31$adapted(SparkContext.scala:690) ~[spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>     at scala.Option.foreach(Option.scala:437) ~[scala-library-2.13.13.jar:?]
>     at org.apache.spark.SparkContext.<init>(SparkContext.scala:690) ~[spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>     at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2963) ~[spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>     at org.apache.spark.sql.SparkSession$Builder.$anonfun$getOrCreate$2(SparkSession.scala:1118) ~[spark-sql_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>     at scala.Option.getOrElse(Option.scala:201) [scala-library-2.13.13.jar:?]
>     at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:1112) [spark-sql_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>     at org.apache.spark.sql.hive.thriftserver.SparkSQLEnv$.init(SparkSQLEnv.scala:64) [spark-hive-thriftserver_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>     at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.<init>(SparkSQLCLIDriver.scala:405) [spark-hive-thriftserver_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>     at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver$.main(SparkSQLCLIDriver.scala:162) [spark-hive-thriftserver_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>     at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.main(SparkSQLCLIDriver.scala) [spark-hive-thriftserver_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>     at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
>     at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
>     at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
>     at java.base/java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
>     at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52) [spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>     at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:1019) [spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>     at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:196) [spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>     at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:219) [spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>     at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:95) [spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>     at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1109) [spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>     at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1118) [spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>     at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) [spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>     Suppressed: java.lang.IllegalStateException: class org.apache.hadoop.yarn.server.webproxy.amfilter.AmIpFilter is not a jakarta.servlet.Filter
>         at org.sparkproject.jetty.servlet.FilterHolder.doStart(FilterHolder.java:99) ~[spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>         at org.sparkproject.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:93) ~[spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>         at org.sparkproject.jetty.servlet.ServletHandler.lambda$initialize$2(ServletHandler.java:724) ~[spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>         at java.base/java.util.ArrayList$ArrayListSpliterator.forEachRemaining(ArrayList.java:1625) ~[?:?]
>         at java.base/java.util.stream.Streams$ConcatSpliterator.forEachRemaining(Streams.java:734) ~[?:?]
>         at java.base/java.util.stream.ReferencePipeline$Head.forEach(ReferencePipeline.java:762) ~[?:?]
>         at org.sparkproject.jetty.servlet.ServletHandler.initialize(ServletHandler.java:749) ~[spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>         at org.sparkproject.jetty.servlet.ServletContextHandler.startContext(ServletContextHandler.java:392) ~[spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>         at org.sparkproject.jetty.server.handler.ContextHandler.doStart(ContextHandler.java:902) ~[spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>         at org.sparkproject.jetty.servlet.ServletContextHandler.doStart(ServletContextHandler.java:306) ~[spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>         at org.sparkproject.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:93) ~[spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>         at org.apache.spark.ui.ServerInfo.addHandler(JettyUtils.scala:514) ~[spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>         at org.apache.spark.ui.SparkUI.$anonfun$attachAllHandlers$2(SparkUI.scala:81) ~[spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>         at org.apache.spark.ui.SparkUI.$anonfun$attachAllHandlers$2$adapted(SparkUI.scala:81) ~[spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>         at scala.collection.IterableOnceOps.foreach(IterableOnce.scala:619) ~[scala-library-2.13.13.jar:?]
>         at scala.collection.IterableOnceOps.foreach$(IterableOnce.scala:617) ~[scala-library-2.13.13.jar:?]
>         at scala.collection.AbstractIterable.foreach(Iterable.scala:935) ~[scala-library-2.13.13.jar:?]
>         at org.apache.spark.ui.SparkUI.$anonfun$attachAllHandlers$1(SparkUI.scala:81) ~[spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>         at org.apache.spark.ui.SparkUI.$anonfun$attachAllHandlers$1$adapted(SparkUI.scala:79) ~[spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>         at scala.Option.foreach(Option.scala:437) ~[scala-library-2.13.13.jar:?]
>         at org.apache.spark.ui.SparkUI.attachAllHandlers(SparkUI.scala:79) ~[spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>         at org.apache.spark.SparkContext.$anonfun$new$31(SparkContext.scala:690) ~[spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>         at org.apache.spark.SparkContext.$anonfun$new$31$adapted(SparkContext.scala:690) ~[spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>         at scala.Option.foreach(Option.scala:437) ~[scala-library-2.13.13.jar:?]
>         at org.apache.spark.SparkContext.<init>(SparkContext.scala:690) ~[spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>         at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2963) ~[spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>         at org.apache.spark.sql.SparkSession$Builder.$anonfun$getOrCreate$2(SparkSession.scala:1118) ~[spark-sql_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>         at scala.Option.getOrElse(Option.scala:201) [scala-library-2.13.13.jar:?]
>         at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:1112) [spark-sql_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>         at org.apache.spark.sql.hive.thriftserver.SparkSQLEnv$.init(SparkSQLEnv.scala:64) [spark-hive-thriftserver_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>         at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.<init>(SparkSQLCLIDriver.scala:405) [spark-hive-thriftserver_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>         at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver$.main(SparkSQLCLIDriver.scala:162) [spark-hive-thriftserver_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>         at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.main(SparkSQLCLIDriver.scala) [spark-hive-thriftserver_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>         at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
>         at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
>         at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
>         at java.base/java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
>         at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52) [spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>         at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:1019) [spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>         at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:196) [spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>         at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:219) [spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>         at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:95) [spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>         at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1109) [spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>         at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1118) [spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>         at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) [spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
> Caused by: java.lang.IllegalStateException: class org.apache.hadoop.yarn.server.webproxy.amfilter.AmIpFilter is not a jakarta.servlet.Filter
>     at org.sparkproject.jetty.servlet.FilterHolder.doStart(FilterHolder.java:99) ~[spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>     at org.sparkproject.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:93) ~[spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>     at org.sparkproject.jetty.servlet.ServletHandler.lambda$initialize$2(ServletHandler.java:724) ~[spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>     at java.base/java.util.ArrayList$ArrayListSpliterator.forEachRemaining(ArrayList.java:1625) ~[?:?]
>     at java.base/java.util.stream.Streams$ConcatSpliterator.forEachRemaining(Streams.java:734) ~[?:?]
>     at java.base/java.util.stream.ReferencePipeline$Head.forEach(ReferencePipeline.java:762) ~[?:?]
>     at org.sparkproject.jetty.servlet.ServletHandler.initialize(ServletHandler.java:749) ~[spark-core_2.13-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>     ... 38 more
> Exception in thread "main" MultiException[java.lang.IllegalStateException: class org.apache.hadoop.yarn.server.webproxy.amfilter.AmIpFilter is not a jakarta.servlet.Filter, java.lang.IllegalStateException: class org.apache.hadoop.yarn.server.webproxy.amfilter.AmIpFilter is not a jakarta.servlet.Filter]
>     at org.sparkproject.jetty.util.MultiException.ifExceptionThrow(MultiException.java:117)
>     at org.sparkproject.jetty.servlet.ServletHandler.initialize(ServletHandler.java:751)
>     at org.sparkproject.jetty.servlet.ServletContextHandler.startContext(ServletContextHandler.java:392)
>     at org.sparkproject.jetty.server.handler.ContextHandler.doStart(ContextHandler.java:902)
>     at org.sparkproject.jetty.servlet.ServletContextHandler.doStart(ServletContextHandler.java:306)
>     at org.sparkproject.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:93)
>     at org.apache.spark.ui.ServerInfo.addHandler(JettyUtils.scala:514)
>     at org.apache.spark.ui.SparkUI.$anonfun$attachAllHandlers$2(SparkUI.scala:81)
>     at org.apache.spark.ui.SparkUI.$anonfun$attachAllHandlers$2$adapted(SparkUI.scala:81)
>     at scala.collection.IterableOnceOps.foreach(IterableOnce.scala:619)
>     at scala.collection.IterableOnceOps.foreach$(IterableOnce.scala:617)
>     at scala.collection.AbstractIterable.foreach(Iterable.scala:935)
>     at org.apache.spark.ui.SparkUI.$anonfun$attachAllHandlers$1(SparkUI.scala:81)
>     at org.apache.spark.ui.SparkUI.$anonfun$attachAllHandlers$1$adapted(SparkUI.scala:79)
>     at scala.Option.foreach(Option.scala:437)
>     at org.apache.spark.ui.SparkUI.attachAllHandlers(SparkUI.scala:79)
>     at org.apache.spark.SparkContext.$anonfun$new$31(SparkContext.scala:690)
>     at org.apache.spark.SparkContext.$anonfun$new$31$adapted(SparkContext.scala:690)
>     at scala.Option.foreach(Option.scala:437)
>     at org.apache.spark.SparkContext.<init>(SparkContext.scala:690)
>     at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2963)
>     at org.apache.spark.sql.SparkSession$Builder.$anonfun$getOrCreate$2(SparkSession.scala:1118)
>     at scala.Option.getOrElse(Option.scala:201)
>     at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:1112)
>     at org.apache.spark.sql.hive.thriftserver.SparkSQLEnv$.init(SparkSQLEnv.scala:64)
>     at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.<init>(SparkSQLCLIDriver.scala:405)
>     at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver$.main(SparkSQLCLIDriver.scala:162)
>     at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.main(SparkSQLCLIDriver.scala)
>     at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>     at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77)
>     at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>     at java.base/java.lang.reflect.Method.invoke(Method.java:568)
>     at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
>     at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:1019)
>     at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:196)
>     at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:219)
>     at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:95)
>     at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1109)
>     at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1118)
>     at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
>     Suppressed: java.lang.IllegalStateException: class org.apache.hadoop.yarn.server.webproxy.amfilter.AmIpFilter is not a jakarta.servlet.Filter
>         at org.sparkproject.jetty.servlet.FilterHolder.doStart(FilterHolder.java:99)
>         at org.sparkproject.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:93)
>         at org.sparkproject.jetty.servlet.ServletHandler.lambda$initialize$2(ServletHandler.java:724)
>         at java.base/java.util.ArrayList$ArrayListSpliterator.forEachRemaining(ArrayList.java:1625)
>         at java.base/java.util.stream.Streams$ConcatSpliterator.forEachRemaining(Streams.java:734)
>         at java.base/java.util.stream.ReferencePipeline$Head.forEach(ReferencePipeline.java:762)
>         at org.sparkproject.jetty.servlet.ServletHandler.initialize(ServletHandler.java:749)
>         ... 38 more
> Caused by: java.lang.IllegalStateException: class org.apache.hadoop.yarn.server.webproxy.amfilter.AmIpFilter is not a jakarta.servlet.Filter
>     at org.sparkproject.jetty.servlet.FilterHolder.doStart(FilterHolder.java:99)
>     at org.sparkproject.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:93)
>     at org.sparkproject.jetty.servlet.ServletHandler.lambda$initialize$2(ServletHandler.java:724)
>     at java.base/java.util.ArrayList$ArrayListSpliterator.forEachRemaining(ArrayList.java:1625)
>     at java.base/java.util.stream.Streams$ConcatSpliterator.forEachRemaining(Streams.java:734)
>     at java.base/java.util.stream.ReferencePipeline$Head.forEach(ReferencePipeline.java:762)
>     at org.sparkproject.jetty.servlet.ServletHandler.initialize(ServletHandler.java:749)
>     ... 38 more
> {code}
>
> Possibly caused by SPARK-45522 and SPARK-47118
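
For context, the failure is a servlet-namespace mismatch: Spark 4's shaded Jetty expects filters that implement jakarta.servlet.Filter, while Hadoop's AmIpFilter still implements javax.servlet.Filter, and the two interfaces are unrelated types despite having identical shapes. The sketch below is an illustration of that root cause, not the actual Jetty or Spark source: it mirrors the kind of assignability check behind the IllegalStateException in the stack trace. The class AmIpFilterCheck is a hypothetical name, and it assumes hadoop-yarn-server-web-proxy and jakarta.servlet-api are on the classpath.

{code:java}
// Illustration only (assumed class/jar availability, see note above):
// reproduces the type check that fails when a javax-based filter is
// handed to a jakarta-based servlet container.
public class AmIpFilterCheck {
  public static void main(String[] args) throws Exception {
    // Hadoop's AmIpFilter implements javax.servlet.Filter.
    Class<?> held = Class.forName(
        "org.apache.hadoop.yarn.server.webproxy.amfilter.AmIpFilter");

    // javax.servlet.Filter and jakarta.servlet.Filter live in different
    // packages and share no type hierarchy, so this check fails even
    // though the filter class loads and is otherwise functional.
    if (!jakarta.servlet.Filter.class.isAssignableFrom(held)) {
      throw new IllegalStateException(
          "class " + held.getName() + " is not a jakarta.servlet.Filter");
    }
  }
}
{code}

Because no configuration change can make one interface a subtype of the other at runtime, a fix has to supply a jakarta-compatible filter (a port, shade, or adapter) rather than tweak the deployment.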