Yang Jie created SPARK-51605:
--------------------------------

             Summary: If the `logs` directory does not exist, the first launch of `bin/spark-shell --remote local` will fail.
                 Key: SPARK-51605
                 URL: https://issues.apache.org/jira/browse/SPARK-51605
             Project: Spark
          Issue Type: Bug
          Components: Connect
    Affects Versions: 4.1.0
            Reporter: Yang Jie


{code:java}
bin/spark-shell --remote local
WARNING: Using incubator modules: jdk.incubator.vector
Exception in thread "main" java.nio.file.NoSuchFileException: /Users/yangjie01/Tools/spark-4.1.0-SNAPSHOT-bin-3.4.1/logs
        at java.base/sun.nio.fs.UnixException.translateToIOException(UnixException.java:92)
        at java.base/sun.nio.fs.UnixException.rethrowAsIOException(UnixException.java:106)
        at java.base/sun.nio.fs.UnixException.rethrowAsIOException(UnixException.java:111)
        at java.base/sun.nio.fs.UnixFileAttributeViews$Basic.readAttributes(UnixFileAttributeViews.java:55)
        at java.base/sun.nio.fs.UnixFileSystemProvider.readAttributes(UnixFileSystemProvider.java:148)
        at java.base/java.nio.file.Files.readAttributes(Files.java:1851)
        at java.base/sun.nio.fs.PollingWatchService.doPrivilegedRegister(PollingWatchService.java:173)
        at java.base/sun.nio.fs.PollingWatchService$2.run(PollingWatchService.java:154)
        at java.base/sun.nio.fs.PollingWatchService$2.run(PollingWatchService.java:151)
        at java.base/java.security.AccessController.doPrivileged(AccessController.java:569)
        at java.base/sun.nio.fs.PollingWatchService.register(PollingWatchService.java:150)
        at java.base/sun.nio.fs.UnixPath.register(UnixPath.java:885)
        at java.base/java.nio.file.Path.register(Path.java:894)
        at org.apache.spark.sql.connect.SparkSession$.waitUntilFileExists(SparkSession.scala:717)
        at org.apache.spark.sql.connect.SparkSession$.$anonfun$withLocalConnectServer$13(SparkSession.scala:798)
        at org.apache.spark.sql.connect.SparkSession$.$anonfun$withLocalConnectServer$13$adapted(SparkSession.scala:791)
        at scala.Option.foreach(Option.scala:437)
        at org.apache.spark.sql.connect.SparkSession$.withLocalConnectServer(SparkSession.scala:791)
        at org.apache.spark.sql.application.ConnectRepl$.doMain(ConnectRepl.scala:67)
        at org.apache.spark.sql.application.ConnectRepl$.main(ConnectRepl.scala:57)
        at org.apache.spark.sql.application.ConnectRepl.main(ConnectRepl.scala)
        at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77)
        at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.base/java.lang.reflect.Method.invoke(Method.java:569)
        at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
        at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:1027)
        at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:204)
        at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:227)
        at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:96)
        at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1132)
        at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1141)
        at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
25/03/26 15:39:40 INFO ShutdownHookManager: Shutdown hook called
25/03/26 15:39:40 INFO ShutdownHookManager: Deleting directory /private/var/folders/j2/cfn7w6795538n_416_27rkqm0000gn/T/spark-fe4c9d71-b7d7-437e-b486-514cc538cccc
 {code}
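From the stack trace, `SparkSession$.waitUntilFileExists` registers a `WatchService` on the `$SPARK_HOME/logs` directory, and `Path.register` throws `NoSuchFileException` when that directory has not been created yet; creating it by hand before launch (e.g. `mkdir "$SPARK_HOME/logs"`) avoids the failure. Below is a minimal sketch of one possible defensive fix, ensuring the watched directory exists before registering the watcher. The variable names and structure are illustrative only, not the actual code in SparkSession.scala:

{code:scala}
import java.nio.file.{Files, Paths, StandardWatchEventKinds}

// Illustrative sketch: create the watched directory if it is missing, so a
// fresh distribution without a logs/ directory does not hit
// NoSuchFileException on the first launch.
val logDir = Paths.get(sys.env("SPARK_HOME"), "logs")
Files.createDirectories(logDir)            // no-op if logs/ already exists
val watcher = logDir.getFileSystem.newWatchService()
logDir.register(watcher, StandardWatchEventKinds.ENTRY_CREATE)
{code}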



