[ https://issues.apache.org/jira/browse/SPARK-50843?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Ruifeng Zheng updated SPARK-50843:
----------------------------------
    Description: 
 

On Spark Connect, accessing a GBT model's trees (i.e. the submodels of a TreeEnsembleModel) fails with:

 
{code:java}
java.lang.UnsupportedOperationException
    at org.apache.spark.sql.connect.common.LiteralValueProtoConverter$.toDataType(LiteralValueProtoConverter.scala:241)
    at org.apache.spark.sql.connect.common.LiteralValueProtoConverter$.arrayBuilder$1(LiteralValueProtoConverter.scala:66)
    at org.apache.spark.sql.connect.common.LiteralValueProtoConverter$.toLiteralProtoBuilder(LiteralValueProtoConverter.scala:99)
    at org.apache.spark.sql.connect.common.LiteralValueProtoConverter$.toLiteralProto(LiteralValueProtoConverter.scala:202)
    at org.apache.spark.sql.connect.ml.Serializer$.serializeParam(Serializer.scala:79)
    at org.apache.spark.sql.connect.ml.MLHandler$.handleMlCommand(MLHandler.scala:142)
    at org.apache.spark.sql.connect.planner.SparkConnectPlanner.handleMlCommand(SparkConnectPlanner.scala:2478)
    at org.apache.spark.sql.connect.planner.SparkConnectPlanner.process(SparkConnectPlanner.scala:2469)
    at org.apache.spark.sql.connect.execution.ExecuteThreadRunner.handleCommand(ExecuteThreadRunner.scala:311)
    at org.apache.spark.sql.connect.execution.ExecuteThreadRunner.$anonfun$executeInternal$1(ExecuteThreadRunner.scala:214)
    at org.apache.spark.sql.connect.execution.ExecuteThreadRunner.$anonfun$executeInternal$1$adapted(ExecuteThreadRunner.scala:186)
    at org.apache.spark.sql.connect.service.SessionHolder.$anonfun$withSession$2(SessionHolder.scala:342)
    at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:791)
    at org.apache.spark.sql.connect.service.SessionHolder.$anonfun$withSession$1(SessionHolder.scala:342)
    at org.apache.spark.JobArtifactSet$.withActiveJobArtifactState(JobArtifactSet.scala:94)
    at org.apache.spark.sql.artifact.ArtifactManager.$anonfun$withResources$1(ArtifactManager.scala:111)
    at org.apache.spark.util.Utils$.withContextClassLoader(Utils.scala:186)
    at org.apache.spark.sql.artifact.ArtifactManager.withClassLoaderIfNeeded(ArtifactManager.scala:101)
    at org.apache.spark.sql.artifact.ArtifactManager.withResources(ArtifactManager.scala:110)
    at org.apache.spark.sql.connect.service.SessionHolder.withSession(SessionHolder.scala:341)
    at org.apache.spark.sql.connect.execution.ExecuteThreadRunner.executeInternal(ExecuteThreadRunner.scala:186)
    at org.apache.spark.sql.connect.execution.ExecuteThreadRunner.org$apache$spark$sql$connect$execution$ExecuteThreadRunner$$execute(ExecuteThreadRunner.scala:115)
    at org.apache.spark.sql.connect.execution.ExecuteThreadRunner$ExecutionThread.run(ExecuteThreadRunner.scala:336)
{code}
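
For context, a minimal reproduction sketch (not part of the original report): it assumes a local Spark Connect server; the connection URL and the toy training data below are illustrative.

{code:python}
# Sketch: reproduce GBT model's `.trees` access over Spark Connect.
# Assumes a Connect server is reachable at sc://localhost:15002 (illustrative).
from pyspark.ml.classification import GBTClassifier
from pyspark.ml.linalg import Vectors
from pyspark.sql import SparkSession

spark = SparkSession.builder.remote("sc://localhost:15002").getOrCreate()

# Tiny illustrative dataset, just enough to fit a model.
df = spark.createDataFrame(
    [(1.0, Vectors.dense(0.0, 1.0)), (0.0, Vectors.dense(1.0, 0.0))],
    ["label", "features"],
)

model = GBTClassifier(maxIter=3).fit(df)

# Accessing the ensemble's submodels is what triggers the
# UnsupportedOperationException shown in the stack trace above.
print(model.trees)
{code}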

> Support access submodel in TreeEnsembleModel
> --------------------------------------------
>
>                 Key: SPARK-50843
>                 URL: https://issues.apache.org/jira/browse/SPARK-50843
>             Project: Spark
>          Issue Type: Sub-task
>          Components: Connect, ML, PySpark
>    Affects Versions: 4.0.0
>            Reporter: Ruifeng Zheng
>            Priority: Major
>
>  
> On Spark Connect, accessing a GBT model's trees (i.e. the submodels of a TreeEnsembleModel) fails with:
>  
> {code:java}
> java.lang.UnsupportedOperationException
>     at org.apache.spark.sql.connect.common.LiteralValueProtoConverter$.toDataType(LiteralValueProtoConverter.scala:241)
>     at org.apache.spark.sql.connect.common.LiteralValueProtoConverter$.arrayBuilder$1(LiteralValueProtoConverter.scala:66)
>     at org.apache.spark.sql.connect.common.LiteralValueProtoConverter$.toLiteralProtoBuilder(LiteralValueProtoConverter.scala:99)
>     at org.apache.spark.sql.connect.common.LiteralValueProtoConverter$.toLiteralProto(LiteralValueProtoConverter.scala:202)
>     at org.apache.spark.sql.connect.ml.Serializer$.serializeParam(Serializer.scala:79)
>     at org.apache.spark.sql.connect.ml.MLHandler$.handleMlCommand(MLHandler.scala:142)
>     at org.apache.spark.sql.connect.planner.SparkConnectPlanner.handleMlCommand(SparkConnectPlanner.scala:2478)
>     at org.apache.spark.sql.connect.planner.SparkConnectPlanner.process(SparkConnectPlanner.scala:2469)
>     at org.apache.spark.sql.connect.execution.ExecuteThreadRunner.handleCommand(ExecuteThreadRunner.scala:311)
>     at org.apache.spark.sql.connect.execution.ExecuteThreadRunner.$anonfun$executeInternal$1(ExecuteThreadRunner.scala:214)
>     at org.apache.spark.sql.connect.execution.ExecuteThreadRunner.$anonfun$executeInternal$1$adapted(ExecuteThreadRunner.scala:186)
>     at org.apache.spark.sql.connect.service.SessionHolder.$anonfun$withSession$2(SessionHolder.scala:342)
>     at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:791)
>     at org.apache.spark.sql.connect.service.SessionHolder.$anonfun$withSession$1(SessionHolder.scala:342)
>     at org.apache.spark.JobArtifactSet$.withActiveJobArtifactState(JobArtifactSet.scala:94)
>     at org.apache.spark.sql.artifact.ArtifactManager.$anonfun$withResources$1(ArtifactManager.scala:111)
>     at org.apache.spark.util.Utils$.withContextClassLoader(Utils.scala:186)
>     at org.apache.spark.sql.artifact.ArtifactManager.withClassLoaderIfNeeded(ArtifactManager.scala:101)
>     at org.apache.spark.sql.artifact.ArtifactManager.withResources(ArtifactManager.scala:110)
>     at org.apache.spark.sql.connect.service.SessionHolder.withSession(SessionHolder.scala:341)
>     at org.apache.spark.sql.connect.execution.ExecuteThreadRunner.executeInternal(ExecuteThreadRunner.scala:186)
>     at org.apache.spark.sql.connect.execution.ExecuteThreadRunner.org$apache$spark$sql$connect$execution$ExecuteThreadRunner$$execute(ExecuteThreadRunner.scala:115)
>     at org.apache.spark.sql.connect.execution.ExecuteThreadRunner$ExecutionThread.run(ExecuteThreadRunner.scala:336)
> {code}


