LuciferYang commented on code in PR #50999:
URL: https://github.com/apache/spark/pull/50999#discussion_r2104982797


##########
project/MimaExcludes.scala:
##########
@@ -34,209 +33,14 @@ import com.typesafe.tools.mima.core.*
  */
 object MimaExcludes {
 
-  lazy val v41excludes = v40excludes ++ Seq(
+  // Exclude rules for 4.1.x from 4.0.0
+  lazy val v41excludes = defaultExcludes ++ Seq(
     // [SPARK-51261][ML][CONNECT] Introduce model size estimation to control ml cache
-    ProblemFilters.exclude[ReversedMissingMethodProblem]("org.apache.spark.ml.linalg.Vector.getSizeInBytes")
-  )
-
-  // Exclude rules for 4.0.x from 3.5.0
-  lazy val v40excludes = defaultExcludes ++ Seq(
-    // [SPARK-44863][UI] Add a button to download thread dump as a txt in 
Spark UI
-    
ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.status.api.v1.ThreadStackTrace.*"),
-    
ProblemFilters.exclude[MissingTypesProblem]("org.apache.spark.status.api.v1.ThreadStackTrace$"),
-    //[SPARK-46399][Core] Add exit status to the Application End event for the 
use of Spark Listener
-    
ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.scheduler.SparkListenerApplicationEnd.*"),
-    
ProblemFilters.exclude[MissingTypesProblem]("org.apache.spark.scheduler.SparkListenerApplicationEnd$"),
-    // [SPARK-45427][CORE] Add RPC SSL settings to SSLOptions and 
SparkTransportConf
-    
ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.network.netty.SparkTransportConf.fromSparkConf"),
-    // [SPARK-45022][SQL] Provide context for dataset API errors
-    
ProblemFilters.exclude[ReversedMissingMethodProblem]("org.apache.spark.QueryContext.contextType"),
-    
ProblemFilters.exclude[ReversedMissingMethodProblem]("org.apache.spark.QueryContext.code"),
-    
ProblemFilters.exclude[ReversedMissingMethodProblem]("org.apache.spark.QueryContext.callSite"),
-    
ProblemFilters.exclude[ReversedMissingMethodProblem]("org.apache.spark.QueryContext.summary"),
-    
ProblemFilters.exclude[IncompatibleResultTypeProblem]("org.apache.spark.sql.types.Decimal.fromStringANSI$default$3"),
-    
ProblemFilters.exclude[IncompatibleMethTypeProblem]("org.apache.spark.sql.types.Decimal.fromStringANSI"),
-    // [SPARK-45762][CORE] Support shuffle managers defined in user jars by 
changing startup order
-    
ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.SparkEnv.this"),
-    // [SPARK-46480][CORE][SQL] Fix NPE when table cache task attempt
-    
ProblemFilters.exclude[ReversedMissingMethodProblem]("org.apache.spark.TaskContext.isFailed"),
-
-    // SPARK-43299: Convert StreamingQueryException in Scala Client
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.streaming.StreamingQueryException"),
-
-    // SPARK-45856: Move ArtifactManager from Spark Connect into SparkSession 
(sql/core)
-    
ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.storage.CacheId.apply"),
-    
ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.storage.CacheId.userId"),
-    
ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.storage.CacheId.sessionId"),
-    
ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.storage.CacheId.copy"),
-    
ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.storage.CacheId.copy$default$3"),
-    
ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.storage.CacheId.this"),
-    
ProblemFilters.exclude[MissingTypesProblem]("org.apache.spark.storage.CacheId$"),
-    
ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.storage.CacheId.apply"),
-
-    // SPARK-46410: Assign error classes/subclasses to 
JdbcUtils.classifyException
-    
ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.sql.jdbc.JdbcDialect.classifyException"),
-    // TODO(SPARK-46878): Invalid Mima report for StringType extension
-    
ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.sql.types.StringType.this"),
-    // SPARK-47011: Remove deprecated 
BinaryClassificationMetrics.scoreLabelsWeight
-    
ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.mllib.evaluation.BinaryClassificationMetrics.scoreLabelsWeight"),
-    // SPARK-46938: Javax -> Jakarta namespace change.
-    
ProblemFilters.exclude[MissingTypesProblem]("org.apache.spark.ui.ProxyRedirectHandler$ResponseWrapper"),
-    
ProblemFilters.exclude[IncompatibleMethTypeProblem]("org.apache.spark.ui.ProxyRedirectHandler#ResponseWrapper.this"),
-    
ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.sql.jdbc.DB2Dialect#DB2SQLBuilder.this"),
-    
ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.sql.jdbc.DB2Dialect#DB2SQLQueryBuilder.this"),
-    
ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.sql.jdbc.MsSqlServerDialect#MsSqlServerSQLBuilder.this"),
-    
ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.sql.jdbc.MsSqlServerDialect#MsSqlServerSQLQueryBuilder.this"),
-    
ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.sql.jdbc.MySQLDialect#MySQLSQLBuilder.this"),
-    
ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.sql.jdbc.MySQLDialect#MySQLSQLQueryBuilder.this"),
-    
ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.sql.jdbc.OracleDialect#OracleSQLBuilder.this"),
-    
ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.sql.jdbc.OracleDialect#OracleSQLQueryBuilder.this"),
-    // SPARK-47706: Bump json4s from 3.7.0-M11 to 4.0.7
-    
ProblemFilters.exclude[IncompatibleResultTypeProblem]("org.apache.spark.sql.expressions.MutableAggregationBuffer.jsonValue"),
-    
ProblemFilters.exclude[IncompatibleMethTypeProblem]("org.apache.spark.sql.types.DataType#JSortedObject.unapplySeq"),
-    
ProblemFilters.exclude[IncompatibleMethTypeProblem]("org.apache.spark.mllib.tree.model.TreeEnsembleModel#SaveLoadV1_0.readMetadata"),
-    // SPARK-47814: Move `KinesisTestUtils` & 
`WriteInputFormatTestDataGenerator` from `main` to `test`
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.api.python.TestWritable"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.api.python.TestWritable$"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.api.python.WriteInputFormatTestDataGenerator"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.api.python.WriteInputFormatTestDataGenerator$"),
-    // SPARK-47764: Cleanup shuffle dependencies based on ShuffleCleanupMode
-    
ProblemFilters.exclude[ReversedMissingMethodProblem]("org.apache.spark.shuffle.MigratableResolver.addShuffleToSkip"),
-    
ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.sql.SQLContext#implicits._sqlContext"),
-    
ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.sql.SQLImplicits._sqlContext"),
-    
ProblemFilters.exclude[ReversedMissingMethodProblem]("org.apache.spark.sql.SQLImplicits.session"),
-    
ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.sql.SparkSession#implicits._sqlContext"),
-    // SPARK-48761: Add clusterBy() to CreateTableWriter.
-    
ProblemFilters.exclude[ReversedMissingMethodProblem]("org.apache.spark.sql.CreateTableWriter.clusterBy"),
-    // SPARK-48900: Add `reason` string to all job / stage / job group 
cancellation calls
-    
ProblemFilters.exclude[ReversedMissingMethodProblem]("org.apache.spark.scheduler.JobWaiter.cancel"),
-    
ProblemFilters.exclude[ReversedMissingMethodProblem]("org.apache.spark.FutureAction.cancel"),
-    // SPARK-48901: Add clusterBy() to DataStreamWriter.
-    
ProblemFilters.exclude[ReversedMissingMethodProblem]("org.apache.spark.sql.DataStreamWriter.clusterBy"),
-    // SPARK-49027: A shared Column API
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.ColumnName"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.TypedColumn"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.functions"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.functions$"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.expressions.Aggregator"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.expressions.MutableAggregationBuffer"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.expressions.UserDefinedAggregateFunction"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.expressions.UserDefinedFunction"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.expressions.Window"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.expressions.Window$"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.expressions.WindowSpec"),
-
-    // SPARK-49423: Consolidate Observation in sql/api
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.Observation"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.Observation$"),
-
-    // SPARK-49425: Create a shared DataFrameWriter interface.
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.DataFrameWriter"),
-
-    // SPARK-49284: Shared Catalog interface.
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.catalog.CatalogMetadata"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.catalog.Column"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.catalog.Database"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.catalog.Function"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.catalog.Table"),
-
-    // SPARK-49426: Shared DataFrameWriterV2
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.CreateTableWriter"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.DataFrameWriterV2"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.WriteConfigMethods"),
-
-    // SPARK-49424: Shared Encoders
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.Encoders"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.Encoders$"),
-
-    // SPARK-49413: Create a shared RuntimeConfig interface.
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.RuntimeConfig"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.RuntimeConfig$"),
-
-    // SPARK-49287: Shared Streaming interfaces
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.scheduler.SparkListenerEvent"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.ForeachWriter"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.streaming.SourceProgress"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.streaming.SourceProgress$"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.streaming.StateOperatorProgress"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.streaming.StateOperatorProgress$"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.streaming.StreamingQueryListener"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.streaming.StreamingQueryListener$"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.streaming.StreamingQueryListener$Event"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.streaming.StreamingQueryListener$QueryIdleEvent"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.streaming.StreamingQueryListener$QueryProgressEvent"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.streaming.StreamingQueryListener$QueryStartedEvent"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.streaming.StreamingQueryListener$QueryTerminatedEvent"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.streaming.StreamingQueryStatus"),
-
-    // SPARK-49415: Shared SQLImplicits.
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.DatasetHolder"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.DatasetHolder$"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.LowPrioritySQLImplicits"),
-    
ProblemFilters.exclude[MissingTypesProblem]("org.apache.spark.sql.SQLContext$implicits$"),
-    
ProblemFilters.exclude[MissingTypesProblem]("org.apache.spark.sql.SQLImplicits"),
-    
ProblemFilters.exclude[IncompatibleResultTypeProblem]("org.apache.spark.sql.SQLImplicits.StringToColumn"),
-    
ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.sql.SQLImplicits.this"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.SQLImplicits$StringToColumn"),
-    
ProblemFilters.exclude[MissingTypesProblem]("org.apache.spark.sql.SparkSession$implicits$"),
-    
ProblemFilters.exclude[InheritedNewAbstractMethodProblem]("org.apache.spark.sql.SQLImplicits.session"),
+    ProblemFilters.exclude[ReversedMissingMethodProblem]("org.apache.spark.ml.linalg.Vector.getSizeInBytes"),
 
-    // SPARK-49282: Shared SparkSessionBuilder
-    
ProblemFilters.exclude[MissingTypesProblem]("org.apache.spark.sql.SparkSession$Builder"),
-
-    // SPARK-49286: Avro/Protobuf functions in sql/api
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.avro.functions"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.avro.functions$"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.protobuf.functions"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.protobuf.functions$"),
-
-    // SPARK-49434: Move aggregators to sql/api
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.expressions.javalang.typed"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.expressions.scalalang.typed"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.expressions.scalalang.typed$"),
-
-    // SPARK-49418: Consolidate thread local handling in sql/api
-    
ProblemFilters.exclude[IncompatibleMethTypeProblem]("org.apache.spark.sql.SparkSession.setActiveSession"),
-    
ProblemFilters.exclude[IncompatibleMethTypeProblem]("org.apache.spark.sql.SparkSession.setDefaultSession"),
-    
ProblemFilters.exclude[DirectAbstractMethodProblem]("org.apache.spark.sql.api.SparkSessionCompanion.clearActiveSession"),
-    
ProblemFilters.exclude[DirectAbstractMethodProblem]("org.apache.spark.sql.api.SparkSessionCompanion.clearDefaultSession"),
-
-    // SPARK-49748: Add getCondition and deprecate getErrorClass in 
SparkThrowable
-    
ProblemFilters.exclude[ReversedMissingMethodProblem]("org.apache.spark.SparkThrowable.getCondition"),
-
-    // SPARK-50112: Moving avro files from connector to sql/core
-    ProblemFilters.exclude[Problem]("org.apache.spark.sql.avro.*"),
-
-    // SPARK-49700: Unified Scala SQL Interface.
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.DataFrameNaFunctions"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.DataFrameReader"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.DataFrameStatFunctions"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.KeyValueGroupedDataset"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.SQLImplicits"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.SparkSession"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.SparkSession$"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.SparkSession$Builder"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.SparkSession$implicits$"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.package"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.package$"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.catalog.Catalog"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.streaming.DataStreamReader"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.streaming.DataStreamWriter"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.streaming.DataStreamWriter$"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.streaming.StreamingQueryManager"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.streaming.StreamingQuery"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.SQLContext"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.SQLContext$"),
-    
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.SQLContext$implicits$"),
-
-    // SPARK-50768: Introduce TaskContext.createResourceUninterruptibly to 
avoid stream leak by task interruption
-    
ProblemFilters.exclude[ReversedMissingMethodProblem]("org.apache.spark.TaskContext.interruptible"),
-    
ProblemFilters.exclude[ReversedMissingMethodProblem]("org.apache.spark.TaskContext.pendingInterrupt"),
-    
ProblemFilters.exclude[ReversedMissingMethodProblem]("org.apache.spark.TaskContext.createResourceUninterruptibly"),
-
-  ) ++ loggingExcludes("org.apache.spark.sql.DataFrameReader") ++
-    loggingExcludes("org.apache.spark.sql.streaming.DataStreamReader") ++
-    loggingExcludes("org.apache.spark.sql.SparkSession#Builder")
+    // [SPARK-52221][SQL] Refactor SqlScriptingLocalVariableManager into more generic context manager
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.sql.scripting.SqlScriptingExecution.withLocalVariableManager")

Review Comment:
   This exclude rule was an omission from before.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org

Reply via email to