LuciferYang commented on code in PR #50178:
URL: https://github.com/apache/spark/pull/50178#discussion_r1982784603


##########
sql/hive/src/test/scala/org/apache/spark/sql/hive/test/TestHive.scala:
##########
@@ -44,33 +44,48 @@ import org.apache.spark.sql.execution.{CommandExecutionMode, QueryExecution, SQL
 import org.apache.spark.sql.hive._
 import org.apache.spark.sql.hive.client.HiveClient
 import org.apache.spark.sql.internal.{SessionState, SharedState, SQLConf, WithTestConf}
-import org.apache.spark.sql.internal.StaticSQLConf.{CATALOG_IMPLEMENTATION, WAREHOUSE_PATH}
+import org.apache.spark.sql.internal.StaticSQLConf.{CATALOG_IMPLEMENTATION, RESULT_QUERY_STAGE_MAX_THREAD_THRESHOLD, SHUFFLE_EXCHANGE_MAX_THREAD_THRESHOLD, WAREHOUSE_PATH}
 import org.apache.spark.util.{ShutdownHookManager, Utils}
 
+private object TesSparkConf {
+  def conf: SparkConf = {
+    val sparkConf = new SparkConf()
+      .set("spark.sql.test", "")
+      .set(SQLConf.CODEGEN_FALLBACK.key, "false")
+      .set(SQLConf.CODEGEN_FACTORY_MODE.key, CodegenObjectFactoryMode.CODEGEN_ONLY.toString)
+      .set(HiveUtils.HIVE_METASTORE_BARRIER_PREFIXES.key,
+        "org.apache.spark.sql.hive.execution.PairSerDe")
+      .set(WAREHOUSE_PATH.key, TestHiveContext.makeWarehouseDir().toURI.getPath)
+      // SPARK-8910
+      .set(UI_ENABLED, false)
+      .set(config.UNSAFE_EXCEPTION_ON_MEMORY_LEAK, true)
+      // Hive changed the default of hive.metastore.disallow.incompatible.col.type.changes
+      // from false to true. For details, see the JIRA HIVE-12320 and HIVE-17764.
+      .set("spark.hadoop.hive.metastore.disallow.incompatible.col.type.changes", "false")
+      // Disable ConvertToLocalRelation for better test coverage. Test cases built on
+      // LocalRelation will exercise the optimization rules better by disabling it as
+      // this rule may potentially block testing of other optimization rules such as
+      // ConstantPropagation etc.
+      .set(SQLConf.OPTIMIZER_EXCLUDED_RULES.key, ConvertToLocalRelation.ruleName)
+    // SPARK-51365: Due to the fact that the GitHub-hosted Runner on the `macOS + AppleSilicon chip`
+    // combination has only half the memory resources of Runners with other specifications,
+    // it is necessary to limit the number of threads in this case to avoid the issue of
+    // memory resources being exhausted by frequent thread creation during testing.
+    if (sys.env.contains("GITHUB_ACTIONS") && Utils.isMacOnAppleSilicon) {
+      conf.set(SHUFFLE_EXCHANGE_MAX_THREAD_THRESHOLD, 48)

Review Comment:
   For the Hive module, we need to impose stricter limits because the current -Xss configuration is 64m.
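
   To make the memory concern concrete (an illustration, not code from this PR): with -Xss set to 64m, each thread can reserve up to 64 MB of stack, so 48 threads alone can account for up to roughly 3 GB on the memory-constrained macOS AppleSilicon runner. A stricter, Hive-specific cap could look like the sketch below, which reuses the `sparkConf` builder and the config entries already shown in this diff; the value 24 is only a placeholder for discussion, not a number proposed in the PR.

   ```scala
   // Illustration only, not part of this PR: a stricter thread cap for the Hive
   // test module, placed at the same point as the `if` block in this diff.
   // With -Xss64m each thread may reserve up to 64 MB of stack, so the cap bounds
   // the worst-case stack memory: 48 threads -> up to ~3 GB, 24 threads -> up to ~1.5 GB.
   if (sys.env.contains("GITHUB_ACTIONS") && Utils.isMacOnAppleSilicon) {
     // 24 is a placeholder value for discussion, not a number from the PR.
     sparkConf.set(SHUFFLE_EXCHANGE_MAX_THREAD_THRESHOLD, 24)
     sparkConf.set(RESULT_QUERY_STAGE_MAX_THREAD_THRESHOLD, 24)
   }
   ```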
   
   




