szehon-ho commented on code in PR #50571: URL: https://github.com/apache/spark/pull/50571#discussion_r2042631436
##########
sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala:
##########
@@ -1376,32 +1398,67 @@ abstract class DDLSuite extends QueryTest with DDLSuiteBase {
     }
   }
 
-  test("SPARK-51747: Data source cached plan should respect options") {
-    withTable("t") {
-      spark.sql("CREATE TABLE t(a string, b string) USING CSV".stripMargin)
-      spark.sql("INSERT INTO TABLE t VALUES ('a;b', 'c')")
-      spark.sql("INSERT INTO TABLE t VALUES ('hello; world', 'test')")
+  test("SPARK-51747: Data source cached plan respects options if ignore conf disabled") {
+    Seq("true", "false").foreach { ignoreOption =>
+      withSQLConf(SQLConf.READ_FILE_SOURCE_TABLE_CACHE_IGNORE_OPTIONS.key -> ignoreOption) {
+        withNamespace("ns") {
+          withTable("t") {
+            spark.sql("CREATE TABLE t(a string, b string) USING CSV".stripMargin)
+            spark.sql("INSERT INTO TABLE t VALUES ('a;b', 'c')")
+            spark.sql("INSERT INTO TABLE t VALUES ('hello; world', 'test')")
 
-      // check initial contents of table
-      checkAnswer(spark.table("t"), Row("a;b", "c") :: Row("hello; world", "test") :: Nil)
+            // check initial contents of table
+            checkAnswer(spark.table("t"), Row("a;b", "c") :: Row("hello; world", "test") :: Nil)
 
-      // no option
-      checkAnswer(
-        spark.sql("SELECT * FROM t"),
-        Row("a;b", "c") :: Row("hello; world", "test") :: Nil
-      )
+            val shouldIgnoreOption = ignoreOption == "true"

Review Comment:
   How about just making 'ignoreOption' a Boolean in the Seq, and then passing SQLConf.READ_FILE_SOURCE_TABLE_CACHE_IGNORE_OPTIONS.key -> ignoreOption.toString? That would make the rest of the code simpler (no need for the 'should' variable).
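   A minimal sketch of the suggested shape (hypothetical; it reuses the conf key and test helpers from the diff above and leaves the rest of the test body unchanged):

   ```scala
   // Sketch only: iterate over Booleans and convert to String when setting the conf,
   // so the body can branch on `ignoreOption` directly instead of a derived
   // `shouldIgnoreOption` variable.
   Seq(true, false).foreach { ignoreOption =>
     withSQLConf(SQLConf.READ_FILE_SOURCE_TABLE_CACHE_IGNORE_OPTIONS.key -> ignoreOption.toString) {
       withNamespace("ns") {
         withTable("t") {
           // ... same test body as in the diff, using `ignoreOption` where needed
         }
       }
     }
   }
   ```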