Skip to content

Commit a858730

Browse files
committed
Enable CometSqlFileTestSuite
1 parent f4a6f9a commit a858730

1 file changed

Lines changed: 59 additions & 1 deletion

File tree

spark/src/test/scala/org/apache/comet/CometSqlFileTestSuite.scala

Lines changed: 59 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -79,6 +79,63 @@ class CometSqlFileTestSuite extends CometTestBase with AdaptiveSparkPlanHelper {
7979
"spark.sql.optimizer.excludedRules" ->
8080
"org.apache.spark.sql.catalyst.optimizer.ConstantFolding")
8181

82+
// TODO: https://github.com/apache/datafusion-comet/issues/3384
// Skipped for first stage of 4.1 support.
// Built from (category -> test names) so each expression category is grouped
// in one place; expands to the full "expressions/<category>/<name>.sql" paths.
private val excludedInSpark41: Set[String] = {
  val byCategory: Map[String, Seq[String]] = Map(
    "string" -> Seq(
      "string_lpad", "concat", "string_repeat", "length", "string_trim",
      "ends_with", "init_cap_enabled", "lower", "string_space", "bit_length",
      "unhex", "upper", "string_replace", "starts_with", "string_rpad", "hex",
      "regexp_replace_enabled", "chr", "octet_length", "like", "ascii",
      "string_instr", "string_translate", "reverse", "contains"),
    "decimal" -> Seq("decimal_ops"),
    "datetime" -> Seq(
      "last_day", "date_format", "date_format_enabled", "date_diff",
      "date_add", "date_sub", "from_unix_time", "make_date", "next_day"),
    "map" -> Seq("map_from_entries", "map_contains_key"),
    "math" -> Seq(
      "arithmetic", "floor", "ceil", "isnan", "abs", "signum", "round",
      "arithmetic_ansi"),
    "struct" -> Seq("create_named_struct"),
    "bitwise" -> Seq("bitwise"),
    "cast" -> Seq("cast"),
    "array" -> Seq("array_contains", "create_array"),
    "conditional" -> Seq("coalesce", "predicates", "if_expr"))
  byCategory.iterator.flatMap { case (category, names) =>
    names.map(name => s"expressions/$category/$name.sql")
  }.toSet
}
138+
82139
private def runTestFile(relativePath: String, file: SqlTestFile): Unit = {
83140
val allConfigs = file.configs ++ constantFoldingExcluded
84141
withSQLConf(allConfigs: _*) {
@@ -131,7 +188,8 @@ class CometSqlFileTestSuite extends CometTestBase with AdaptiveSparkPlanHelper {
131188
val combinations = configMatrix(parsed.configMatrix)
132189

133190
// Skip tests that require a newer Spark version
134-
val skip = parsed.minSparkVersion.exists(!meetsMinSparkVersion(_)) || isSpark41Plus
191+
val skip = parsed.minSparkVersion.exists(!meetsMinSparkVersion(_)) ||
192+
(isSpark41Plus && excludedInSpark41.contains(relativePath))
135193

136194
if (combinations.size <= 1) {
137195
// No matrix or single combination

0 commit comments

Comments (0)