Skip to content

Commit 7200cc7

Browse files
authored
test: [Spark 4.1.1] unignore CachedBatchSerializerNoUnwrapSuite (#4137) (#4204)
1 parent 6bfc865 commit 7200cc7

1 file changed

Lines changed: 20 additions & 18 deletions

File tree

dev/diffs/4.1.1.diff

Lines changed: 20 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -2732,27 +2732,29 @@ index 3e7d26f74bd..04cfdf075ab 100644
27322732
assert(collect(initialExecutedPlan) {
27332733
case i: InMemoryTableScanLike => i
27342734
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/CachedBatchSerializerSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/CachedBatchSerializerSuite.scala
2735-
index 47b935a2880..15010242a3b 100644
2735+
index 47b935a2880..3e9b87f5c32 100644
27362736
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/CachedBatchSerializerSuite.scala
27372737
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/CachedBatchSerializerSuite.scala
2738-
@@ -22,6 +22,7 @@ import scala.jdk.CollectionConverters._
2739-
import org.apache.spark.SparkConf
2740-
import org.apache.spark.rdd.RDD
2741-
import org.apache.spark.sql.{QueryTest, Row}
2742-
+import org.apache.spark.sql.IgnoreComet
2743-
import org.apache.spark.sql.catalyst.InternalRow
2744-
import org.apache.spark.sql.catalyst.expressions.{Attribute, Expression, UnsafeProjection}
2745-
import org.apache.spark.sql.columnar.{CachedBatch, CachedBatchSerializer}
2746-
@@ -212,7 +213,8 @@ class CachedBatchSerializerNoUnwrapSuite extends QueryTest
2747-
classOf[DefaultCachedBatchSerializerNoUnwrap].getName)
2738+
@@ -230,9 +230,16 @@ class CachedBatchSerializerNoUnwrapSuite extends QueryTest
2739+
assert(cachedPlans.length == 2)
2740+
cachedPlans.foreach {
2741+
cachedPlan =>
2742+
- assert(cachedPlan.isInstanceOf[WholeStageCodegenExec])
2743+
- assert(cachedPlan.asInstanceOf[WholeStageCodegenExec]
2744+
- .child.isInstanceOf[ColumnarToRowExec])
2745+
+ // Comet replaces ColumnarToRowExec with its own columnar-to-row operator
2746+
+ // (CometNativeColumnarToRow / CometColumnarToRow). Accept either the
2747+
+ // Spark shape or the equivalent Comet shape, since the important property
2748+
+ // under this serializer is that the row conversion is not unwrapped.
2749+
+ val isSparkShape =
2750+
+ cachedPlan.isInstanceOf[WholeStageCodegenExec] &&
2751+
+ cachedPlan.asInstanceOf[WholeStageCodegenExec]
2752+
+ .child.isInstanceOf[ColumnarToRowExec]
2753+
+ val isCometShape = cachedPlan.getClass.getName.startsWith("org.apache.spark.sql.comet.")
2754+
+ assert(isSparkShape || isCometShape, s"unexpected cached plan:\n$cachedPlan")
2755+
}
2756+
}
27482757
}
2749-
2750-
- test("Do not unwrap ColumnarToRowExec") {
2751-
+ test("Do not unwrap ColumnarToRowExec",
2752-
+ IgnoreComet("https://github.com/apache/datafusion-comet/issues/4137")) {
2753-
withTempPath { workDir =>
2754-
val workDirPath = workDir.getAbsolutePath
2755-
val input = Seq(100, 200).toDF("count")
27562758
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/SchemaPruningSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/SchemaPruningSuite.scala
27572759
index 269990d7d14..140ee4112b1 100644
27582760
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/SchemaPruningSuite.scala

0 commit comments

Comments (0)