11diff --git a/pom.xml b/pom.xml
2- index dc757d78812..a7bb268dd39 100644
2+ index dc757d78812..0dc4099c5d8 100644
33--- a/pom.xml
44+++ b/pom.xml
55@@ -152,6 +152,8 @@
66 <kryo.version>4.0.3</kryo.version>
77 <ivy.version>2.5.3</ivy.version>
88 <oro.version>2.0.8</oro.version>
9- + <spark.version.short>4.0</spark.version.short>
10- + <comet.version>0.13.0-SNAPSHOT</comet.version>
9+ + <spark.version.short>4.1</spark.version.short>
10+ + <comet.version>0.14.0-SNAPSHOT</comet.version>
1111 <!--
1212 If you change codahale.metrics.version, you also need to change
1313 the link to metrics.dropwizard.io in docs/monitoring.md.
@@ -37,19 +37,6 @@ index dc757d78812..a7bb268dd39 100644
3737 <!-- SPARK-16484 add `datasketches-java` for support Datasketches HllSketch -->
3838 <dependency>
3939 <groupId>org.apache.datasketches</groupId>
40- diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ToStringBase.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ToStringBase.scala
41- index bc294fd722b..33bef120c0a 100644
42- --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ToStringBase.scala
43- +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ToStringBase.scala
44- @@ -457,7 +457,7 @@ trait ToStringBase { self: UnaryExpression with TimeZoneAwareExpression =>
45-
46- object ToStringBase {
47- def getBinaryFormatter: BinaryFormatter = {
48- - val style = SQLConf.get.getConf(SQLConf.BINARY_OUTPUT_STYLE)
49- + val style: Option[BinaryOutputStyle.Value] = SQLConf.get.getConf(SQLConf.BINARY_OUTPUT_STYLE)
50- style match {
51- case Some(BinaryOutputStyle.UTF8) =>
52- (array: Array[Byte]) => UTF8String.fromBytes(array)
5340diff --git a/sql/core/pom.xml b/sql/core/pom.xml
5441index d2d07a08aa9..d89f80e5b68 100644
5542--- a/sql/core/pom.xml
@@ -700,57 +687,6 @@ index 95e86fe4311..24f99384bb7 100644
700687 }.flatten
701688 assert(filters.contains(GreaterThan(scan.logicalPlan.output.head, Literal(5L))))
702689 }
703- diff --git a/sql/core/src/test/scala/org/apache/spark/sql/IgnoreComet.scala b/sql/core/src/test/scala/org/apache/spark/sql/IgnoreComet.scala
704- new file mode 100644
705- index 00000000000..5691536c114
706- --- /dev/null
707- +++ b/sql/core/src/test/scala/org/apache/spark/sql/IgnoreComet.scala
708- @@ -0,0 +1,45 @@
709- + /*
710- + * Licensed to the Apache Software Foundation (ASF) under one or more
711- + * contributor license agreements. See the NOTICE file distributed with
712- + * this work for additional information regarding copyright ownership.
713- + * The ASF licenses this file to You under the Apache License, Version 2.0
714- + * (the "License"); you may not use this file except in compliance with
715- + * the License. You may obtain a copy of the License at
716- + *
717- + * http://www.apache.org/licenses/LICENSE-2.0
718- + *
719- + * Unless required by applicable law or agreed to in writing, software
720- + * distributed under the License is distributed on an "AS IS" BASIS,
721- + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
722- + * See the License for the specific language governing permissions and
723- + * limitations under the License.
724- + */
725- +
726- + package org.apache.spark.sql
727- +
728- + import org.scalactic.source.Position
729- + import org.scalatest.Tag
730- +
731- + import org.apache.spark.sql.test.SQLTestUtils
732- +
733- + /**
734- + * Tests with this tag will be ignored when Comet is enabled (e.g., via `ENABLE_COMET`).
735- + */
736- + case class IgnoreComet(reason: String) extends Tag("DisableComet")
737- + case class IgnoreCometNativeIcebergCompat(reason: String) extends Tag("DisableComet")
738- + case class IgnoreCometNativeDataFusion(reason: String) extends Tag("DisableComet")
739- + case class IgnoreCometNativeScan(reason: String) extends Tag("DisableComet")
740- +
741- + /**
742- + * Helper trait that disables Comet for all tests regardless of default config values.
743- + */
744- + trait IgnoreCometSuite extends SQLTestUtils {
745- + override protected def test(testName: String, testTags: Tag*)(testFun: => Any)
746- + (implicit pos: Position): Unit = {
747- + if (isCometEnabled) {
748- + ignore(testName + " (disabled when Comet is on)", testTags: _*)(testFun)
749- + } else {
750- + super.test(testName, testTags: _*)(testFun)
751- + }
752- + }
753- + }
754690diff --git a/sql/core/src/test/scala/org/apache/spark/sql/InjectRuntimeFilterSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/InjectRuntimeFilterSuite.scala
755691index 7d7185ae6c1..442a5bddeb8 100644
756692--- a/sql/core/src/test/scala/org/apache/spark/sql/InjectRuntimeFilterSuite.scala
@@ -1819,14 +1755,14 @@ index 47679ed7865..9ffbaecb98e 100644
18191755 assert(collectWithSubqueries(plan) { case s: SortAggregateExec => s }.length == sortAggCount)
18201756 }
18211757diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/SparkPlanSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/SparkPlanSuite.scala
1822- index aed11badb71..ab7e9456e26 100644
1758+ index aed11badb71..1a0b22ef6fd 100644
18231759--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/SparkPlanSuite.scala
18241760+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/SparkPlanSuite.scala
18251761@@ -23,6 +23,7 @@ import org.apache.spark.sql.QueryTest
18261762 import org.apache.spark.sql.catalyst.InternalRow
18271763 import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference}
18281764 import org.apache.spark.sql.catalyst.plans.logical.Deduplicate
1829- + import org.apache.spark.sql.comet.CometColumnarToRowExec
1765+ + import org.apache.spark.sql.comet.CometNativeColumnarToRowExec
18301766 import org.apache.spark.sql.execution.datasources.v2.BatchScanExec
18311767 import org.apache.spark.sql.internal.SQLConf
18321768 import org.apache.spark.sql.test.SharedSparkSession
@@ -1837,20 +1773,20 @@ index aed11badb71..ab7e9456e26 100644
18371773- df.queryExecution.executedPlan.collectFirst { case p: ColumnarToRowExec => p }.get
18381774+ df.queryExecution.executedPlan.collectFirst {
18391775+ case p: ColumnarToRowExec => p
1840- + case p: CometColumnarToRowExec => p
1776+ + case p: CometNativeColumnarToRowExec => p
18411777+ }.get
18421778 try {
18431779 spark.range(1).foreach { _ =>
18441780 columnarToRowExec.canonicalized
18451781diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/WholeStageCodegenSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/WholeStageCodegenSuite.scala
1846- index a3cfdc5a240..1b08a1f42ee 100644
1782+ index a3cfdc5a240..87039732332 100644
18471783--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/WholeStageCodegenSuite.scala
18481784+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/WholeStageCodegenSuite.scala
18491785@@ -22,6 +22,7 @@ import org.apache.spark.rdd.MapPartitionsWithEvaluatorRDD
18501786 import org.apache.spark.sql.{Dataset, QueryTest, Row, SaveMode}
18511787 import org.apache.spark.sql.catalyst.expressions.CodegenObjectFactoryMode
18521788 import org.apache.spark.sql.catalyst.expressions.codegen.{ByteCodeStats, CodeAndComment, CodeGenerator}
1853- + import org.apache.spark.sql.comet.{CometColumnarToRowExec, CometHashJoinExec, CometSortExec, CometSortMergeJoinExec}
1789+ + import org.apache.spark.sql.comet.{CometHashJoinExec, CometNativeColumnarToRowExec, CometSortExec, CometSortMergeJoinExec}
18541790 import org.apache.spark.sql.execution.adaptive.DisableAdaptiveExecutionSuite
18551791 import org.apache.spark.sql.execution.aggregate.{HashAggregateExec, SortAggregateExec}
18561792 import org.apache.spark.sql.execution.columnar.InMemoryTableScanExec
@@ -2063,12 +1999,13 @@ index a3cfdc5a240..1b08a1f42ee 100644
20631999 val projection = Seq.tabulate(columnNum)(i => s"c$i + c$i as newC$i")
20642000 val df = spark.read.parquet(path).selectExpr(projection: _*)
20652001
2066- @@ -815,6 +852,9 @@ class WholeStageCodegenSuite extends QueryTest with SharedSparkSession
2002+ @@ -815,6 +852,10 @@ class WholeStageCodegenSuite extends QueryTest with SharedSparkSession
20672003 assert(distinctWithId.queryExecution.executedPlan.exists {
20682004 case WholeStageCodegenExec(
20692005 ProjectExec(_, BroadcastHashJoinExec(_, _, _, _, _, _: HashAggregateExec, _, _))) => true
20702006+ case WholeStageCodegenExec(
2071- + ProjectExec(_, BroadcastHashJoinExec(_, _, _, _, _, _: CometColumnarToRowExec, _, _))) =>
2007+ + ProjectExec(_, BroadcastHashJoinExec(_, _, _, _, _,
2008+ + InputAdapter(_: CometNativeColumnarToRowExec), _, _))) =>
20722009+ true
20732010 case _ => false
20742011 })
@@ -3068,7 +3005,7 @@ index e31e0e70cf3..034ad5b953e 100644
30683005 import testImplicits._
30693006
30703007diff --git a/sql/core/src/test/scala/org/apache/spark/sql/sources/BucketedReadSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/sources/BucketedReadSuite.scala
3071- index 83ebd24384c..eeaa1d94bc0 100644
3008+ index 83ebd24384c..39c2f46c9b8 100644
30723009--- a/sql/core/src/test/scala/org/apache/spark/sql/sources/BucketedReadSuite.scala
30733010+++ b/sql/core/src/test/scala/org/apache/spark/sql/sources/BucketedReadSuite.scala
30743011@@ -26,10 +26,11 @@ import org.apache.spark.sql.catalyst.expressions
@@ -3144,7 +3081,7 @@ index 83ebd24384c..eeaa1d94bc0 100644
31443081+ case s: SortMergeJoinExec => s
31453082+ case o => fail(s"expected SortMergeJoinExec, but found\n$o")
31463083+ }
3147- + case CometColumnarToRowExec(child) =>
3084+ + case CometNativeColumnarToRowExec(child) =>
31483085+ child.asInstanceOf[CometSortMergeJoinExec].originalPlan match {
31493086+ case s: SortMergeJoinExec => s
31503087+ case o => fail(s"expected SortMergeJoinExec, but found\n$o")
@@ -3398,15 +3335,15 @@ index 22028a585e2..20c6b7c796a 100644
33983335 }
33993336
34003337diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQuerySuite.scala
3401- index 7adf98b7920..fd1696da07b 100644
3338+ index 7adf98b7920..0939894117a 100644
34023339--- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQuerySuite.scala
34033340+++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQuerySuite.scala
34043341@@ -43,7 +43,7 @@ import org.apache.spark.sql.catalyst.types.DataTypeUtils.toAttributes
34053342 import org.apache.spark.sql.classic.{DataFrame, Dataset}
34063343 import org.apache.spark.sql.connector.read.InputPartition
34073344 import org.apache.spark.sql.connector.read.streaming.{Offset => OffsetV2, ReadLimit}
34083345- import org.apache.spark.sql.execution.exchange.{REQUIRED_BY_STATEFUL_OPERATOR, ReusedExchangeExec, ShuffleExchangeExec}
3409- + import org.apache.spark.sql.execution.exchange.{REQUIRED_BY_STATEFUL_OPERATOR, ReusedExchangeExec, ShuffleExchangeLike}
3346+ + import org.apache.spark.sql.execution.exchange.{REQUIRED_BY_STATEFUL_OPERATOR, ReusedExchangeExec, ShuffleExchangeExec, ShuffleExchangeLike}
34103347 import org.apache.spark.sql.execution.streaming._
34113348 import org.apache.spark.sql.execution.streaming.checkpointing.{CheckpointFileManager, OffsetSeqMetadata}
34123349 import org.apache.spark.sql.execution.streaming.operators.stateful.StateStoreSaveExec