Commit bcd2eac

Fix format issues.
1 parent: 4cb98f9

3 files changed: 6 additions & 6 deletions

File tree

spark/src/main/scala/org/apache/comet/rules/CometExecRule.scala

Lines changed: 2 additions & 2 deletions
@@ -339,8 +339,8 @@ case class CometExecRule(session: SparkSession)
           }
           if (cometChild.isInstanceOf[CometNativeExec]) {
             logInfo(
-              s"Converting SubqueryBroadcastExec to " +
-                s"CometSubqueryBroadcastExec for DPP exchange reuse")
+              "Converting SubqueryBroadcastExec to " +
+                "CometSubqueryBroadcastExec for DPP exchange reuse")
             val cometBroadcast = CometBroadcastExchangeExec(b, b.output, b.mode, cometChild)
             val cometSub = CometSubqueryBroadcastExec(
               sub.name,
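Note: this hunk drops Scala's `s` interpolator from literals that contain no `${...}` substitution; on such strings the prefix is a no-op that style checkers such as scalastyle commonly flag. A minimal standalone sketch of the distinction (the values are illustrative, not from the patch):

    object InterpolatorSketch extends App {
      val target = "CometSubqueryBroadcastExec"

      // `s` is required only when the literal embeds an expression.
      println(s"Converting SubqueryBroadcastExec to $target")

      // No substitution: use a plain literal. Prefixing it with `s`
      // would compile, but the interpolator would do nothing.
      println("Converting SubqueryBroadcastExec for DPP exchange reuse")
    }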

spark/src/main/scala/org/apache/spark/sql/comet/CometNativeScanExec.scala

Lines changed: 2 additions & 2 deletions
@@ -185,7 +185,7 @@ case class CometNativeScanExec(
           logDebug(s"Resolving DPP subquery: plan=${e.plan.getClass.getSimpleName}")
           try {
             e.updateResult()
-            logDebug(s"DPP subquery resolved successfully")
+            logDebug("DPP subquery resolved successfully")
           } catch {
             case ex: Exception =>
               logError(s"DPP subquery resolution failed: ${ex.getMessage}")
@@ -200,7 +200,7 @@ case class CometNativeScanExec(
         scan.partitionFilters.foreach {
           case DynamicPruningExpression(e: InSubqueryExec) if e.values().isEmpty =>
             logDebug(
-              s"Resolving CometScanExec DPP subquery: " +
+              "Resolving CometScanExec DPP subquery: " +
                 s"plan=${e.plan.getClass.getSimpleName}")
             e.updateResult()
           case _ =>
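Both hunks adjust logging around the same pattern: walk the scan's partition filters and force any dynamic-pruning subquery whose result is not yet materialized (`values()` stays empty until `updateResult()` runs the subquery and caches its rows). A minimal sketch of that pattern, assuming a Spark classpath; the object and method names are hypothetical:

    import org.apache.spark.internal.Logging
    import org.apache.spark.sql.catalyst.expressions.{DynamicPruningExpression, Expression}
    import org.apache.spark.sql.execution.InSubqueryExec

    object DppResolutionSketch extends Logging {
      // Force unresolved DPP subqueries so their results exist before
      // the plan is handed off for execution.
      def resolveDppSubqueries(partitionFilters: Seq[Expression]): Unit = {
        partitionFilters.foreach {
          case DynamicPruningExpression(e: InSubqueryExec) if e.values().isEmpty =>
            logDebug(s"Resolving DPP subquery: plan=${e.plan.getClass.getSimpleName}")
            e.updateResult() // executes the subquery and caches its result
          case _ => // already resolved, or not a dynamic-pruning filter
        }
      }
    }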

spark/src/main/scala/org/apache/spark/sql/comet/CometSubqueryBroadcastExec.scala

Lines changed: 2 additions & 2 deletions
@@ -31,7 +31,7 @@ import org.apache.spark.sql.catalyst.plans.QueryPlan
 import org.apache.spark.sql.comet.util.Utils
 import org.apache.spark.sql.errors.QueryExecutionErrors
 import org.apache.spark.sql.execution._
-import org.apache.spark.sql.execution.metric.SQLMetrics
+import org.apache.spark.sql.execution.metric.{SQLMetric, SQLMetrics}
 import org.apache.spark.sql.internal.{SQLConf, StaticSQLConf}
 import org.apache.spark.util.ThreadUtils
 import org.apache.spark.util.io.ChunkedByteBuffer
@@ -72,7 +72,7 @@ case class CometSubqueryBroadcastExec(
     }
   }

-  override lazy val metrics = Map(
+  override lazy val metrics: Map[String, SQLMetric] = Map(
     "numOutputRows" -> SQLMetrics.createMetric(sparkContext, "number of output rows"),
     "dataSize" -> SQLMetrics.createMetric(sparkContext, "data size (bytes)"),
     "collectTime" -> SQLMetrics.createMetric(sparkContext, "time to collect (ms)"))
