diff --git a/spark/src/main/scala/org/apache/comet/CometSparkSessionExtensions.scala b/spark/src/main/scala/org/apache/comet/CometSparkSessionExtensions.scala
index d52f31f54a..4449c9aad4 100644
--- a/spark/src/main/scala/org/apache/comet/CometSparkSessionExtensions.scala
+++ b/spark/src/main/scala/org/apache/comet/CometSparkSessionExtensions.scala
@@ -35,7 +35,7 @@ import org.apache.spark.sql.comet.execution.shuffle.{CometColumnarShuffle, Comet
 import org.apache.spark.sql.comet.util.Utils
 import org.apache.spark.sql.execution._
 import org.apache.spark.sql.execution.adaptive.{AQEShuffleReadExec, BroadcastQueryStageExec, ShuffleQueryStageExec}
-import org.apache.spark.sql.execution.aggregate.{BaseAggregateExec, HashAggregateExec, ObjectHashAggregateExec}
+import org.apache.spark.sql.execution.aggregate.{BaseAggregateExec, HashAggregateExec}
 import org.apache.spark.sql.execution.datasources._
 import org.apache.spark.sql.execution.datasources.csv.CSVFileFormat
 import org.apache.spark.sql.execution.datasources.json.JsonFileFormat
@@ -438,8 +438,7 @@ class CometSparkSessionExtensions
         }

       case op: BaseAggregateExec
-          if op.isInstanceOf[HashAggregateExec] ||
-            op.isInstanceOf[ObjectHashAggregateExec] =>
+          if op.isInstanceOf[HashAggregateExec] =>
         val groupingExprs = op.groupingExpressions
         val aggExprs = op.aggregateExpressions
         val resultExpressions = op.resultExpressions
diff --git a/spark/src/main/scala/org/apache/comet/serde/QueryPlanSerde.scala b/spark/src/main/scala/org/apache/comet/serde/QueryPlanSerde.scala
index 51b32b7dfc..16cf15a73b 100644
--- a/spark/src/main/scala/org/apache/comet/serde/QueryPlanSerde.scala
+++ b/spark/src/main/scala/org/apache/comet/serde/QueryPlanSerde.scala
@@ -34,7 +34,7 @@ import org.apache.spark.sql.comet.execution.shuffle.CometShuffleExchangeExec
 import org.apache.spark.sql.execution
 import org.apache.spark.sql.execution._
 import org.apache.spark.sql.execution.adaptive.{BroadcastQueryStageExec, ShuffleQueryStageExec}
-import org.apache.spark.sql.execution.aggregate.{BaseAggregateExec, HashAggregateExec, ObjectHashAggregateExec}
+import org.apache.spark.sql.execution.aggregate.{BaseAggregateExec, HashAggregateExec}
 import org.apache.spark.sql.execution.exchange.{BroadcastExchangeExec, ReusedExchangeExec, ShuffleExchangeExec}
 import org.apache.spark.sql.execution.joins.{BroadcastHashJoinExec, HashJoin, ShuffledHashJoinExec, SortMergeJoinExec}
 import org.apache.spark.sql.execution.window.WindowExec
@@ -2813,8 +2813,7 @@ object QueryPlanSerde extends Logging with ShimQueryPlanSerde with CometExprShim
         }

       case aggregate: BaseAggregateExec
-          if (aggregate.isInstanceOf[HashAggregateExec] ||
-            aggregate.isInstanceOf[ObjectHashAggregateExec]) &&
+          if aggregate.isInstanceOf[HashAggregateExec] &&
             CometConf.COMET_EXEC_AGGREGATE_ENABLED.get(conf) =>
         val groupingExpressions = aggregate.groupingExpressions
         val aggregateExpressions = aggregate.aggregateExpressions
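For context, a minimal sketch (not part of this patch) of the kind of query the narrowed guard affects: aggregate functions backed by TypedImperativeAggregate, such as collect_list or collect_set, are normally planned by Spark as ObjectHashAggregateExec when spark.sql.execution.useObjectHashAggregateExec is enabled (the default), so with the HashAggregateExec-only condition above they no longer match and run on Spark's own operator instead of Comet. The object name, session setup, and sample data below are illustrative assumptions.

// Minimal sketch: an aggregation that Spark typically plans as ObjectHashAggregateExec.
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.collect_list

object ObjectHashAggregateExample {
  def main(args: Array[String]): Unit = {
    // Local session purely for illustration.
    val spark = SparkSession
      .builder()
      .master("local[*]")
      .appName("object-hash-agg-example")
      .getOrCreate()
    import spark.implicits._

    val df = Seq((1, "a"), (1, "b"), (2, "c")).toDF("k", "v")

    // collect_list is a TypedImperativeAggregate, so this is expected to show
    // ObjectHashAggregate (not HashAggregate) in the physical plan, i.e. an
    // aggregate that falls back to Spark after this change.
    df.groupBy($"k").agg(collect_list($"v")).explain()

    spark.stop()
  }
}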