diff --git a/gluten-core/src/main/scala/io/glutenproject/execution/BasicPhysicalOperatorTransformer.scala b/gluten-core/src/main/scala/io/glutenproject/execution/BasicPhysicalOperatorTransformer.scala
index 7c8e496eb3bf6..f17df39bc6094 100644
--- a/gluten-core/src/main/scala/io/glutenproject/execution/BasicPhysicalOperatorTransformer.scala
+++ b/gluten-core/src/main/scala/io/glutenproject/execution/BasicPhysicalOperatorTransformer.scala
@@ -48,7 +48,6 @@ abstract class FilterExecTransformerBase(val cond: Expression, val input: SparkP
   extends UnaryExecNode
   with TransformSupport
   with PredicateHelper
-  with AliasAwareOutputPartitioning
   with Logging {
 
   // Note: "metrics" is made transient to avoid sending driver-side metrics to tasks.
@@ -130,8 +129,6 @@ abstract class FilterExecTransformerBase(val cond: Expression, val input: SparkP
     }
   }
 
-  override protected def outputExpressions: Seq[NamedExpression] = output
-
   override def output: Seq[Attribute] = {
     child.output.map {
       a =>
@@ -220,7 +217,6 @@ case class ProjectExecTransformer private (projectList: Seq[NamedExpression], ch
   extends UnaryExecNode
   with TransformSupport
   with PredicateHelper
-  with AliasAwareOutputPartitioning
   with Logging {
 
   // Note: "metrics" is made transient to avoid sending driver-side metrics to tasks.
@@ -358,9 +354,7 @@ case class ProjectExecTransformer private (projectList: Seq[NamedExpression], ch
   override def doExecuteColumnar(): RDD[ColumnarBatch] = {
     throw new UnsupportedOperationException(s"This operator doesn't support doExecuteColumnar().")
   }
-
-  override protected def outputExpressions: Seq[NamedExpression] = projectList
-
+
   override protected def doExecute()
       : org.apache.spark.rdd.RDD[org.apache.spark.sql.catalyst.InternalRow] = {
     throw new UnsupportedOperationException(s"This operator doesn't support doExecute().")
diff --git a/shims/spark34/src/main/scala/org/apache/spark/sql/execution/AliasAwareOutputExpression.scala b/shims/spark34/src/main/scala/org/apache/spark/sql/execution/AliasAwareOutputExpression.scala
deleted file mode 100644
index 2a21af5980f22..0000000000000
--- a/shims/spark34/src/main/scala/org/apache/spark/sql/execution/AliasAwareOutputExpression.scala
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.spark.sql.execution
-
-import org.apache.spark.sql.catalyst.plans.AliasAwareOutputExpression
-
-/**
- * A trait that handles aliases in the `outputExpressions` to produce `outputPartitioning` that
- * satisfies distribution requirements.
- */
-trait AliasAwareOutputPartitioning extends AliasAwareOutputExpression {}