Commit: Initial commit

PHILO-HE committed Jun 30, 2024
1 parent 86449d0, commit 402de83
Showing 7 changed files with 13 additions and 36 deletions.
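
In short: the commit removes the dedicated Size expression transformer (both the ClickHouse override and the default in trait SparkPlanExecApi), drops the spark.sql.legacy.sizeOfNull plumbing from the native config, and instead has ExpressionConverter emit a generic size call whose second argument is the expression's own legacySizeOfNull flag passed as a literal. A validation test for array_size(null) is added to ScalarFunctionsValidateSuite.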

@@ -612,13 +612,6 @@ class CHSparkPlanExecApi extends SparkPlanExecApi
     CHStringTranslateTransformer(substraitExprName, srcExpr, matchingExpr, replaceExpr, original)
   }
 
-  override def genSizeExpressionTransformer(
-      substraitExprName: String,
-      child: ExpressionTransformer,
-      original: Size): ExpressionTransformer = {
-    CHSizeExpressionTransformer(substraitExprName, child, original)
-  }
-
   override def genLikeTransformer(
       substraitExprName: String,
       left: ExpressionTransformer,

@@ -30,16 +30,6 @@ import com.google.common.collect.Lists
 
 import java.util.Locale
 
-case class CHSizeExpressionTransformer(
-    substraitExprName: String,
-    expr: ExpressionTransformer,
-    original: Size)
-  extends BinaryExpressionTransformer {
-  override def left: ExpressionTransformer = expr
-  // Pass legacyLiteral as second argument in substrait function
-  override def right: ExpressionTransformer = LiteralTransformer(original.legacySizeOfNull)
-}
-
 case class CHTruncTimestampTransformer(
     substraitExprName: String,
     format: ExpressionTransformer,
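
Worth noting: the removed CHSizeExpressionTransformer already passed original.legacySizeOfNull as the second substrait argument through LiteralTransformer, which is exactly what the generic path in ExpressionConverter now does for every backend, so the ClickHouse-specific override and case class become redundant.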

@@ -101,6 +101,12 @@ class ScalarFunctionsValidateSuite extends FunctionsValidateTest {
     }
   }
 
+  test("null input for array_size") {
+    val df = runQueryAndCompare("SELECT array_size(null)") {
+      checkGlutenOperatorMatch[ProjectExecTransformer]
+    }
+  }
+
   test("chr function") {
     val df = runQueryAndCompare(
       "SELECT chr(l_orderkey + 64) " +
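
If broader coverage is wanted, a natural companion is a non-null input. A hedged sketch of such a test, assuming a Spark version where array_size is available and the same runQueryAndCompare / checkGlutenOperatorMatch helpers used by the test above (the test name and query here are hypothetical, not part of the commit):

  test("non-null input for array_size") {
    // Mirrors the array_size(null) test above, but on a literal array,
    // so both the NULL and the non-NULL paths of the size call are exercised.
    runQueryAndCompare("SELECT array_size(array(1, 2, 3))") {
      checkGlutenOperatorMatch[ProjectExecTransformer]
    }
  }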

cpp/core/config/GlutenConfig.h (2 changes: 0 additions & 2 deletions)

@@ -30,8 +30,6 @@ const std::string kGlutenSaveDir = "spark.gluten.saveDir";
 
 const std::string kCaseSensitive = "spark.sql.caseSensitive";
 
-const std::string kLegacySize = "spark.sql.legacy.sizeOfNull";
-
 const std::string kSessionTimezone = "spark.sql.session.timeZone";
 
 const std::string kIgnoreMissingFiles = "spark.sql.files.ignoreMissingFiles";

cpp/velox/compute/WholeStageResultIterator.cc (2 changes: 0 additions & 2 deletions)

@@ -449,8 +449,6 @@ std::unordered_map<std::string, std::string> WholeStageResultIterator::getQueryC
   }
   // Adjust timestamp according to the above configured session timezone.
   configs[velox::core::QueryConfig::kAdjustTimestampToTimezone] = "true";
-  // Align Velox size function with Spark.
-  configs[velox::core::QueryConfig::kSparkLegacySizeOfNull] = std::to_string(veloxCfg_->get<bool>(kLegacySize, true));
 
   {
     // partial aggregation memory config
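
Dropping kSparkLegacySizeOfNull from the Velox query config appears safe here because, after this change, the legacy-size behaviour is no longer a session-wide setting on the native side: it travels with each expression as the literal second argument of the size call (see the ExpressionConverter hunk below), so there is nothing left to read from spark.sql.legacy.sizeOfNull when building the query context.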

@@ -47,7 +47,8 @@ import org.apache.spark.sql.execution.joins.BuildSideRelation
 import org.apache.spark.sql.execution.metric.SQLMetric
 import org.apache.spark.sql.execution.python.ArrowEvalPythonExec
 import org.apache.spark.sql.hive.HiveTableScanExecTransformer
-import org.apache.spark.sql.types.{LongType, NullType, StructType}
+import org.apache.spark.sql.internal.SQLConf
+import org.apache.spark.sql.types.{BooleanType, LongType, NullType, StructType}
 import org.apache.spark.sql.vectorized.ColumnarBatch
 
 import java.lang.{Long => JLong}

@@ -464,13 +464,6 @@ trait SparkPlanExecApi {
       original)
   }
 
-  def genSizeExpressionTransformer(
-      substraitExprName: String,
-      child: ExpressionTransformer,
-      original: Size): ExpressionTransformer = {
-    GenericExpressionTransformer(substraitExprName, Seq(child), original)
-  }
-
   def genLikeTransformer(
       substraitExprName: String,
       left: ExpressionTransformer,
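
With both the ClickHouse override above and this default implementation gone, genSizeExpressionTransformer disappears from the backend API entirely; Size handling is now done uniformly in ExpressionConverter, shown next.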

@@ -396,14 +396,12 @@ object ExpressionConverter extends SQLConfHelper with Logging {
           r
         )
       case size: Size =>
-        if (size.legacySizeOfNull != SQLConf.get.legacySizeOfNull) {
-          throw new GlutenNotSupportException(
-            "The value of legacySizeOfNull field of size is " +
-              "not equals to legacySizeOfNull of SQLConf, this case is not supported yet")
-        }
-        BackendsApiManager.getSparkPlanExecApiInstance.genSizeExpressionTransformer(
+        // Covers Spark ArraySize which is replaced by Size(child, false).
+        val child =
+          replaceWithExpressionTransformerInternal(size.child, attributeSeq, expressionsMap)
+        GenericExpressionTransformer(
           substraitExprName,
-          replaceWithExpressionTransformerInternal(size.child, attributeSeq, expressionsMap),
+          Seq(child, LiteralTransformer(size.legacySizeOfNull)),
           size)
       case namedStruct: CreateNamedStruct =>
        BackendsApiManager.getSparkPlanExecApiInstance.genNamedStructTransformer(
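
The boolean literal that now accompanies the child encodes the difference between size and array_size in Spark: size(NULL) evaluates to -1 when spark.sql.legacy.sizeOfNull is in effect, while array_size is rewritten to Size(child, false) and returns NULL for a NULL input. A minimal, self-contained sketch of that semantics in plain Scala (illustrative only, not Gluten or Spark code):

  object SizeSemanticsSketch {
    // Stand-in for Size(child, legacySizeOfNull): the array length, with the
    // legacy flag deciding what a NULL input maps to.
    def sizeOf(arr: Option[Seq[Int]], legacySizeOfNull: Boolean): Option[Int] = arr match {
      case Some(a)                  => Some(a.length)
      case None if legacySizeOfNull => Some(-1) // size(NULL) under the legacy flag
      case None                     => None     // array_size(NULL), i.e. Size(child, false)
    }

    def main(args: Array[String]): Unit = {
      println(sizeOf(Some(Seq(1, 2, 3)), legacySizeOfNull = true))  // Some(3)
      println(sizeOf(None, legacySizeOfNull = true))                // Some(-1)
      println(sizeOf(None, legacySizeOfNull = false))               // None
    }
  }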
