Skip to content

Commit

Permalink
[GLUTEN-1577][CORE][Fix] Respect spark's config for case sensitive when get attribute name (apache#1578)
Browse files Browse the repository at this point in the history
  • Loading branch information
exmy authored May 11, 2023
1 parent 74db759 commit ac2b18f
Show file tree
Hide file tree
Showing 3 changed files with 27 additions and 5 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,8 @@

package io.glutenproject.expression

import java.util.Locale

import io.glutenproject.execution.{BasicScanExecTransformer, BatchScanExecTransformer, FileSourceScanExecTransformer}
import io.glutenproject.substrait.`type`._
import io.glutenproject.substrait.rel.LocalFilesNode.ReadFileFormat
Expand All @@ -26,6 +28,7 @@ import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.aggregate._
import org.apache.spark.sql.catalyst.optimizer._
import org.apache.spark.sql.catalyst.plans.{FullOuter, Inner, JoinType, LeftAnti, LeftOuter, LeftSemi, RightOuter}
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types._
import org.apache.spark.sql.vectorized.ColumnarBatch

Expand Down Expand Up @@ -76,11 +79,16 @@ object ConverterUtils extends Logging {
}

def getShortAttributeName(attr: Attribute): String = {
  // Honor Spark's case-sensitivity setting: when analysis is case-insensitive
  // (Spark's default), normalize the attribute name to lower case so that names
  // differing only in case resolve to the same short name. Locale.ROOT avoids
  // locale-dependent lowercasing surprises (e.g. the Turkish dotless-i).
  val name = if (SQLConf.get.caseSensitiveAnalysis) {
    attr.name
  } else {
    attr.name.toLowerCase(Locale.ROOT)
  }
  // Strip a trailing "(...)" suffix (e.g. expression-derived names such as
  // "sum(x)") so only the bare identifier is returned.
  val subIndex = name.indexOf("(")
  if (subIndex != -1) {
    name.substring(0, subIndex)
  } else {
    name
  }
}

Expand All @@ -89,7 +97,7 @@ object ConverterUtils extends Logging {
}

/** Returns true iff the Substrait nullability enum marks the type as nullable. */
def isNullable(nullability: Type.Nullability): Boolean = {
  // Expression-oriented form: the comparison itself is the return value,
  // no explicit `return` needed.
  nullability == Type.Nullability.NULLABILITY_NULLABLE
}

def parseFromSubstraitType(substraitType: Type): (DataType, Boolean) = {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -221,7 +221,8 @@ class ClickHouseTestSettings extends BackendTestSettings {
"inner join where, one match per row",
"left semi join",
"multiple-key equi-join is hash-join",
"full outer join"
"full outer join",
GlutenTestConstants.GLUTEN_TEST + "test case sensitive for BHJ"
)

enableSuite[GlutenHashExpressionsSuite]
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -40,4 +40,17 @@ class GlutenJoinSuite extends JoinSuite with GlutenSQLTestsTrait {
// NaN is not supported currently, just skip.
"NaN and -0.0 in join keys"
)

test(GlutenTestConstants.GLUTEN_TEST + "test case sensitive for BHJ") {
  // Column "C" is declared upper-case but referenced as lower-case "t0.c" in the
  // join condition below; with Spark's default case-insensitive analysis the
  // broadcast hash join must still resolve the attribute correctly.
  spark.sql("create table t_bhj(a int, b int, C int) using parquet")
  spark.sql("insert overwrite t_bhj select id as a, (id+1) as b, (id+2) as c from range(3)")
  val expected = Seq(Row(0, 1), Row(1, 2), Row(2, 3))
  val result = spark.sql(
    """
      |select /*+ BROADCAST(t1) */ t0.a, t0.b
      |from t_bhj as t0 join t_bhj as t1 on t0.a = t1.a and t0.b = t1.b and t0.c = t1.c
      |group by t0.a, t0.b
      |order by t0.a, t0.b
      |""".stripMargin)
  checkAnswer(result, expected)
}
}

0 comments on commit ac2b18f

Please sign in to comment.