Skip to content

Commit

Permalink
[GLUTEN-6877][CH] Support anti/semi join with inequal join condition (a…
Browse files Browse the repository at this point in the history
…pache#6913)

* support anti/semi join with mixed join condition

* enable uts

* fix tests
  • Loading branch information
lgbo-ustc authored and shamirchen committed Oct 14, 2024
1 parent dc970eb commit 1b0ef20
Show file tree
Hide file tree
Showing 22 changed files with 54 additions and 79 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -34,15 +34,12 @@ import com.google.protobuf.{Any, StringValue}
import io.substrait.proto.JoinRel

object JoinTypeTransform {
def toNativeJoinType(joinType: JoinType): JoinType = {
joinType match {
case ExistenceJoin(_) =>
LeftSemi
case _ =>
joinType
}
}

// ExistenceJoin is introduced in #SPARK-14781. It returns all rows from the left table with
// a new column to indicate whether the row is matched in the right table.
// Indeed, the ExistenceJoin is transformed into left any join in CH.
// We don't have left any join in substrait, so use left semi join instead.
// and isExistenceJoin is set to true to indicate that it is an existence join.
def toSubstraitJoinType(sparkJoin: JoinType, buildRight: Boolean): JoinRel.JoinType =
sparkJoin match {
case _: InnerLike =>
Expand Down Expand Up @@ -104,7 +101,7 @@ case class CHShuffledHashJoinExecTransformer(
override protected def doValidateInternal(): ValidationResult = {
val shouldFallback =
CHJoinValidateUtil.shouldFallback(
ShuffleHashJoinStrategy(finalJoinType),
ShuffleHashJoinStrategy(joinType),
left.outputSet,
right.outputSet,
condition)
Expand All @@ -113,7 +110,6 @@ case class CHShuffledHashJoinExecTransformer(
}
super.doValidateInternal()
}
private val finalJoinType = JoinTypeTransform.toNativeJoinType(joinType)

override def genJoinParameters(): Any = {
val (isBHJ, isNullAwareAntiJoin, buildHashTableId): (Int, Int, String) = (0, 0, "")
Expand Down Expand Up @@ -226,7 +222,7 @@ case class CHBroadcastHashJoinExecTransformer(
override protected def doValidateInternal(): ValidationResult = {
val shouldFallback =
CHJoinValidateUtil.shouldFallback(
BroadcastHashJoinStrategy(finalJoinType),
BroadcastHashJoinStrategy(joinType),
left.outputSet,
right.outputSet,
condition)
Expand Down Expand Up @@ -255,7 +251,7 @@ case class CHBroadcastHashJoinExecTransformer(
val context =
BroadCastHashJoinContext(
buildKeyExprs,
finalJoinType,
joinType,
buildSide == BuildRight,
isMixedCondition(condition),
joinType.isInstanceOf[ExistenceJoin],
Expand All @@ -278,12 +274,6 @@ case class CHBroadcastHashJoinExecTransformer(
res
}

// ExistenceJoin is introduced in #SPARK-14781. It returns all rows from the left table with
// a new column to indicate whether the row is matched in the right table.
// Indeed, the ExistenceJoin is transformed into left any join in CH.
// We don't have left any join in substrait, so use left semi join instead.
// and isExistenceJoin is set to true to indicate that it is an existence join.
private val finalJoinType = JoinTypeTransform.toNativeJoinType(joinType)
override protected lazy val substraitJoinType: JoinRel.JoinType = {
JoinTypeTransform.toSubstraitJoinType(joinType, buildSide == BuildRight)
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,13 +29,10 @@ case class BroadcastHashJoinStrategy(joinType: JoinType) extends JoinStrategy {}
case class SortMergeJoinStrategy(joinType: JoinType) extends JoinStrategy {}

/**
* The logic here is that if it is not an equi-join spark will create BNLJ, which will fallback, if
* it is an equi-join, spark will create BroadcastHashJoin or ShuffleHashJoin, for these join types,
* we need to filter For cases that cannot be handled by the backend, 1 there are at least two
* different tables column and Literal in the condition Or condition for comparison, for example: (a
* join b on a.a1 = b.b1 and (a.a2 > 1 or b.b2 < 2) ) 2 tow join key for inequality comparison (!= ,
* > , <), for example: (a join b on a.a1 > b.b1) There will be a fallback for Nullaware Jion For
* Existence Join which is just an optimization of exist subquery, it will also fallback
BroadcastHashJoinStrategy and ShuffleHashJoinStrategy are relatively complete. They support
left/right/inner full/anti/semi join, existence join, and also support join conditions with
columns from both sides. e.g. (a join b on a.a1 = b.b1 and a.a2 > 1 and b.b2 < 2)
* SortMergeJoinStrategy is not fully supported for all cases in CH.
*/

object CHJoinValidateUtil extends Logging {
Expand All @@ -52,33 +49,24 @@ object CHJoinValidateUtil extends Logging {
leftOutputSet: AttributeSet,
rightOutputSet: AttributeSet,
condition: Option[Expression]): Boolean = {
var shouldFallback = false
val joinType = joinStrategy.joinType

if (!joinType.isInstanceOf[ExistenceJoin] && joinType.sql.contains("INNER")) {
shouldFallback = false;
} else if (
val hasMixedFilterCondition =
condition.isDefined && hasTwoTableColumn(leftOutputSet, rightOutputSet, condition.get)
) {
shouldFallback = joinStrategy match {
case BroadcastHashJoinStrategy(joinTy) =>
joinTy.sql.contains("SEMI") || joinTy.sql.contains("ANTI")
case SortMergeJoinStrategy(_) => true
case ShuffleHashJoinStrategy(joinTy) =>
joinTy.sql.contains("SEMI") || joinTy.sql.contains("ANTI")
case UnknownJoinStrategy(joinTy) =>
joinTy.sql.contains("SEMI") || joinTy.sql.contains("ANTI")
}
} else {
shouldFallback = joinStrategy match {
case SortMergeJoinStrategy(joinTy) =>
joinTy.sql.contains("SEMI") || joinTy.sql.contains("ANTI") || joinTy.toString.contains(
"ExistenceJoin")
case _ => false
}
val shouldFallback = joinStrategy match {
case SortMergeJoinStrategy(joinType) =>
if (!joinType.isInstanceOf[ExistenceJoin] && joinType.sql.contains("INNER")) {
false
} else {
joinType.sql.contains("SEMI") || joinType.sql.contains("ANTI") || joinType.toString
.contains("ExistenceJoin") || hasMixedFilterCondition
}
case UnknownJoinStrategy(joinType) =>
throw new IllegalArgumentException(s"Unknown join type $joinStrategy")
case _ => false
}

if (shouldFallback) {
logError(s"Fallback for join type $joinType")
logError(s"Fallback for join type $joinStrategy")
}
shouldFallback
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -119,7 +119,7 @@ class GlutenClickHouseColumnarMemorySortShuffleSuite
}

test("TPCH Q21") {
runTPCHQuery(21, noFallBack = false) { df => }
runTPCHQuery(21) { df => }
}

test("TPCH Q22") {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -163,7 +163,7 @@ class GlutenClickHouseColumnarShuffleAQESuite
}

test("TPCH Q21") {
runTPCHQuery(21, noFallBack = false) { df => }
runTPCHQuery(21) { df => }
}

test("TPCH Q22") {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -168,7 +168,7 @@ class GlutenClickHouseDSV2ColumnarShuffleSuite extends GlutenClickHouseTPCHAbstr
}

test("TPCH Q21") {
runTPCHQuery(21, noFallBack = false) { df => }
runTPCHQuery(21) { df => }
}

test("TPCH Q22") {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -126,7 +126,7 @@ class GlutenClickHouseDSV2Suite extends GlutenClickHouseTPCHAbstractSuite {
}

test("TPCH Q21") {
runTPCHQuery(21, noFallBack = false) { df => }
runTPCHQuery(21) { df => }
}

test("TPCH Q22") {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -343,7 +343,7 @@ class GlutenClickHouseDecimalSuite
decimalTPCHTables.foreach {
dt =>
{
val fallBack = (sql_num == 16 || sql_num == 21)
val fallBack = (sql_num == 16)
val compareResult = !dt._2.contains(sql_num)
val native = if (fallBack) "fallback" else "native"
val compare = if (compareResult) "compare" else "noCompare"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -62,11 +62,7 @@ abstract class GlutenClickHouseTPCDSAbstractSuite
})

protected def fallbackSets(isAqe: Boolean): Set[Int] = {
val more = if (isSparkVersionGE("3.5")) Set(44, 67, 70) else Set.empty[Int]

// q16 smj + left semi + not condition
// Q94 BroadcastHashJoin, LeftSemi, NOT condition
Set(16, 94) | more
if (isSparkVersionGE("3.5")) Set(44, 67, 70) else Set.empty[Int]
}
protected def excludedTpcdsQueries: Set[String] = Set(
"q66" // inconsistent results
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -171,7 +171,7 @@ class GlutenClickHouseTPCHNullableColumnarShuffleSuite extends GlutenClickHouseT
}

test("TPCH Q21") {
runTPCHQuery(21, noFallBack = false) { df => }
runTPCHQuery(21) { df => }
}

test("TPCH Q22") {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -174,7 +174,7 @@ class GlutenClickHouseTPCHNullableSuite extends GlutenClickHouseTPCHAbstractSuit
}

test("TPCH Q21") {
runTPCHQuery(21, noFallBack = false) { df => }
runTPCHQuery(21) { df => }
}

test("TPCH Q22") {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -175,7 +175,7 @@ class GlutenClickHouseTPCHSuite extends GlutenClickHouseTPCHAbstractSuite {
}

test("TPCH Q21") {
runTPCHQuery(21, noFallBack = false) { df => }
runTPCHQuery(21) { df => }
}

test("TPCH Q22") {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -239,6 +239,6 @@ class GlutenClickHouseTPCDSParquetColumnarShuffleAQESuite
| LIMIT 100 ;
|""".stripMargin
// There are some BroadcastHashJoin with NOT condition
compareResultsAgainstVanillaSpark(sql, true, { df => }, false)
compareResultsAgainstVanillaSpark(sql, true, { df => })
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -31,9 +31,10 @@ class GlutenClickHouseTPCDSParquetGraceHashJoinSuite extends GlutenClickHouseTPC
.set("spark.io.compression.codec", "snappy")
.set("spark.sql.shuffle.partitions", "5")
.set("spark.sql.autoBroadcastJoinThreshold", "10MB")
.set("spark.memory.offHeap.size", "8g")
.set("spark.memory.offHeap.size", "6g")
.set("spark.gluten.sql.columnar.backend.ch.runtime_settings.join_algorithm", "grace_hash")
.set("spark.gluten.sql.columnar.backend.ch.runtime_settings.max_bytes_in_join", "314572800")
.setMaster("local[2]")
}

executeTPCDSTest(false)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -49,9 +49,10 @@ class GlutenClickHouseTPCDSParquetSortMergeJoinSuite extends GlutenClickHouseTPC
.set("spark.shuffle.manager", "sort")
.set("spark.io.compression.codec", "snappy")
.set("spark.sql.shuffle.partitions", "5")
.set("spark.sql.autoBroadcastJoinThreshold", "10MB")
.set("spark.memory.offHeap.size", "8g")
.set("spark.sql.autoBroadcastJoinThreshold", "-1")
.set("spark.memory.offHeap.size", "6g")
.set("spark.gluten.sql.columnar.forceShuffledHashJoin", "false")
.setMaster("local[2]")
}

executeTPCDSTest(false)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -275,7 +275,7 @@ class GlutenClickHouseTPCHColumnarShuffleParquetAQESuite
}

test("TPCH Q21") {
runTPCHQuery(21, noFallBack = false) {
runTPCHQuery(21) {
df =>
val plans = collect(df.queryExecution.executedPlan) {
case scanExec: BasicScanExecTransformer => scanExec
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,6 @@ class GlutenClickHouseTPCHParquetAQEConcurrentSuite
.set("spark.shuffle.manager", "sort")
.set("spark.io.compression.codec", "snappy")
.set("spark.sql.shuffle.partitions", "5")
.set("spark.sql.autoBroadcastJoinThreshold", "10MB")
.set("spark.sql.adaptive.enabled", "true")
.set("spark.sql.autoBroadcastJoinThreshold", "-1")
}
Expand Down Expand Up @@ -82,5 +81,4 @@ class GlutenClickHouseTPCHParquetAQEConcurrentSuite
queries.map(queryId => runTPCHQuery(queryId) { df => })

}

}
Original file line number Diff line number Diff line change
Expand Up @@ -209,7 +209,7 @@ class GlutenClickHouseTPCHParquetAQESuite
}

test("TPCH Q21") {
runTPCHQuery(21, noFallBack = false) { df => }
runTPCHQuery(21) { df => }
}

test("TPCH Q22") {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -335,7 +335,7 @@ class GlutenClickHouseTPCHSaltNullParquetSuite extends GlutenClickHouseTPCHAbstr
}

test("TPCH Q21") {
runTPCHQuery(21, noFallBack = false) { df => }
runTPCHQuery(21) { df => }
}

test("GLUTEN-2115: Fix wrong number of records shuffle written") {
Expand Down
2 changes: 1 addition & 1 deletion cpp-ch/local-engine/Common/QueryContext.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -172,4 +172,4 @@ double currentThreadGroupMemoryUsageRatio()
}
return static_cast<double>(CurrentThread::getGroup()->memory_tracker.get()) / CurrentThread::getGroup()->memory_tracker.getSoftLimit();
}
}
}
13 changes: 7 additions & 6 deletions cpp-ch/local-engine/Parser/JoinRelParser.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -681,14 +681,14 @@ bool JoinRelParser::couldRewriteToMultiJoinOnClauses(
auto optional_keys = parse_join_keys(current_expr);
if (!optional_keys)
{
LOG_ERROR(getLogger("JoinRelParser"), "Not equal comparison for keys from both tables");
LOG_DEBUG(getLogger("JoinRelParser"), "Not equal comparison for keys from both tables");
return false;
}
join_on_clause.addKey(optional_keys->first, optional_keys->second, false);
}
else
{
LOG_ERROR(getLogger("JoinRelParser"), "And or equals function is expected");
LOG_DEBUG(getLogger("JoinRelParser"), "And or equals function is expected");
return false;
}
}
Expand All @@ -701,7 +701,8 @@ bool JoinRelParser::couldRewriteToMultiJoinOnClauses(
expression_stack.pop_back();
if (!check_function("or", current_expr))
{
LOG_ERROR(getLogger("JoinRelParser"), "Not an or expression");
LOG_DEBUG(getLogger("JoinRelParser"), "Not an or expression");
return false;
}

auto get_current_join_on_clause = [&]()
Expand All @@ -719,7 +720,7 @@ bool JoinRelParser::couldRewriteToMultiJoinOnClauses(
auto optional_keys = parse_join_keys(arg.value());
if (!optional_keys)
{
LOG_ERROR(getLogger("JoinRelParser"), "Not equal comparison for keys from both tables");
LOG_DEBUG(getLogger("JoinRelParser"), "Not equal comparison for keys from both tables");
return false;
}
get_current_join_on_clause()->addKey(optional_keys->first, optional_keys->second, false);
Expand All @@ -728,7 +729,7 @@ bool JoinRelParser::couldRewriteToMultiJoinOnClauses(
{
if (!parse_and_expression(arg.value(), *get_current_join_on_clause()))
{
LOG_ERROR(getLogger("JoinRelParser"), "Parse and expression failed");
LOG_DEBUG(getLogger("JoinRelParser"), "Parse and expression failed");
return false;
}
}
Expand All @@ -738,7 +739,7 @@ bool JoinRelParser::couldRewriteToMultiJoinOnClauses(
}
else
{
LOG_ERROR(getLogger("JoinRelParser"), "Unknow function");
LOG_DEBUG(getLogger("JoinRelParser"), "Unknow function");
return false;
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -127,7 +127,7 @@ class GlutenClickHouseRSSColumnarMemorySortShuffleSuite
}

test("TPCH Q21") {
runTPCHQuery(21, noFallBack = false) { df => }
runTPCHQuery(21) { df => }
}

test("TPCH Q22") {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -162,7 +162,7 @@ class GlutenClickHouseRSSColumnarShuffleAQESuite
}

test("TPCH Q21") {
runTPCHQuery(21, noFallBack = false) { df => }
runTPCHQuery(21) { df => }
}

test("TPCH Q22") {
Expand Down

0 comments on commit 1b0ef20

Please sign in to comment.