Commit 67c97aa

rebase
ulysses-you committed Jul 5, 2024
1 parent ddc7785
Showing 7 changed files with 16 additions and 18 deletions.
@@ -422,18 +422,18 @@ class TestOperator extends VeloxWholeStageTransformerSuite with AdaptiveSparkPla
         }
 
         // Test same partition/ordering keys.
-        // runQueryAndCompare(
-        //   "select avg(l_partkey) over" +
-        //     " (partition by l_suppkey order by l_suppkey) from lineitem ") {
-        //   checkGlutenOperatorMatch[WindowExecTransformer]
-        // }
+        runQueryAndCompare(
+          "select avg(l_partkey) over" +
+            " (partition by l_suppkey order by l_suppkey) from lineitem ") {
+          checkGlutenOperatorMatch[WindowExecTransformer]
+        }
 
         // Test overlapping partition/ordering keys.
-        // runQueryAndCompare(
-        //   "select avg(l_partkey) over" +
-        //     " (partition by l_suppkey order by l_suppkey, l_orderkey) from lineitem ") {
-        //   checkGlutenOperatorMatch[WindowExecTransformer]
-        // }
+        runQueryAndCompare(
+          "select avg(l_partkey) over" +
+            " (partition by l_suppkey order by l_suppkey, l_orderkey) from lineitem ") {
+          checkGlutenOperatorMatch[WindowExecTransformer]
+        }
       }
     }
   }
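The re-enabled cases above pair runQueryAndCompare, which runs the SQL through both Gluten and vanilla Spark and compares the row results, with a plan check that the Gluten window operator was actually picked up. A minimal sketch of what a checkGlutenOperatorMatch-style assertion can look like (an assumption about its shape, not Gluten's actual source):

import scala.reflect.ClassTag

import org.apache.spark.sql.DataFrame

// Sketch under assumptions: after the result comparison, walk the executed
// plan and assert that at least one node is the expected Gluten operator,
// e.g. WindowExecTransformer.
def checkGlutenOperatorMatch[T: ClassTag](df: DataFrame): Unit = {
  val opClass = implicitly[ClassTag[T]].runtimeClass
  val matches = df.queryExecution.executedPlan.collect {
    case node if opClass.isInstance(node) => node
  }
  assert(
    matches.nonEmpty,
    s"Expected ${opClass.getSimpleName} in plan:\n${df.queryExecution.executedPlan}")
}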
@@ -47,7 +47,7 @@ class GlutenSQLWindowFunctionSuite extends SQLWindowFunctionSuite with GlutenSQL
     Row(95337, 12, decimal(915.61))
   )
 
-  ignoreGluten("Literal in window partition by and sort") {
+  testGluten("Literal in window partition by and sort") {
     withTable("customer") {
       val rdd = spark.sparkContext.parallelize(customerData)
       val customerDF = spark.createDataFrame(rdd, customerSchema)
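The only change here flips ignoreGluten to testGluten, re-enabling the case; the same one-line change recurs in the copies of this suite below, which look like per-Spark-version duplicates of the file. A plausible sketch of the pair (an assumption; the real definitions live in Gluten's test traits):

import org.scalatest.funsuite.AnyFunSuite

// Sketch under assumptions: testGluten registers a prefixed ScalaTest case,
// while ignoreGluten registers the same case as ignored, so flipping the
// call re-enables the test without touching its body.
trait GlutenTestNaming extends AnyFunSuite {
  protected def testGluten(name: String)(body: => Unit): Unit =
    test("Gluten - " + name)(body)
  protected def ignoreGluten(name: String)(body: => Unit): Unit =
    ignore("Gluten - " + name)(body)
}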
@@ -47,7 +47,7 @@ class GlutenSQLWindowFunctionSuite extends SQLWindowFunctionSuite with GlutenSQL
     Row(95337, 12, decimal(915.61))
   )
 
-  ignoreGluten("Literal in window partition by and sort") {
+  testGluten("Literal in window partition by and sort") {
     withTable("customer") {
       val rdd = spark.sparkContext.parallelize(customerData)
       val customerDF = spark.createDataFrame(rdd, customerSchema)
@@ -1182,7 +1182,6 @@ class VeloxTestSettings extends BackendTestSettings {
   enableSuite[GlutenDataFrameToSchemaSuite]
   enableSuite[GlutenDatasetUnpivotSuite]
   enableSuite[GlutenLateralColumnAliasSuite]
-    .exclude("Aggregate expressions containing no aggregate or grouping expressions still resolves")
   enableSuite[GlutenParametersSuite]
   enableSuite[GlutenResolveDefaultColumnsSuite]
   enableSuite[GlutenSubqueryHintPropagationSuite]
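Here the .exclude entry under GlutenLateralColumnAliasSuite is deleted, so that Spark UT case runs against the Velox backend again. A simplified sketch of the enable/exclude DSL this settings file uses (internals are assumptions, not Gluten's implementation):

import scala.collection.mutable
import scala.reflect.ClassTag

// Sketch under assumptions: enableSuite opts a whole Spark test suite in,
// and exclude opts individual test names within that suite back out.
class BackendTestSettingsSketch {
  final class SuiteSettings {
    val excluded: mutable.Set[String] = mutable.Set.empty
    def exclude(testName: String): SuiteSettings = { excluded += testName; this }
  }

  private val suites = mutable.Map.empty[String, SuiteSettings]

  def enableSuite[T: ClassTag]: SuiteSettings = {
    val settings = new SuiteSettings
    suites(implicitly[ClassTag[T]].runtimeClass.getName) = settings
    settings
  }

  // A test runs only if its suite is enabled and the test is not excluded.
  def shouldRun(suiteName: String, testName: String): Boolean =
    suites.get(suiteName).exists(s => !s.excluded.contains(testName))
}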
@@ -47,7 +47,7 @@ class GlutenSQLWindowFunctionSuite extends SQLWindowFunctionSuite with GlutenSQL
     Row(95337, 12, decimal(915.61))
   )
 
-  ignoreGluten("Literal in window partition by and sort") {
+  testGluten("Literal in window partition by and sort") {
     withTable("customer") {
       val rdd = spark.sparkContext.parallelize(customerData)
       val customerDF = spark.createDataFrame(rdd, customerSchema)
@@ -1197,7 +1197,6 @@ class VeloxTestSettings extends BackendTestSettings {
   enableSuite[GlutenDataFrameToSchemaSuite]
   enableSuite[GlutenDatasetUnpivotSuite]
   enableSuite[GlutenLateralColumnAliasSuite]
-    .exclude("Aggregate expressions containing no aggregate or grouping expressions still resolves")
   enableSuite[GlutenParametersSuite]
   enableSuite[GlutenResolveDefaultColumnsSuite]
   enableSuite[GlutenSubqueryHintPropagationSuite]
@@ -47,7 +47,7 @@ class GlutenSQLWindowFunctionSuite extends SQLWindowFunctionSuite with GlutenSQL
     Row(95337, 12, decimal(915.61))
   )
 
-  ignoreGluten("Literal in window partition by and sort") {
+  testGluten("Literal in window partition by and sort") {
     withTable("customer") {
       val rdd = spark.sparkContext.parallelize(customerData)
       val customerDF = spark.createDataFrame(rdd, customerSchema)
@@ -93,7 +93,7 @@ class GlutenSQLWindowFunctionSuite extends SQLWindowFunctionSuite with GlutenSQL
     }
   }
 
-  ignoreGluten("Filter on row number") {
+  testGluten("Filter on row number") {
     withTable("customer") {
       val rdd = spark.sparkContext.parallelize(customerData)
       val customerDF = spark.createDataFrame(rdd, customerSchema)
@@ -137,7 +137,7 @@ class GlutenSQLWindowFunctionSuite extends SQLWindowFunctionSuite with GlutenSQL
     }
   }
 
-  ignoreGluten("Filter on rank") {
+  testGluten("Filter on rank") {
     withTable("customer") {
       val rdd = spark.sparkContext.parallelize(customerData)
       val customerDF = spark.createDataFrame(rdd, customerSchema)
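The last two re-enabled cases filter on ranking window functions. The test names suggest queries of roughly this shape (a guess; the column names and data here are hypothetical, and the actual SQL lives in Spark's SQLWindowFunctionSuite):

import org.apache.spark.sql.SparkSession

object FilterOnRowNumberSketch extends App {
  val spark = SparkSession.builder().master("local[1]").appName("sketch").getOrCreate()
  import spark.implicits._

  // Hypothetical stand-in for the suite's customerData/customerSchema.
  Seq((1, "a"), (2, "a"), (3, "b")).toDF("id", "grp").createOrReplaceTempView("customer")

  // Rank rows within each group, then filter on the ranking column in the
  // outer query; "Filter on rank" would use rank() instead of row_number().
  spark
    .sql("""
      |SELECT id, grp FROM (
      |  SELECT id, grp, row_number() OVER (PARTITION BY grp ORDER BY id) AS rn
      |  FROM customer
      |) WHERE rn = 1
    """.stripMargin)
    .show()

  spark.stop()
}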
