Skip to content

Commit

Permalink
add more test cases for negative scale and higher precision
Browse files Browse the repository at this point in the history
  • Loading branch information
himadripal committed Nov 16, 2024
1 parent 471c2a7 commit 3062de2
Show file tree
Hide file tree
Showing 2 changed files with 25 additions and 6 deletions.
25 changes: 23 additions & 2 deletions spark/src/test/scala/org/apache/comet/CometCastSuite.scala
Original file line number Diff line number Diff line change
Expand Up @@ -896,17 +896,38 @@ class CometCastSuite extends CometTestBase with AdaptiveSparkPlanHelper {
}

test("cast between decimals with different precision and scale") {
  // Source values take the default Decimal(38, 18) type; the cast narrows them
  // to Decimal(7, 2), which cannot hold all of them — exercises overflow handling.
  val inputs = Seq(BigDecimal("12345.6789"), BigDecimal("9876.5432"), BigDecimal("123.4567"))
  castTest(withNulls(inputs).toDF("a"), DataTypes.createDecimalType(7, 2))
}

test("cast two between decimals with different precision and scale") {
test("cast between decimals with lower precision and scale") {
  // Source column is Decimal(10, 2); target Decimal(9, 1) has both a lower
  // precision and a lower scale than the source.
  val target = DataTypes.createDecimalType(9, 1)
  castTest(generateDecimalsPrecision10Scale2(), target)
}

test("cast between decimals with higher precision than source") {
  // Source column is Decimal(10, 2); target Decimal(10, 4) keeps the precision
  // but raises the scale. NOTE(review): fallback-explain logging is enabled
  // here, presumably because this cast may not run natively — confirm.
  val target = DataTypes.createDecimalType(10, 4)
  withSQLConf("spark.comet.explainFallback.enabled" -> "true") {
    castTest(generateDecimalsPrecision10Scale2(), target)
  }
}

test("cast between decimals with negative scale") {
  // DECIMAL(10,-4) has a negative *scale* (the precision is still 10); the
  // old test name said "negative precision", which was inaccurate.
  // Spark's parser rejects a negative scale, so both runs should fail the
  // same way; compare whether each failure is a PARSE_SYNTAX_ERROR.
  //
  // Bug fix: checkSparkMaybeThrows returns (Option[Throwable], Option[Throwable]),
  // so the previous `expected.contains("PARSE_SYNTAX_ERROR")` compared a
  // Throwable against a String and was always false, making the assertion
  // vacuous (false === false). Inspect the exception message instead.
  checkSparkMaybeThrows(
    spark.sql("select a, cast(a as DECIMAL(10,-4)) from t order by a")) match {
    case (expected, actual) =>
      def isParseError(e: Option[Throwable]): Boolean =
        e.exists(t => Option(t.getMessage).exists(_.contains("PARSE_SYNTAX_ERROR")))
      assert(isParseError(expected) === isParseError(actual))
  }
}

test("cast between decimals with zero scale") {
  // Source column is Decimal(10, 2); target Decimal(10, 0) has zero *scale*
  // (precision is still 10), so the fractional digits are removed by the cast.
  // Fixes the stale comment (it claimed the target was Decimal(10, 4)) and the
  // misleading "zero precision" test name.
  castTest(generateDecimalsPrecision10Scale2(), DataTypes.createDecimalType(10, 0))
}

// Builds a single-column DataFrame ("a") of generated floats interleaved with nulls.
private def generateFloats(): DataFrame = {
  val floats = gen.generateFloats(dataSize)
  val withNullsAdded = withNulls(floats)
  withNullsAdded.toDF("a")
}
Expand Down
6 changes: 2 additions & 4 deletions spark/src/test/scala/org/apache/spark/sql/CometTestBase.scala
Original file line number Diff line number Diff line change
Expand Up @@ -231,11 +231,9 @@ abstract class CometTestBase
df: => DataFrame): (Option[Throwable], Option[Throwable]) = {
var expected: Option[Throwable] = None
withSQLConf(CometConf.COMET_ENABLED.key -> "false") {
val dfSpark = Dataset.ofRows(spark, df.logicalPlan)
expected = Try(dfSpark.collect()).failed.toOption
expected = Try(Dataset.ofRows(spark, df.logicalPlan).collect()).failed.toOption
}
val dfComet = Dataset.ofRows(spark, df.logicalPlan)
val actual = Try(dfComet.collect()).failed.toOption
val actual = Try(Dataset.ofRows(spark, df.logicalPlan).collect()).failed.toOption
(expected, actual)
}

Expand Down

0 comments on commit 3062de2

Please sign in to comment.