[CORE] Refactor columnar noop write rule #4200
GitHub Actions / Report test results
failed
Jan 5, 2025 in 0s
64520 tests run, 9273 skipped, 1 failed.
Annotations
Check failure on line 1 in org/apache/spark/sql/connector/GlutenWriteDistributionAndOrderingSuite
github-actions / Report test results
GlutenWriteDistributionAndOrderingSuite.continuous mode allows unspecified distribution and empty ordering
Results do not match for query:
Timezone: sun.util.calendar.ZoneInfo[id="America/Los_Angeles",offset=-28800000,dstSavings=3600000,useDaylight=true,transitions=185,lastRule=java.util.SimpleTimeZone[id=America/Los_Angeles,offset=-28800000,dstSavings=3600000,useDaylight=true,startYear=0,startMode=3,startMonth=2,startDay=8,startDayOfWeek=1,startTime=7200000,startTimeMode=0,endMode=3,endMonth=10,endDay=1,endDayOfWeek=1,endTime=7200000,endTimeMode=0]]
Timezone Env:
== Parsed Logical Plan ==
'UnresolvedRelation [testcat, ns1, test_table], [], false
== Analyzed Logical Plan ==
id: int, data: string, day: date
SubqueryAlias testcat.ns1.test_table
+- RelationV2[id#506583, data#506584, day#506585] testcat.ns1.test_table testcat.ns1.test_table
== Optimized Logical Plan ==
RelationV2[id#506583, data#506584, day#506585] testcat.ns1.test_table
== Physical Plan ==
VeloxColumnarToRow
+- ^(23424) ProjectExecTransformer [id#506583, data#506584, day#506585]
+- ^(23424) InputIteratorTransformer[id#506583, data#506584, day#506585]
+- RowToVeloxColumnar
+- BatchScan testcat.ns1.test_table[id#506583, data#506584, day#506585] class org.apache.spark.sql.connector.catalog.InMemoryBaseTable$InMemoryBatchScan RuntimeFilters: []
== Results ==
== Results ==
!== Correct Answer - 2 == == Spark Answer - 0 ==
struct<> struct<>
![1,a,2021-01-01]
![2,b,2022-02-02]
Raw output
org.scalatest.exceptions.TestFailedException:
Results do not match for query:
Timezone: sun.util.calendar.ZoneInfo[id="America/Los_Angeles",offset=-28800000,dstSavings=3600000,useDaylight=true,transitions=185,lastRule=java.util.SimpleTimeZone[id=America/Los_Angeles,offset=-28800000,dstSavings=3600000,useDaylight=true,startYear=0,startMode=3,startMonth=2,startDay=8,startDayOfWeek=1,startTime=7200000,startTimeMode=0,endMode=3,endMonth=10,endDay=1,endDayOfWeek=1,endTime=7200000,endTimeMode=0]]
Timezone Env:
== Parsed Logical Plan ==
'UnresolvedRelation [testcat, ns1, test_table], [], false
== Analyzed Logical Plan ==
id: int, data: string, day: date
SubqueryAlias testcat.ns1.test_table
+- RelationV2[id#506583, data#506584, day#506585] testcat.ns1.test_table testcat.ns1.test_table
== Optimized Logical Plan ==
RelationV2[id#506583, data#506584, day#506585] testcat.ns1.test_table
== Physical Plan ==
VeloxColumnarToRow
+- ^(23424) ProjectExecTransformer [id#506583, data#506584, day#506585]
+- ^(23424) InputIteratorTransformer[id#506583, data#506584, day#506585]
+- RowToVeloxColumnar
+- BatchScan testcat.ns1.test_table[id#506583, data#506584, day#506585] class org.apache.spark.sql.connector.catalog.InMemoryBaseTable$InMemoryBatchScan RuntimeFilters: []
== Results ==
== Results ==
!== Correct Answer - 2 == == Spark Answer - 0 ==
struct<> struct<>
![1,a,2021-01-01]
![2,b,2022-02-02]
at org.scalatest.Assertions.newAssertionFailedException(Assertions.scala:472)
at org.scalatest.Assertions.newAssertionFailedException$(Assertions.scala:471)
at org.apache.spark.sql.QueryTest$.newAssertionFailedException(QueryTest.scala:234)
at org.scalatest.Assertions.fail(Assertions.scala:933)
at org.scalatest.Assertions.fail$(Assertions.scala:929)
at org.apache.spark.sql.QueryTest$.fail(QueryTest.scala:234)
at org.apache.spark.sql.QueryTest$.checkAnswer(QueryTest.scala:244)
at org.apache.spark.sql.QueryTest.checkAnswer(QueryTest.scala:151)
at org.apache.spark.sql.connector.WriteDistributionAndOrderingSuite.$anonfun$new$98(WriteDistributionAndOrderingSuite.scala:1031)
at org.apache.spark.sql.connector.WriteDistributionAndOrderingSuite.$anonfun$new$98$adapted(WriteDistributionAndOrderingSuite.scala:1010)
at org.apache.spark.sql.test.SQLTestUtils.$anonfun$withTempDir$1(SQLTestUtils.scala:79)
at org.apache.spark.sql.test.SQLTestUtils.$anonfun$withTempDir$1$adapted(SQLTestUtils.scala:78)
at org.apache.spark.SparkFunSuite.withTempDir(SparkFunSuite.scala:245)
at org.apache.spark.sql.connector.DistributionAndOrderingSuiteBase.org$apache$spark$sql$test$SQLTestUtils$$super$withTempDir(DistributionAndOrderingSuiteBase.scala:33)
at org.apache.spark.sql.test.SQLTestUtils.withTempDir(SQLTestUtils.scala:78)
at org.apache.spark.sql.test.SQLTestUtils.withTempDir$(SQLTestUtils.scala:77)
at org.apache.spark.sql.connector.DistributionAndOrderingSuiteBase.withTempDir(DistributionAndOrderingSuiteBase.scala:33)
at org.apache.spark.sql.connector.WriteDistributionAndOrderingSuite.$anonfun$new$97(WriteDistributionAndOrderingSuite.scala:1010)
at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18)
at org.scalatest.enablers.Timed$$anon$1.timeoutAfter(Timed.scala:127)
at org.scalatest.concurrent.TimeLimits$.failAfterImpl(TimeLimits.scala:282)
at org.scalatest.concurrent.TimeLimits.failAfter(TimeLimits.scala:231)
at org.scalatest.concurrent.TimeLimits.failAfter$(TimeLimits.scala:230)
at org.apache.spark.SparkFunSuite.failAfter(SparkFunSuite.scala:69)
at org.apache.spark.SparkFunSuite.$anonfun$test$2(SparkFunSuite.scala:155)
at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
at org.scalatest.Transformer.apply(Transformer.scala:22)
at org.scalatest.Transformer.apply(Transformer.scala:20)
at org.scalatest.funsuite.AnyFunSuiteLike$$anon$1.apply(AnyFunSuiteLike.scala:226)
at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:227)
at org.scalatest.funsuite.AnyFunSuiteLike.invokeWithFixture$1(AnyFunSuiteLike.scala:224)
at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTest$1(AnyFunSuiteLike.scala:236)
at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
at org.scalatest.funsuite.AnyFunSuiteLike.runTest(AnyFunSuiteLike.scala:236)
at org.scalatest.funsuite.AnyFunSuiteLike.runTest$(AnyFunSuiteLike.scala:218)
at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(SparkFunSuite.scala:69)
at org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:234)
at org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:227)
at org.apache.spark.sql.connector.DistributionAndOrderingSuiteBase.org$scalatest$BeforeAndAfter$$super$runTest(DistributionAndOrderingSuiteBase.scala:33)
at org.scalatest.BeforeAndAfter.runTest(BeforeAndAfter.scala:213)
at org.scalatest.BeforeAndAfter.runTest$(BeforeAndAfter.scala:203)
at org.apache.spark.sql.connector.DistributionAndOrderingSuiteBase.runTest(DistributionAndOrderingSuiteBase.scala:33)
at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTests$1(AnyFunSuiteLike.scala:269)
at org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:413)
at scala.collection.immutable.List.foreach(List.scala:333)
at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:396)
at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:475)
at org.scalatest.funsuite.AnyFunSuiteLike.runTests(AnyFunSuiteLike.scala:269)
at org.scalatest.funsuite.AnyFunSuiteLike.runTests$(AnyFunSuiteLike.scala:268)
at org.scalatest.funsuite.AnyFunSuite.runTests(AnyFunSuite.scala:1564)
at org.scalatest.Suite.run(Suite.scala:1114)
at org.scalatest.Suite.run$(Suite.scala:1096)
at org.scalatest.funsuite.AnyFunSuite.org$scalatest$funsuite$AnyFunSuiteLike$$super$run(AnyFunSuite.scala:1564)
at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$run$1(AnyFunSuiteLike.scala:273)
at org.scalatest.SuperEngine.runImpl(Engine.scala:535)
at org.scalatest.funsuite.AnyFunSuiteLike.run(AnyFunSuiteLike.scala:273)
at org.scalatest.funsuite.AnyFunSuiteLike.run$(AnyFunSuiteLike.scala:272)
at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:69)
at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:213)
at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
at org.apache.spark.sql.connector.DistributionAndOrderingSuiteBase.org$scalatest$BeforeAndAfter$$super$run(DistributionAndOrderingSuiteBase.scala:33)
at org.scalatest.BeforeAndAfter.run(BeforeAndAfter.scala:273)
at org.scalatest.BeforeAndAfter.run$(BeforeAndAfter.scala:271)
at org.apache.spark.sql.connector.DistributionAndOrderingSuiteBase.run(DistributionAndOrderingSuiteBase.scala:33)
at org.scalatest.Suite.callExecuteOnSuite$1(Suite.scala:1178)
at org.scalatest.Suite.$anonfun$runNestedSuites$1(Suite.scala:1225)
at scala.collection.ArrayOps$.foreach$extension(ArrayOps.scala:1328)
at org.scalatest.Suite.runNestedSuites(Suite.scala:1223)
at org.scalatest.Suite.runNestedSuites$(Suite.scala:1156)
at org.scalatest.tools.DiscoverySuite.runNestedSuites(DiscoverySuite.scala:30)
at org.scalatest.Suite.run(Suite.scala:1111)
at org.scalatest.Suite.run$(Suite.scala:1096)
at org.scalatest.tools.DiscoverySuite.run(DiscoverySuite.scala:30)
at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:47)
at org.scalatest.tools.Runner$.$anonfun$doRunRunRunDaDoRunRun$13(Runner.scala:1321)
at org.scalatest.tools.Runner$.$anonfun$doRunRunRunDaDoRunRun$13$adapted(Runner.scala:1315)
at scala.collection.immutable.List.foreach(List.scala:333)
at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:1315)
at org.scalatest.tools.Runner$.$anonfun$runOptionallyWithPassFailReporter$24(Runner.scala:992)
at org.scalatest.tools.Runner$.$anonfun$runOptionallyWithPassFailReporter$24$adapted(Runner.scala:970)
at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:1481)
at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:970)
at org.scalatest.tools.Runner$.main(Runner.scala:775)
at org.scalatest.tools.Runner.main(Runner.scala)
Loading