diff --git a/tools/gluten-it/common/src/main/java/org/apache/gluten/integration/command/Parameterized.java b/tools/gluten-it/common/src/main/java/org/apache/gluten/integration/command/Parameterized.java
index 7e1234e7665d..cadff0a2db91 100644
--- a/tools/gluten-it/common/src/main/java/org/apache/gluten/integration/command/Parameterized.java
+++ b/tools/gluten-it/common/src/main/java/org/apache/gluten/integration/command/Parameterized.java
@@ -65,7 +65,7 @@ public class Parameterized implements Callable {
 
   @Override
   public Integer call() throws Exception {
-    final Map>>> parsed = new HashMap<>();
+    final Map>>> parsed = new LinkedHashMap<>();
     final Seq> excludedCombinations =
         JavaConverters.asScalaBufferConverter(Arrays.stream(excludedDims).map(d -> {
           final Matcher m = excludedDimsPattern.matcher(d);
diff --git a/tools/gluten-it/common/src/main/scala/org/apache/gluten/integration/action/Parameterized.scala b/tools/gluten-it/common/src/main/scala/org/apache/gluten/integration/action/Parameterized.scala
index 8f5bc0946643..e2fc526ce566 100644
--- a/tools/gluten-it/common/src/main/scala/org/apache/gluten/integration/action/Parameterized.scala
+++ b/tools/gluten-it/common/src/main/scala/org/apache/gluten/integration/action/Parameterized.scala
@@ -18,12 +18,14 @@ package org.apache.gluten.integration.action
 
 import org.apache.commons.lang3.exception.ExceptionUtils
 import org.apache.gluten.integration.action.Actions.QuerySelector
+import org.apache.gluten.integration.action.TableRender.Field
 import org.apache.gluten.integration.action.TableRender.RowParser.FieldAppender.RowAppender
 import org.apache.gluten.integration.stat.RamStat
 import org.apache.gluten.integration.{QueryRunner, Suite, TableCreator}
 import org.apache.spark.sql.ConfUtils.ConfImplicits._
 import org.apache.spark.sql.SparkSessionSwitcher
 
+import java.util.concurrent.atomic.AtomicInteger
+
 import scala.collection.mutable
 import scala.collection.mutable.ListBuffer
@@ -39,6 +41,8 @@ class Parameterized(
     metrics: Array[String])
   extends Action {
 
+  validateDims(configDimensions)
+
   private def validateDims(configDimensions: Seq[Dim]): Unit = {
     if (configDimensions
       .map(dim => {
@@ -57,32 +61,33 @@ class Parameterized(
   }
 
   private val coordinates: mutable.LinkedHashMap[Coordinate, Seq[(String, String)]] = {
-    validateDims(configDimensions)
     val dimCount = configDimensions.size
     val coordinateMap = mutable.LinkedHashMap[Coordinate, Seq[(String, String)]]()
+    val nextId: AtomicInteger = new AtomicInteger(1);
 
     def fillCoordinates(
         dimOffset: Int,
-        intermediateCoordinates: Map[String, String],
+        intermediateCoordinate: Map[String, String],
         intermediateConf: Seq[(String, String)]): Unit = {
       if (dimOffset == dimCount) {
         // we got one coordinate
         excludedCombinations.foreach { ec: Set[DimKv] =>
           if (ec.forall { kv =>
-              intermediateCoordinates.contains(kv.k) && intermediateCoordinates(kv.k) == kv.v
+              intermediateCoordinate.contains(kv.k) && intermediateCoordinate(kv.k) == kv.v
             }) {
-            println(s"Coordinate ${Coordinate(intermediateCoordinates)} excluded by $ec.")
+            println(s"Coordinate ${intermediateCoordinate} excluded by $ec.")
             return
           }
         }
-        coordinateMap(Coordinate(intermediateCoordinates)) = intermediateConf
+        coordinateMap(Coordinate(nextId.getAndIncrement(), intermediateCoordinate)) =
+          intermediateConf
         return
       }
       val dim = configDimensions(dimOffset)
       dim.dimValues.foreach { dimValue =>
         fillCoordinates(
           dimOffset + 1,
-          intermediateCoordinates + (dim.name -> dimValue.name),
+          intermediateCoordinate + (dim.name -> dimValue.name),
           intermediateConf ++ dimValue.conf)
       }
     }
@@ -95,7 +100,6 @@ class Parameterized(
 
   override def execute(suite: Suite): Boolean = {
     val runner: QueryRunner =
       new QueryRunner(suite.queryResource(), suite.dataWritePath(scale, genPartitionedData))
-    val allQueries = suite.allQueryIds()
     val sessionSwitcher = suite.sessionSwitcher
     val testConf = suite.getTestConf()
@@ -116,36 +120,40 @@ class Parameterized(
 
     val runQueryIds = queries.select(suite)
 
-    // warm up
-    (0 until warmupIterations).foreach { _ =>
-      runQueryIds.foreach { queryId =>
-        Parameterized.warmUp(suite.tableCreator(), queryId, suite.desc(), sessionSwitcher, runner)
-      }
-    }
-
-    val results = coordinates.flatMap { entry =>
-      val coordinate = entry._1
-      val coordinateResults = (0 until iterations).flatMap { iteration =>
-        println(s"Running tests (iteration $iteration) with coordinate $coordinate...")
-        runQueryIds.map { queryId =>
-          Parameterized.runQuery(
-            runner,
-            suite.tableCreator(),
-            sessionSwitcher,
+    val results = (0 until iterations).flatMap { iteration =>
+      runQueryIds.map { queryId =>
+        val queryResult =
+          TestResultLine(
             queryId,
-            coordinate,
-            suite.desc(),
-            explain,
-            metrics)
-        }
-      }.toList
-      coordinateResults
+            coordinates.map { entry =>
+              val coordinate = entry._1
+              println(s"Running tests (iteration $iteration) with coordinate $coordinate...")
+              // warm up
+              (0 until warmupIterations).foreach { _ =>
+                Parameterized.warmUp(
+                  runner,
+                  suite.tableCreator(),
+                  sessionSwitcher,
+                  queryId,
+                  suite.desc())
+              }
+              // run
+              Parameterized.runQuery(
+                runner,
+                suite.tableCreator(),
+                sessionSwitcher,
+                queryId,
+                coordinate,
+                suite.desc(),
+                explain,
+                metrics)
+            }.toList)
+        queryResult
+      }
     }
 
-    val dimNames = configDimensions.map(dim => dim.name)
-
-    val passedCount = results.count(l => l.succeed)
-    val count = results.count(_ => true)
+    val succeededCount = results.count(l => l.succeeded())
+    val totalCount = results.count(_ => true)
 
     // RAM stats
     println("Performing GC to collect RAM statistics... ")
@@ -160,22 +168,37 @@ class Parameterized(
     println("")
     println("Test report: ")
     println("")
-    printf("Summary: %d out of %d queries passed. \n", passedCount, count)
+    printf(
+      "Summary: %d out of %d queries successfully run on all config combinations. \n",
+      succeededCount,
+      totalCount)
     println("")
-    TestResultLines(dimNames, metrics, results.filter(_.succeed)).print()
+    println("Configurations:")
+    coordinates.foreach { coord =>
+      println(s"${coord._1.id}. ${coord._1}")
+    }
+    println("")
+    val succeeded = results.filter(_.succeeded())
+    TestResultLines(
+      coordinates.size,
+      configDimensions,
+      metrics,
+      succeeded ++ TestResultLine.aggregate("all", succeeded))
+      .print()
     println("")
-    if (passedCount == count) {
+    if (succeededCount == totalCount) {
       println("No failed queries. ")
") println("") } else { println("Failed queries: ") println("") - TestResultLines(dimNames, metrics, results.filter(!_.succeed)).print() + TestResultLines(coordinates.size, configDimensions, metrics, results.filter(!_.succeeded())) + .print() println("") } - if (passedCount != count) { + if (succeededCount != totalCount) { return false } true @@ -185,56 +208,84 @@ class Parameterized( case class DimKv(k: String, v: String) case class Dim(name: String, dimValues: Seq[DimValue]) case class DimValue(name: String, conf: Seq[(String, String)]) -case class Coordinate(coordinate: Map[String, String]) // [dim, dim value] - -case class TestResultLine( - queryId: String, - succeed: Boolean, - coordinate: Coordinate, - rowCount: Option[Long], - planningTimeMillis: Option[Long], - executionTimeMillis: Option[Long], - metrics: Map[String, Long], - errorMessage: Option[String]) +// coordinate: [dim, dim value] +case class Coordinate(id: Int, coordinate: Map[String, String]) { + override def toString: String = coordinate.mkString(", ") +} + +case class TestResultLine(queryId: String, coordinates: Seq[TestResultLine.Coord]) { + def succeeded(): Boolean = { + coordinates.forall(_.succeeded) + } +} object TestResultLine { - class Parser(dimNames: Seq[String], metricNames: Seq[String]) - extends TableRender.RowParser[TestResultLine] { + case class Coord( + coordinate: Coordinate, + succeeded: Boolean, + rowCount: Option[Long], + planningTimeMillis: Option[Long], + executionTimeMillis: Option[Long], + metrics: Map[String, Long], + errorMessage: Option[String]) + + class Parser(metricNames: Seq[String]) extends TableRender.RowParser[TestResultLine] { override def parse(rowAppender: RowAppender, line: TestResultLine): Unit = { val inc = rowAppender.incremental() inc.next().write(line.queryId) - inc.next().write(line.succeed) - dimNames.foreach { dimName => - val coordinate = line.coordinate.coordinate - if (!coordinate.contains(dimName)) { - throw new IllegalStateException("Dimension name not found" + dimName) - } - inc.next().write(coordinate(dimName)) - } - metricNames.foreach { metricName => - val metrics = line.metrics - inc.next().write(metrics.getOrElse(metricName, "N/A")) - } - inc.next().write(line.rowCount.getOrElse("N/A")) - inc.next().write(line.planningTimeMillis.getOrElse("N/A")) - inc.next().write(line.executionTimeMillis.getOrElse("N/A")) + val coords = line.coordinates + coords.foreach(coord => inc.next().write(coord.succeeded)) + coords.foreach(coord => inc.next().write(coord.rowCount)) + metricNames.foreach(metricName => + coords.foreach(coord => inc.next().write(coord.metrics(metricName)))) + coords.foreach(coord => inc.next().write(coord.planningTimeMillis)) + coords.foreach(coord => inc.next().write(coord.executionTimeMillis)) + } + } + + def aggregate(name: String, lines: Iterable[TestResultLine]): Iterable[TestResultLine] = { + if (lines.isEmpty) { + return Nil + } + + if (lines.size == 1) { + return Nil } + + List(lines.reduce { (left, right) => + TestResultLine(name, left.coordinates.zip(right.coordinates).map { + case (leftCoord, rightCoord) => + assert(leftCoord.coordinate == rightCoord.coordinate) + Coord( + leftCoord.coordinate, + leftCoord.succeeded && rightCoord.succeeded, + (leftCoord.rowCount, rightCoord.rowCount).onBothProvided(_ + _), + (leftCoord.planningTimeMillis, rightCoord.planningTimeMillis).onBothProvided(_ + _), + (leftCoord.executionTimeMillis, rightCoord.executionTimeMillis).onBothProvided(_ + _), + (leftCoord.metrics, rightCoord.metrics).sumUp, + 
(leftCoord.errorMessage ++ rightCoord.errorMessage).reduceOption(_ + ", " + _)) + }) + }) } } case class TestResultLines( - dimNames: Seq[String], + coordCount: Int, + configDimensions: Seq[Dim], metricNames: Seq[String], lines: Iterable[TestResultLine]) { def print(): Unit = { - val fields = ListBuffer[String]("Query ID", "Succeeded") - dimNames.foreach(dimName => fields.append(dimName)) - metricNames.foreach(metricName => fields.append(metricName)) - fields.append("Row Count") - fields.append("Planning Time (Millis)") - fields.append("Query Time (Millis)") - val render = TableRender.plain[TestResultLine](fields: _*)( - new TestResultLine.Parser(dimNames, metricNames)) + val fields = ListBuffer[Field](Field.Leaf("Query ID")) + val coordFields = (1 to coordCount).map(id => Field.Leaf(id.toString)) + + fields.append(Field.Branch("Succeeded", coordFields)) + fields.append(Field.Branch("Row Count", coordFields)) + metricNames.foreach(metricName => fields.append(Field.Branch(metricName, coordFields))) + fields.append(Field.Branch("Planning Time (Millis)", coordFields)) + fields.append(Field.Branch("Query Time (Millis)", coordFields)) + + val render = + TableRender.create[TestResultLine](fields: _*)(new TestResultLine.Parser(metricNames)) lines.foreach { line => render.appendRow(line) @@ -253,10 +304,10 @@ object Parameterized { coordinate: Coordinate, desc: String, explain: Boolean, - metrics: Array[String]) = { + metrics: Array[String]): TestResultLine.Coord = { println(s"Running query: $id...") try { - val testDesc = "Gluten Spark %s %s %s".format(desc, id, coordinate) + val testDesc = "Gluten Spark %s [%s] %s".format(desc, id, coordinate) sessionSwitcher.useSession(coordinate.toString, testDesc) runner.createTables(creator, sessionSwitcher.spark()) val result = @@ -265,10 +316,9 @@ object Parameterized { println( s"Successfully ran query $id. " + s"Returned row count: ${resultRows.length}") - TestResultLine( - id, - succeed = true, + TestResultLine.Coord( coordinate, + succeeded = true, Some(resultRows.length), Some(result.planningTimeMillis), Some(result.executionTimeMillis), @@ -280,16 +330,16 @@ object Parameterized { println( s"Error running query $id. 
" + s" Error: ${error.get}") - TestResultLine(id, succeed = false, coordinate, None, None, None, Map.empty, error) + TestResultLine.Coord(coordinate, succeeded = false, None, None, None, Map.empty, error) } } - private[integration] def warmUp( + private def warmUp( + runner: QueryRunner, creator: TableCreator, - id: String, - desc: String, sessionSwitcher: SparkSessionSwitcher, - runner: QueryRunner): Unit = { + id: String, + desc: String): Unit = { println(s"Warming up: Running query: $id...") try { val testDesc = "Gluten Spark %s %s warm up".format(desc, id) diff --git a/tools/gluten-it/common/src/main/scala/org/apache/gluten/integration/action/Queries.scala b/tools/gluten-it/common/src/main/scala/org/apache/gluten/integration/action/Queries.scala index 540abbf454c3..de09d925e4d2 100644 --- a/tools/gluten-it/common/src/main/scala/org/apache/gluten/integration/action/Queries.scala +++ b/tools/gluten-it/common/src/main/scala/org/apache/gluten/integration/action/Queries.scala @@ -114,9 +114,9 @@ object Queries { val inc = rowAppender.incremental() inc.next().write(line.queryId) inc.next().write(line.testPassed) - inc.next().write(line.rowCount.getOrElse("N/A")) - inc.next().write(line.planningTimeMillis.getOrElse("N/A")) - inc.next().write(line.executionTimeMillis.getOrElse("N/A")) + inc.next().write(line.rowCount) + inc.next().write(line.planningTimeMillis) + inc.next().write(line.executionTimeMillis) } } } diff --git a/tools/gluten-it/common/src/main/scala/org/apache/gluten/integration/action/QueriesCompare.scala b/tools/gluten-it/common/src/main/scala/org/apache/gluten/integration/action/QueriesCompare.scala index 596c293e473e..d7b6ffff893c 100644 --- a/tools/gluten-it/common/src/main/scala/org/apache/gluten/integration/action/QueriesCompare.scala +++ b/tools/gluten-it/common/src/main/scala/org/apache/gluten/integration/action/QueriesCompare.scala @@ -81,10 +81,10 @@ case class QueriesCompare( println("") } - var all = QueriesCompare.aggregate(results, "all") + var all = QueriesCompare.aggregate("all", results) if (passedCount != count) { - all = QueriesCompare.aggregate(succeed, "succeeded") ::: all + all = QueriesCompare.aggregate("succeeded", succeed) ::: all } println("Overall: ") @@ -123,13 +123,13 @@ object QueriesCompare { } else None inc.next().write(line.queryId) inc.next().write(line.testPassed) - inc.next().write(line.expectedRowCount.getOrElse("N/A")) - inc.next().write(line.actualRowCount.getOrElse("N/A")) - inc.next().write(line.expectedPlanningTimeMillis.getOrElse("N/A")) - inc.next().write(line.actualPlanningTimeMillis.getOrElse("N/A")) - inc.next().write(line.expectedExecutionTimeMillis.getOrElse("N/A")) - inc.next().write(line.actualExecutionTimeMillis.getOrElse("N/A")) - inc.next().write(speedUp.map("%.2f%%".format(_)).getOrElse("N/A")) + inc.next().write(line.expectedRowCount) + inc.next().write(line.actualRowCount) + inc.next().write(line.expectedPlanningTimeMillis) + inc.next().write(line.actualPlanningTimeMillis) + inc.next().write(line.expectedExecutionTimeMillis) + inc.next().write(line.actualExecutionTimeMillis) + inc.next().write(speedUp.map("%.2f%%".format(_))) } } } @@ -152,7 +152,7 @@ object QueriesCompare { render.print(System.out) } - private def aggregate(succeed: List[TestResultLine], name: String): List[TestResultLine] = { + private def aggregate(name: String, succeed: List[TestResultLine]): List[TestResultLine] = { if (succeed.isEmpty) { return Nil } @@ -160,25 +160,13 @@ object QueriesCompare { succeed.reduce((r1, r2) => TestResultLine( name, - 
testPassed = true, - if (r1.expectedRowCount.nonEmpty && r2.expectedRowCount.nonEmpty) - Some(r1.expectedRowCount.get + r2.expectedRowCount.get) - else None, - if (r1.actualRowCount.nonEmpty && r2.actualRowCount.nonEmpty) - Some(r1.actualRowCount.get + r2.actualRowCount.get) - else None, - if (r1.expectedPlanningTimeMillis.nonEmpty && r2.expectedPlanningTimeMillis.nonEmpty) - Some(r1.expectedPlanningTimeMillis.get + r2.expectedPlanningTimeMillis.get) - else None, - if (r1.actualPlanningTimeMillis.nonEmpty && r2.actualPlanningTimeMillis.nonEmpty) - Some(r1.actualPlanningTimeMillis.get + r2.actualPlanningTimeMillis.get) - else None, - if (r1.expectedExecutionTimeMillis.nonEmpty && r2.expectedExecutionTimeMillis.nonEmpty) - Some(r1.expectedExecutionTimeMillis.get + r2.expectedExecutionTimeMillis.get) - else None, - if (r1.actualExecutionTimeMillis.nonEmpty && r2.actualExecutionTimeMillis.nonEmpty) - Some(r1.actualExecutionTimeMillis.get + r2.actualExecutionTimeMillis.get) - else None, + r1.testPassed && r2.testPassed, + (r1.expectedRowCount, r2.expectedRowCount).onBothProvided(_ + _), + (r1.actualRowCount, r2.actualRowCount).onBothProvided(_ + _), + (r1.expectedPlanningTimeMillis, r2.expectedPlanningTimeMillis).onBothProvided(_ + _), + (r1.actualPlanningTimeMillis, r2.actualPlanningTimeMillis).onBothProvided(_ + _), + (r1.expectedExecutionTimeMillis, r2.expectedExecutionTimeMillis).onBothProvided(_ + _), + (r1.actualExecutionTimeMillis, r2.actualExecutionTimeMillis).onBothProvided(_ + _), None))) } diff --git a/tools/gluten-it/common/src/main/scala/org/apache/gluten/integration/action/TableRender.scala b/tools/gluten-it/common/src/main/scala/org/apache/gluten/integration/action/TableRender.scala index 4cded2848b6e..2b1cca61e3f4 100644 --- a/tools/gluten-it/common/src/main/scala/org/apache/gluten/integration/action/TableRender.scala +++ b/tools/gluten-it/common/src/main/scala/org/apache/gluten/integration/action/TableRender.scala @@ -20,7 +20,7 @@ package org.apache.gluten.integration.action import org.apache.commons.lang3.StringUtils import org.apache.gluten.integration.action.TableRender.RowParser.FieldAppender.RowAppender -import java.io.{OutputStream, PrintStream} +import java.io.{ByteArrayOutputStream, OutputStream, PrintStream} import scala.collection.mutable trait TableRender[ROW <: Any] { @@ -31,7 +31,8 @@ trait TableRender[ROW <: Any] { object TableRender { def create[ROW <: Any](fields: Field*)(implicit parser: RowParser[ROW]): TableRender[ROW] = { assert(fields.nonEmpty) - new Impl[ROW](Schema(fields), parser) + // Deep copy to avoid duplications (In case caller reuses a sub-tree). 
+    new Impl[ROW](Schema(fields.map(_.makeCopy())), parser)
   }
 
   def plain[ROW <: Any](fields: String*)(implicit parser: RowParser[ROW]): TableRender[ROW] = {
@@ -40,8 +41,10 @@ object TableRender {
   }
 
   trait Field {
+    def id(): Int = System.identityHashCode(this)
     def name: String
     def leafs: Seq[Field.Leaf]
+    def makeCopy(): Field
   }
 
   object Field {
@@ -57,9 +60,12 @@ object TableRender {
           children.map(child => leafsOf(child)).reduce(_ ++ _)
         }
       }
+
+      override def makeCopy(): Field = copy(name, children.map(_.makeCopy()))
     }
     case class Leaf(override val name: String) extends Field {
       override val leafs: Seq[Leaf] = List(this)
+      override def makeCopy(): Field = copy()
     }
   }
 
@@ -109,7 +115,7 @@ object TableRender {
     schema.leafs.zipWithIndex.foreach {
       case (leaf, i) =>
         val dataWidth = dataWidths(i)
-        widthMap += (System.identityHashCode(leaf) -> (dataWidth max (leaf.name.length + 2)))
+        widthMap += (leaf.id() -> (dataWidth max (leaf.name.length + 2)))
     }
 
     schema.fields.foreach { root =>
@@ -122,12 +128,12 @@ object TableRender {
            .toInt
          children.foreach(child => updateWidth(child, leafLowerBound * child.leafs.size))
          val childrenWidth =
-            children.map(child => widthMap(System.identityHashCode(child))).sum
+            children.map(child => widthMap(child.id())).sum
          val width = childrenWidth + children.size - 1
-          val hash = System.identityHashCode(branch)
+          val hash = branch.id()
          widthMap += hash -> width
        case leaf @ Field.Leaf(name) =>
-          val hash = System.identityHashCode(leaf)
+          val hash = leaf.id()
          val newWidth = widthMap(hash) max lowerBound
          widthMap.put(hash, newWidth)
        case _ => new IllegalStateException()
@@ -146,9 +152,9 @@ object TableRender {
     val schemaLine = cells
       .map {
         case Given(field) =>
-          (field.name, widthMap(System.identityHashCode(field)))
+          (field.name, widthMap(field.id()))
         case PlaceHolder(leaf) =>
-          ("", widthMap(System.identityHashCode(leaf)))
+          ("", widthMap(leaf.id()))
       }
       .map {
         case (name, width) =>
@@ -168,7 +174,7 @@ object TableRender {
     val separationLine = schema.leafs
       .map { leaf =>
-        widthMap(System.identityHashCode(leaf))
+        widthMap(leaf.id())
       }
       .map { width =>
         new String(Array.tabulate(width)(_ => '-'))
@@ -182,7 +188,7 @@ object TableRender {
       .zip(schema.leafs)
       .map {
         case (value, leaf) =>
-          (value, widthMap(System.identityHashCode(leaf)))
+          (value, widthMap(leaf.id()))
       }
       .map {
         case (value, width) =>
@@ -194,6 +200,12 @@ object TableRender {
 
       printer.flush()
     }
+
+    override def toString: String = {
+      val out = new ByteArrayOutputStream()
+      print(out)
+      out.toString
+    }
   }
 
   trait RowParser[ROW <: Any] {
@@ -302,7 +314,13 @@ object TableRender {
 
         override def write(value: Any): Unit = {
           assert(field.isInstanceOf[Field.Leaf])
-          mutableRow(column) = value.toString
+          mutableRow(column) = toString(value)
+        }
+
+        private def toString(value: Any): String = value match {
+          case Some(v) => toString(v)
+          case None => "N/A"
+          case other => other.toString
         }
       }
     }
diff --git a/tools/gluten-it/common/src/main/scala/org/apache/gluten/integration/action/package.scala b/tools/gluten-it/common/src/main/scala/org/apache/gluten/integration/action/package.scala
new file mode 100644
index 000000000000..6046ae4aaa35
--- /dev/null
+++ b/tools/gluten-it/common/src/main/scala/org/apache/gluten/integration/action/package.scala
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.gluten.integration
+
+package object action {
+  implicit class DualOptionsOps[T](value: (Option[T], Option[T])) {
+    def onBothProvided[R](func: (T, T) => R): Option[R] = {
+      if (value._1.isEmpty || value._2.isEmpty) {
+        return None
+      }
+      Some(func(value._1.get, value._2.get))
+    }
+  }
+
+  implicit class DualMetricsOps(value: (Map[String, Long], Map[String, Long])) {
+    def sumUp: Map[String, Long] = {
+      assert(value._1.keySet == value._2.keySet)
+      value._1.map { case (k, v) => k -> (v + value._2(k)) }
+    }
+  }
+}
diff --git a/tools/gluten-it/common/src/test/java/org/apache/gluten/integration/action/TableRenderTest.scala b/tools/gluten-it/common/src/test/java/org/apache/gluten/integration/action/TableRenderTest.scala
index ce7b0974ce8b..1efc72148928 100644
--- a/tools/gluten-it/common/src/test/java/org/apache/gluten/integration/action/TableRenderTest.scala
+++ b/tools/gluten-it/common/src/test/java/org/apache/gluten/integration/action/TableRenderTest.scala
@@ -99,11 +99,32 @@ object TableRenderTest {
     Console.out.println()
   }
 
+  def case5(): Unit = {
+    val leafs = List(Leaf("1"), Leaf("2"), Leaf("3"), Leaf("4"))
+    val render: TableRender[Seq[String]] = TableRender.create(
+      Leaf("Query ID"),
+      Branch("Succeeded", leafs),
+      Branch("Row Count", leafs))(new RowParser[Seq[String]] {
+      override def parse(rowFactory: FieldAppender.RowAppender, row: Seq[String]): Unit = {
+        val inc = rowFactory.incremental()
+        row.foreach(ceil => inc.next().write(ceil))
+      }
+    })
+
+    render.appendRow(
+      List("q1", "true", "true", "true && true && true && true", "true", "1", "1", "1", "1"))
+    render.appendRow(
+      List("q2", "true", "true", "true", "true", "100000", "100000", "100000", "100000"))
+    render.print(Console.out)
+    Console.out.println()
+  }
+
   def main(args: Array[String]): Unit = {
     case0()
     case1()
     case2()
     case3()
     case4()
+    case5()
   }
 }
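
Note (not part of the patch): the new `package object action` above introduces `DualOptionsOps.onBothProvided` and `DualMetricsOps.sumUp`, which `TestResultLine.aggregate` and the reworked `QueriesCompare.aggregate` rely on when folding per-run results. Below is a minimal, self-contained sketch of how those implicits behave; the object name `ResultMergeExample` and the metric key `"cpuTimeMillis"` are illustrative only and do not come from the patch.

// Sketch only: assumes the gluten-it classes from the patch are on the classpath.
import org.apache.gluten.integration.action._ // brings the implicit DualOptionsOps / DualMetricsOps into scope

object ResultMergeExample {
  def main(args: Array[String]): Unit = {
    // Both sides present: the combiner is applied.
    println((Option(40L), Option(2L)).onBothProvided(_ + _)) // Some(42)

    // Either side missing (e.g. a failed run has no execution time): the result is None
    // rather than a faked default, so failed runs do not skew aggregated timings.
    println((Option(40L), Option.empty[Long]).onBothProvided(_ + _)) // None

    // Metric maps with identical key sets are summed key by key.
    println((Map("cpuTimeMillis" -> 100L), Map("cpuTimeMillis" -> 250L)).sumUp) // Map(cpuTimeMillis -> 350)
  }
}

The "N/A" rendering that used to be done by callers via `getOrElse("N/A")` now happens once in `FieldAppender.write`, whose new private `toString` unwraps `Some`/`None`, which is why the `Queries.scala` and `QueriesCompare.scala` hunks above can pass `Option` values straight through.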