Commit f96ba7a

fixup
zhztheplayer committed Sep 10, 2024
1 parent f9a3b41 commit f96ba7a
Showing 4 changed files with 46 additions and 16 deletions.
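The substance of the commit is the same in all four files: the misspelled GltuenParquetFilterSuite is renamed to GlutenParquetFilterSuite, and the abstract suite now overrides sparkConf to pin GlutenConfig.COLUMNAR_VELOX_CONNECTOR_IO_THREADS to 2, referencing https://github.com/apache/incubator-gluten/issues/7174. For reference, here is a minimal sketch (not part of this commit) of applying the same setting to a standalone SparkSession; the object name, master URL, and Parquet path are illustrative assumptions, only the GlutenConfig key comes from the change itself:

import org.apache.gluten.GlutenConfig
import org.apache.spark.sql.SparkSession

// Sketch only: mirrors the test-suite override by capping the Velox
// connector IO threads at 2 (see incubator-gluten issue #7174).
object IoThreadsWorkaroundExample { // hypothetical object name
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .master("local[2]") // assumption: local run for illustration
      .config(GlutenConfig.COLUMNAR_VELOX_CONNECTOR_IO_THREADS.key, "2")
      .getOrCreate()

    // Hypothetical read just to exercise the Parquet scan path.
    spark.read.parquet("/tmp/example.parquet").show()
    spark.stop()
  }
}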
(changed file 1 of 4)
@@ -16,6 +16,8 @@
  */
 package org.apache.spark.sql.execution.datasources.parquet

+import org.apache.gluten.GlutenConfig
+
 import org.apache.spark.SparkConf
 import org.apache.spark.sql._
 import org.apache.spark.sql.catalyst.dsl.expressions._
@@ -35,9 +37,8 @@ import org.apache.spark.tags.ExtendedSQLTest
 import org.apache.spark.util.Utils

 import org.apache.hadoop.fs.Path
-import org.apache.parquet.filter2.predicate.{FilterApi, FilterPredicate}
+import org.apache.parquet.filter2.predicate.{FilterApi, FilterPredicate, Operators}
 import org.apache.parquet.filter2.predicate.FilterApi._
-import org.apache.parquet.filter2.predicate.Operators
 import org.apache.parquet.filter2.predicate.Operators.{Column => _, _}
 import org.apache.parquet.hadoop.{ParquetFileReader, ParquetInputFormat, ParquetOutputFormat}
 import org.apache.parquet.hadoop.util.HadoopInputFile
@@ -48,7 +49,13 @@ import java.time.LocalDate
 import scala.reflect.ClassTag
 import scala.reflect.runtime.universe.TypeTag

-abstract class GltuenParquetFilterSuite extends ParquetFilterSuite with GlutenSQLTestsBaseTrait {
+abstract class GlutenParquetFilterSuite extends ParquetFilterSuite with GlutenSQLTestsBaseTrait {
+  override def sparkConf: SparkConf = {
+    // https://github.com/apache/incubator-gluten/issues/7174
+    super.sparkConf
+      .set(GlutenConfig.COLUMNAR_VELOX_CONNECTOR_IO_THREADS.key, "2")
+  }
+
   protected def checkFilterPredicate(
       predicate: Predicate,
       filterClass: Class[_ <: FilterPredicate],
@@ -357,7 +364,7 @@ abstract class GltuenParquetFilterSuite extends ParquetFilterSuite with GlutenSQLTestsBaseTrait {
 }

 @ExtendedSQLTest
-class GlutenParquetV1FilterSuite extends GltuenParquetFilterSuite with GlutenSQLTestsBaseTrait {
+class GlutenParquetV1FilterSuite extends GlutenParquetFilterSuite with GlutenSQLTestsBaseTrait {
   // TODO: enable Parquet V2 write path after file source V2 writers are workable.
   override def sparkConf: SparkConf =
     super.sparkConf
@@ -445,7 +452,7 @@ class GlutenParquetV1FilterSuite extends GltuenParquetFilterSuite with GlutenSQLTestsBaseTrait {
 }

 @ExtendedSQLTest
-class GlutenParquetV2FilterSuite extends GltuenParquetFilterSuite with GlutenSQLTestsBaseTrait {
+class GlutenParquetV2FilterSuite extends GlutenParquetFilterSuite with GlutenSQLTestsBaseTrait {
   // TODO: enable Parquet V2 write path after file source V2 writers are workable.
   override def sparkConf: SparkConf =
     super.sparkConf
(changed file 2 of 4)
@@ -16,6 +16,8 @@
  */
 package org.apache.spark.sql.execution.datasources.parquet

+import org.apache.gluten.GlutenConfig
+
 import org.apache.spark.SparkConf
 import org.apache.spark.sql._
 import org.apache.spark.sql.catalyst.dsl.expressions._
@@ -35,9 +37,8 @@ import org.apache.spark.tags.ExtendedSQLTest
 import org.apache.spark.util.Utils

 import org.apache.hadoop.fs.Path
-import org.apache.parquet.filter2.predicate.{FilterApi, FilterPredicate}
+import org.apache.parquet.filter2.predicate.{FilterApi, FilterPredicate, Operators}
 import org.apache.parquet.filter2.predicate.FilterApi._
-import org.apache.parquet.filter2.predicate.Operators
 import org.apache.parquet.filter2.predicate.Operators.{Column => _, Eq, Gt, GtEq, Lt, LtEq, NotEq}
 import org.apache.parquet.hadoop.{ParquetFileReader, ParquetInputFormat, ParquetOutputFormat}
 import org.apache.parquet.hadoop.util.HadoopInputFile
@@ -48,7 +49,13 @@ import java.time.LocalDate
 import scala.reflect.ClassTag
 import scala.reflect.runtime.universe.TypeTag

-abstract class GltuenParquetFilterSuite extends ParquetFilterSuite with GlutenSQLTestsBaseTrait {
+abstract class GlutenParquetFilterSuite extends ParquetFilterSuite with GlutenSQLTestsBaseTrait {
+  override def sparkConf: SparkConf = {
+    // https://github.com/apache/incubator-gluten/issues/7174
+    super.sparkConf
+      .set(GlutenConfig.COLUMNAR_VELOX_CONNECTOR_IO_THREADS.key, "2")
+  }
+
   protected def checkFilterPredicate(
       predicate: Predicate,
       filterClass: Class[_ <: FilterPredicate],
@@ -328,7 +335,7 @@ abstract class GltuenParquetFilterSuite extends ParquetFilterSuite with GlutenSQLTestsBaseTrait {
 }

 @ExtendedSQLTest
-class GlutenParquetV1FilterSuite extends GltuenParquetFilterSuite with GlutenSQLTestsBaseTrait {
+class GlutenParquetV1FilterSuite extends GlutenParquetFilterSuite with GlutenSQLTestsBaseTrait {
   // TODO: enable Parquet V2 write path after file source V2 writers are workable.
   override def sparkConf: SparkConf =
     super.sparkConf
@@ -416,7 +423,7 @@ class GlutenParquetV1FilterSuite extends GltuenParquetFilterSuite with GlutenSQLTestsBaseTrait {
 }

 @ExtendedSQLTest
-class GlutenParquetV2FilterSuite extends GltuenParquetFilterSuite with GlutenSQLTestsBaseTrait {
+class GlutenParquetV2FilterSuite extends GlutenParquetFilterSuite with GlutenSQLTestsBaseTrait {
   // TODO: enable Parquet V2 write path after file source V2 writers are workable.
   override def sparkConf: SparkConf =
     super.sparkConf
(changed file 3 of 4)
@@ -16,6 +16,8 @@
  */
 package org.apache.spark.sql.execution.datasources.parquet

+import org.apache.gluten.GlutenConfig
+
 import org.apache.spark.SparkConf
 import org.apache.spark.sql._
 import org.apache.spark.sql.catalyst.dsl.expressions._
@@ -47,7 +49,13 @@ import java.time.LocalDate
 import scala.reflect.ClassTag
 import scala.reflect.runtime.universe.TypeTag

-abstract class GltuenParquetFilterSuite extends ParquetFilterSuite with GlutenSQLTestsBaseTrait {
+abstract class GlutenParquetFilterSuite extends ParquetFilterSuite with GlutenSQLTestsBaseTrait {
+  override def sparkConf: SparkConf = {
+    // https://github.com/apache/incubator-gluten/issues/7174
+    super.sparkConf
+      .set(GlutenConfig.COLUMNAR_VELOX_CONNECTOR_IO_THREADS.key, "2")
+  }
+
   protected def checkFilterPredicate(
       predicate: Predicate,
       filterClass: Class[_ <: FilterPredicate],
@@ -328,7 +336,7 @@ abstract class GltuenParquetFilterSuite extends ParquetFilterSuite with GlutenSQLTestsBaseTrait {
 }

 @ExtendedSQLTest
-class GlutenParquetV1FilterSuite extends GltuenParquetFilterSuite with GlutenSQLTestsBaseTrait {
+class GlutenParquetV1FilterSuite extends GlutenParquetFilterSuite with GlutenSQLTestsBaseTrait {
   // TODO: enable Parquet V2 write path after file source V2 writers are workable.
   override def sparkConf: SparkConf =
     super.sparkConf
@@ -416,7 +424,7 @@ class GlutenParquetV1FilterSuite extends GltuenParquetFilterSuite with GlutenSQLTestsBaseTrait {
 }

 @ExtendedSQLTest
-class GlutenParquetV2FilterSuite extends GltuenParquetFilterSuite with GlutenSQLTestsBaseTrait {
+class GlutenParquetV2FilterSuite extends GlutenParquetFilterSuite with GlutenSQLTestsBaseTrait {
   // TODO: enable Parquet V2 write path after file source V2 writers are workable.
   override def sparkConf: SparkConf =
     super.sparkConf
(changed file 4 of 4)
@@ -16,6 +16,8 @@
  */
 package org.apache.spark.sql.execution.datasources.parquet

+import org.apache.gluten.GlutenConfig
+
 import org.apache.spark.SparkConf
 import org.apache.spark.sql._
 import org.apache.spark.sql.catalyst.dsl.expressions._
@@ -47,7 +49,13 @@ import java.time.LocalDate
 import scala.reflect.ClassTag
 import scala.reflect.runtime.universe.TypeTag

-abstract class GltuenParquetFilterSuite extends ParquetFilterSuite with GlutenSQLTestsBaseTrait {
+abstract class GlutenParquetFilterSuite extends ParquetFilterSuite with GlutenSQLTestsBaseTrait {
+  override def sparkConf: SparkConf = {
+    // https://github.com/apache/incubator-gluten/issues/7174
+    super.sparkConf
+      .set(GlutenConfig.COLUMNAR_VELOX_CONNECTOR_IO_THREADS.key, "2")
+  }
+
   protected def checkFilterPredicate(
       predicate: Predicate,
       filterClass: Class[_ <: FilterPredicate],
@@ -328,7 +336,7 @@ abstract class GltuenParquetFilterSuite extends ParquetFilterSuite with GlutenSQLTestsBaseTrait {
 }

 @ExtendedSQLTest
-class GlutenParquetV1FilterSuite extends GltuenParquetFilterSuite with GlutenSQLTestsBaseTrait {
+class GlutenParquetV1FilterSuite extends GlutenParquetFilterSuite with GlutenSQLTestsBaseTrait {
   // TODO: enable Parquet V2 write path after file source V2 writers are workable.
   override def sparkConf: SparkConf =
     super.sparkConf
@@ -416,7 +424,7 @@ class GlutenParquetV1FilterSuite extends GltuenParquetFilterSuite with GlutenSQLTestsBaseTrait {
 }

 @ExtendedSQLTest
-class GlutenParquetV2FilterSuite extends GltuenParquetFilterSuite with GlutenSQLTestsBaseTrait {
+class GlutenParquetV2FilterSuite extends GlutenParquetFilterSuite with GlutenSQLTestsBaseTrait {
   // TODO: enable Parquet V2 write path after file source V2 writers are workable.
   override def sparkConf: SparkConf =
     super.sparkConf
