Skip to content
Permalink

Comparing changes

This is a direct comparison between two commits made in this repository or its related repositories. View the default comparison for this range or learn more about diff comparisons.

Open a pull request

Create a new pull request by comparing changes across two branches. If you need to, you can also compare across forks. Learn more about diff comparisons here.
base repository: apache/incubator-gluten
Failed to load repositories. Confirm that selected base ref is valid, then try again.
Loading
base: a4fb8918110474f91be3bc3ccd2d063419b002b5
Choose a base ref
..
head repository: apache/incubator-gluten
Failed to load repositories. Confirm that selected head ref is valid, then try again.
Loading
compare: dccfd87a7598d6f2a89575cf64e3cec8afd7cb52
Choose a head ref
Original file line number Diff line number Diff line change
@@ -16,14 +16,22 @@
*/
package org.apache.spark.sql.execution

import org.apache.gluten.GlutenConfig
import org.apache.gluten.execution.{BasicScanExecTransformer, VeloxWholeStageTransformerSuite}

import org.apache.spark.SparkConf

import java.io.File

class VeloxParquetReadSuite extends VeloxWholeStageTransformerSuite {
override protected val resourcePath: String = "/parquet-for-read"
override protected val fileFormat: String = "parquet"

override protected def sparkConf: SparkConf = {
super.sparkConf
.set(GlutenConfig.LOAD_QUANTUM.key, "128m")
}

testWithSpecifiedSparkVersion("read example parquet files", Some("3.5"), Some("3.5")) {
withTable("test_table") {
val dir = new File(getClass.getResource(resourcePath).getFile)
4 changes: 2 additions & 2 deletions cpp-ch/clickhouse.version
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
CH_ORG=Kyligence
CH_BRANCH=rebase_ch/20240423
CH_COMMIT=589765ed71d
CH_BRANCH=rebase_ch/20240430
CH_COMMIT=d9f83b5b48f
2 changes: 1 addition & 1 deletion cpp-ch/local-engine/tests/gtest_ch_storages.cpp
Original file line number Diff line number Diff line change
@@ -266,7 +266,7 @@ TEST(TestPrewhere, OptimizePrewhereCondition)
SerializedPlanParser * parser = new SerializedPlanParser(context);
parser->parseExtensions(plan_ptr->extensions());

MergeTreeRelParser mergeTreeParser(parser, context, parser->query_context, SerializedPlanParser::global_context);
MergeTreeRelParser mergeTreeParser(parser, SerializedPlanParser::global_context);

mergeTreeParser.column_sizes["l_discount"] = 0;
mergeTreeParser.column_sizes["l_quantity"] = 1;
1 change: 1 addition & 0 deletions docs/developers/docker_centos7.md
Original file line number Diff line number Diff line change
@@ -25,6 +25,7 @@ yum -y install \
java-1.8.0-openjdk-devel \
ninja-build \
wget \
ca-certificates \
sudo
# gluten need maven version >=3.6.3
Original file line number Diff line number Diff line change
@@ -21,11 +21,27 @@ import org.apache.spark.internal.config.ConfigReader
import scala.collection.JavaConverters._

object GlutenConfigUtil {

  /**
   * Resolves the effective string value for a registered Gluten config entry.
   *
   * Looks up the `ConfigEntry` registered under `key` in [[SQLConf]] and reads it
   * through `reader` so that defaults and value converters declared on the entry
   * are applied. Falls back to the user-supplied `value` when the key has no
   * registered entry, or when the entry is optional and unset.
   *
   * @param reader config reader backed by the user-provided "spark.gluten." settings
   * @param key    the configuration key to resolve
   * @param value  the raw user-provided value, used as the fallback
   * @return the resolved string value for `key`
   */
  private def getConfString(reader: ConfigReader, key: String, value: String): String = {
    Option(SQLConf.getConfigEntry(key))
      .map {
        _.readFrom(reader) match {
          // An OptionalConfigEntry yields an Option; keep the raw value when unset.
          case o: Option[_] => o.map(_.toString).getOrElse(value)
          // Defensive: a null read result falls back to the raw value.
          case null => value
          case v => v.toString
        }
      }
      .getOrElse(value)
  }

  /**
   * Normalizes a user configuration map: every "spark.gluten."-prefixed key is
   * re-read through its registered config entry (applying converters/defaults),
   * while all other entries pass through unchanged.
   *
   * Only keys with the exact "spark.gluten." prefix are considered (startsWith,
   * not a substring match), so unrelated keys that merely contain the text are
   * left untouched.
   *
   * @param conf the raw configuration map
   * @return a map of the same keys with Gluten entries resolved
   */
  def parseConfig(conf: Map[String, String]): Map[String, String] = {
    val reader = new ConfigReader(conf.filter(_._1.startsWith("spark.gluten.")).asJava)
    conf.map {
      case (k, v) =>
        if (k.startsWith("spark.gluten.")) {
          (k, getConfString(reader, k, v))
        } else {
          (k, v)
        }
    }.toMap
  }
}