[GLUTEN-7437][CH] Revert "Auxiliary commit to revert individual files from" (apache#7438)

This reverts commit 1299b730512e3387fea550604dd94150a380a3be.
baibaichen authored and shamirchen committed Oct 14, 2024
1 parent dd5fc3e commit 8caba5d
Showing 8 changed files with 8 additions and 149 deletions.
backends-clickhouse/pom.xml (2 changes: 0 additions & 2 deletions)
@@ -338,7 +338,6 @@
         <include>src/test/scala/**/*.scala</include>
         <include>src/main/delta-${delta.binary.version}/**/*.scala</include>
         <include>src/test/delta-${delta.binary.version}/**/*.scala</include>
-        <include>src/main/${sparkshim.module.name}/**/*.scala</include>
       </includes>
       <excludes>
         <exclude>src/main/delta-${delta.binary.version}/org/apache/spark/sql/delta/commands/*.scala</exclude>
@@ -398,7 +397,6 @@
       <configuration>
         <sources>
           <source>src/main/delta-${delta.binary.version}</source>
-          <source>src/main/${sparkshim.module.name}</source>
         </sources>
       </configuration>
     </execution>
ClickHouseTableV2.scala
@@ -102,7 +102,9 @@ class ClickHouseTableV2(
     StorageMeta.withMoreStorageInfo(
       meta,
       ClickhouseSnapshot.genSnapshotId(initialSnapshot),
-      deltaLog.dataPath))
+      deltaLog.dataPath,
+      dataBaseName,
+      tableName))
   }

   override def deltaProperties: Map[String, String] = properties().asScala.toMap
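The call site above now passes the database and table names explicitly instead of relying on path-based defaults (the defaulting overload is deleted from StorageMeta further down). A minimal sketch of the signature this call shape implies, using a simplified stand-in for Delta's Metadata action; the property-key string values are assumptions for illustration, not taken from the diff:

```scala
import org.apache.hadoop.fs.Path

// Simplified stand-in for Delta's Metadata action (illustration only).
case class Metadata(configuration: Map[String, String] = Map.empty)

object StorageMetaSketch {
  // Keys mirror the constants referenced in the diff; the string values
  // here are assumptions made so the example runs standalone.
  val STORAGE_SNAPSHOT_ID = "storage_snapshot_id"
  val STORAGE_PATH = "storage_path"
  val STORAGE_DB = "storage_db"
  val STORAGE_TABLE = "storage_table"

  // Hypothetical five-argument shape inferred from the call site above:
  // every storage property is supplied by the caller, so no path-based
  // fallback for the database or table name is needed.
  def withMoreStorageInfo(
      metadata: Metadata,
      snapshotId: String,
      deltaPath: Path,
      database: String,
      tableName: String): Metadata =
    metadata.copy(configuration = metadata.configuration ++ Seq(
      STORAGE_SNAPSHOT_ID -> snapshotId,
      STORAGE_PATH -> deltaPath.toString,
      STORAGE_DB -> database,
      STORAGE_TABLE -> tableName))
}
```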
CHRuleApi.scala
@@ -19,7 +19,7 @@ package org.apache.gluten.backendsapi.clickhouse
 import org.apache.gluten.backendsapi.RuleApi
 import org.apache.gluten.extension._
 import org.apache.gluten.extension.columnar._
-import org.apache.gluten.extension.columnar.MiscColumnarRules._
+import org.apache.gluten.extension.columnar.MiscColumnarRules.{RemoveGlutenTableCacheColumnarToRow, RemoveTopmostColumnarToRow, RewriteSubqueryBroadcast, TransformPreOverrides}
 import org.apache.gluten.extension.columnar.rewrite.RewriteSparkPlanRulesManager
 import org.apache.gluten.extension.columnar.transition.{InsertTransitions, RemoveTransitions}
 import org.apache.gluten.extension.injector.{RuleInjector, SparkInjector}
@@ -28,7 +28,7 @@ import org.apache.gluten.parser.{GlutenCacheFilesSqlParser, GlutenClickhouseSqlP
 import org.apache.gluten.sql.shims.SparkShimLoader
 import org.apache.gluten.utils.PhysicalPlanSelector

-import org.apache.spark.sql.catalyst._
+import org.apache.spark.sql.catalyst.{CHAggregateFunctionRewriteRule, EqualToRewrite}
 import org.apache.spark.sql.execution.{ColumnarCollapseTransformStages, GlutenFallbackReporter}
 import org.apache.spark.util.SparkPlanRules
@@ -44,7 +44,7 @@ class CHRuleApi extends RuleApi {
 }

 private object CHRuleApi {
-  private def injectSpark(injector: SparkInjector): Unit = {
+  def injectSpark(injector: SparkInjector): Unit = {
     // Inject the regular Spark rules directly.
     injector.injectQueryStagePrepRule(FallbackBroadcastHashJoinPrepQueryStage.apply)
     injector.injectQueryStagePrepRule(spark => CHAQEPropagateEmptyRelation(spark))
@@ -61,10 +61,9 @@
     injector.injectOptimizerRule(spark => CHAggregateFunctionRewriteRule(spark))
     injector.injectOptimizerRule(_ => CountDistinctWithoutExpand)
     injector.injectOptimizerRule(_ => EqualToRewrite)
-    CHExtendRule.injectSpark(injector)
   }

-  private def injectLegacy(injector: LegacyInjector): Unit = {
+  def injectLegacy(injector: LegacyInjector): Unit = {
     // Gluten columnar: Transform rules.
     injector.injectTransform(_ => RemoveTransitions)
     injector.injectTransform(_ => PushDownInputFileExpression.PreOffload)
@@ -108,7 +107,7 @@
     injector.injectFinal(_ => RemoveFallbackTagRule())
   }

-  private def injectRas(injector: RasInjector): Unit = {
+  def injectRas(injector: RasInjector): Unit = {
     // CH backend doesn't work with RAS at the moment. Inject a rule that aborts any
     // execution calls.
     injector.inject(
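For readers unfamiliar with the injector API used throughout this file: rules are registered as factories from SparkSession to a Rule, and the injector installs them later through Spark's extension points. A minimal sketch of that pattern with simplified names (not Gluten's actual classes), which also shows why injectSpark/injectLegacy/injectRas take an injector rather than a session:

```scala
import scala.collection.mutable

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.catalyst.rules.Rule

// Sketch of the builder-collecting injector pattern. Gluten's real
// SparkInjector/LegacyInjector/RasInjector are richer; the point here is
// only that registration is deferred until a concrete session exists.
class SparkInjectorSketch {
  private val optimizerRules =
    mutable.Buffer.empty[SparkSession => Rule[LogicalPlan]]

  // Callers hand over a factory, not a constructed rule.
  def injectOptimizerRule(builder: SparkSession => Rule[LogicalPlan]): Unit =
    optimizerRules += builder

  // Later, build every registered rule against the live session.
  def buildOptimizerRules(spark: SparkSession): Seq[Rule[LogicalPlan]] =
    optimizerRules.map(_(spark)).toSeq
}
```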
StorageMeta.scala
@@ -24,13 +24,8 @@ import org.apache.spark.sql.execution.datasources.clickhouse.utils.MergeTreeDelt

 import org.apache.hadoop.fs.Path

-import scala.collection.mutable.ListBuffer
-
 /** Reserved table property for MergeTree table. */
 object StorageMeta {
   val Provider: String = "clickhouse"
-  val DEFAULT_FILE_FORMAT: String = "write.format.default"
-  val DEFAULT_FILE_FORMAT_DEFAULT: String = "mergetree"
-
   // Storage properties
   val DEFAULT_PATH_BASED_DATABASE: String = "clickhouse_db"
@@ -54,18 +49,6 @@
       STORAGE_PATH -> deltaPath.toString)
     withMoreOptions(metadata, moreOptions)
   }
-  def withMoreStorageInfo(metadata: Metadata, snapshotId: String, deltaPath: Path): Metadata = {
-    val moreOptions =
-      ListBuffer(STORAGE_SNAPSHOT_ID -> snapshotId, STORAGE_PATH -> deltaPath.toString)
-    // Path-based create table statement does not have storage_db and storage_table
-    if (!metadata.configuration.contains(STORAGE_DB)) {
-      moreOptions += STORAGE_DB -> DEFAULT_PATH_BASED_DATABASE
-    }
-    if (!metadata.configuration.contains(STORAGE_TABLE)) {
-      moreOptions += STORAGE_TABLE -> deltaPath.toUri.getPath
-    }
-    withMoreOptions(metadata, moreOptions.toSeq)
-  }

   private def withMoreOptions(metadata: Metadata, newOptions: Seq[(String, String)]): Metadata = {
     metadata.copy(configuration = metadata.configuration ++ newOptions)
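The surviving withMoreOptions helper merges options with Map's ++ operator, so an entry on the right-hand side overwrites an existing value for the same key. A quick self-contained illustration of that merge semantics, with a simplified Metadata and assumed key/value strings:

```scala
// Simplified stand-in for Delta's Metadata action (illustration only).
case class Metadata(configuration: Map[String, String])

// Mirrors the shape of the surviving helper in the diff above.
def withMoreOptions(metadata: Metadata, newOptions: Seq[(String, String)]): Metadata =
  metadata.copy(configuration = metadata.configuration ++ newOptions)

val base = Metadata(Map("storage_db" -> "default"))
val updated = withMoreOptions(
  base,
  Seq("storage_db" -> "clickhouse_db", "storage_path" -> "/data/t1"))

// ++ lets the new pairs win over existing keys:
// updated.configuration == Map(
//   "storage_db" -> "clickhouse_db",
//   "storage_path" -> "/data/t1")
```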

The remaining four changed files were deleted entirely.
