-
Notifications
You must be signed in to change notification settings - Fork 67
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Spark 3.4: Support distribute by any predefined transform
- Loading branch information
Showing
10 changed files
with
407 additions
and
4 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
117 changes: 117 additions & 0 deletions
117
...src/test/scala/org/apache/spark/sql/clickhouse/cluster/ClusterShardByTransformSuite.scala
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,117 @@ | ||
/* | ||
* Licensed under the Apache License, Version 2.0 (the "License"); | ||
* you may not use this file except in compliance with the License. | ||
* You may obtain a copy of the License at | ||
* | ||
* https://www.apache.org/licenses/LICENSE-2.0 | ||
* | ||
* Unless required by applicable law or agreed to in writing, software | ||
* distributed under the License is distributed on an "AS IS" BASIS, | ||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
* See the License for the specific language governing permissions and | ||
* limitations under the License. | ||
*/ | ||
|
||
package org.apache.spark.sql.clickhouse.cluster | ||
|
||
import org.apache.spark.SparkConf | ||
import org.apache.spark.sql.Row | ||
|
||
/**
 * Verifies that Spark-side write sharding (with distributed-to-local write
 * conversion enabled) places each row on the same shard that ClickHouse's own
 * Distributed engine would pick for the same sharding expression.
 *
 * For every predefined transform, the test writes the same rows once through
 * Spark and once through ClickHouse native SQL, then checks every value
 * appears exactly twice in the local tables — i.e. both writers agreed on the
 * target shard.
 */
class ClusterShardByTransformSuite extends SparkClickHouseClusterTest {
  override protected def sparkConf: SparkConf = {
    // Convert writes against the Distributed table into direct local-table
    // writes, so the connector itself must evaluate the sharding expression.
    val _conf = super.sparkConf
      .set("spark.clickhouse.write.distributed.convertLocal", "true")
    _conf
  }

  /**
   * Runs one round-trip sharding check.
   *
   * @param func_name ClickHouse sharding function name, e.g. "xxHash64"
   * @param func_args column names / expressions passed to the function
   */
  def runTest(func_name: String, func_args: Array[String]): Unit = {
    val func_expr = s"$func_name(${func_args.mkString(",")})"
    val cluster = "single_replica"
    val db = s"db_${func_name}_shard_transform"
    val tbl_dist = s"tbl_${func_name}_shard"
    val tbl_local = s"${tbl_dist}_local"

    try {
      runClickHouseSQL(s"CREATE DATABASE IF NOT EXISTS $db ON CLUSTER $cluster")

      // Local (per-shard) table is created through Spark so the connector
      // registers it in its catalog.
      spark.sql(
        s"""CREATE TABLE $db.$tbl_local (
           | create_time TIMESTAMP NOT NULL,
           | create_date DATE NOT NULL,
           | value STRING NOT NULL
           |) USING ClickHouse
           |TBLPROPERTIES (
           | cluster = '$cluster',
           | engine = 'MergeTree()',
           | order_by = 'create_time'
           |)
           |""".stripMargin
      )

      // Distributed facade created natively, carrying the sharding expression
      // under test.
      runClickHouseSQL(
        s"""CREATE TABLE $db.$tbl_dist ON CLUSTER $cluster
           |AS $db.$tbl_local
           |ENGINE = Distributed($cluster, '$db', '$tbl_local', $func_expr)
           |""".stripMargin
      )
      // First copy of the rows: written through Spark (sharded by the
      // connector because convertLocal is enabled).
      spark.sql(
        s"""INSERT INTO `$db`.`$tbl_dist`
           |VALUES
           | (timestamp'2021-01-01 10:10:10', date'2021-01-01', '1'),
           | (timestamp'2022-02-02 11:10:10', date'2022-02-02', '2'),
           | (timestamp'2023-03-03 12:10:10', date'2023-03-03', '3'),
           | (timestamp'2024-04-04 13:10:10', date'2024-04-04', '4')
           | AS tab(create_time, create_date, value)
           |""".stripMargin
      )
      // check that data is indeed written
      checkAnswer(
        spark.table(s"$db.$tbl_dist").select("value").orderBy("create_time"),
        Seq(Row("1"), Row("2"), Row("3"), Row("4"))
      )

      // check same data is sharded in the same server comparing native sharding
      // Second copy: written natively so ClickHouse itself applies $func_expr.
      runClickHouseSQL(
        s"""INSERT INTO `$db`.`$tbl_dist`
           |VALUES
           | (timestamp'2021-01-01 10:10:10', date'2021-01-01', '1'),
           | (timestamp'2022-02-02 11:10:10', date'2022-02-02', '2'),
           | (timestamp'2023-03-03 12:10:10', date'2023-03-03', '3'),
           | (timestamp'2024-04-04 13:10:10', date'2024-04-04', '4')
           |""".stripMargin
      )
      // If Spark and ClickHouse agreed on every shard assignment, each value
      // occurs exactly twice in the local table; any mismatch yields a row
      // with count != 2.
      // NOTE(review): presumably spark.table(tbl_local) scans the local table
      // across all shards via the connector — confirm against the catalog
      // implementation.
      checkAnswer(
        spark.table(s"$db.$tbl_local")
          .groupBy("value").count().filter("count != 2"),
        Seq.empty
      )

    } finally {
      // Always drop cluster-wide objects, even on assertion failure.
      runClickHouseSQL(s"DROP TABLE IF EXISTS $db.$tbl_dist ON CLUSTER $cluster")
      runClickHouseSQL(s"DROP TABLE IF EXISTS $db.$tbl_local ON CLUSTER $cluster")
      runClickHouseSQL(s"DROP DATABASE IF EXISTS $db ON CLUSTER $cluster")
    }
  }

  // One test case per supported sharding transform.
  Seq(
    // wait for SPARK-44180 to be fixed, then add implicit cast test cases
    ("toYear", Array("create_date")),
    // ("toYear", Array("create_time")),
    ("toYYYYMM", Array("create_date")),
    // ("toYYYYMM", Array("create_time")),
    ("toYYYYMMDD", Array("create_date")),
    // ("toYYYYMMDD", Array("create_time")),
    ("toHour", Array("create_time")),
    ("xxHash64", Array("value")),
    ("murmurHash2_64", Array("value")),
    ("murmurHash2_32", Array("value")),
    ("murmurHash3_64", Array("value")),
    ("murmurHash3_32", Array("value")),
    ("cityHash64", Array("value")),
    ("modulo", Array("toYYYYMM(create_date)", "10"))
  ).foreach {
    case (func_name: String, func_args: Array[String]) =>
      test(s"shard by $func_name(${func_args.mkString(",")})")(runTest(func_name, func_args))
  }

}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
52 changes: 52 additions & 0 deletions
52
spark-3.4/clickhouse-spark/src/main/scala/xenon/clickhouse/func/Days.scala
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,52 @@ | ||
/* | ||
* Licensed under the Apache License, Version 2.0 (the "License"); | ||
* you may not use this file except in compliance with the License. | ||
* You may obtain a copy of the License at | ||
* | ||
* https://www.apache.org/licenses/LICENSE-2.0 | ||
* | ||
* Unless required by applicable law or agreed to in writing, software | ||
* distributed under the License is distributed on an "AS IS" BASIS, | ||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
* See the License for the specific language governing permissions and | ||
* limitations under the License. | ||
*/ | ||
|
||
package xenon.clickhouse.func | ||
|
||
import org.apache.spark.sql.connector.catalog.functions.{BoundFunction, ScalarFunction, UnboundFunction} | ||
import org.apache.spark.sql.types._ | ||
|
||
import java.time.LocalDate | ||
import java.time.format.DateTimeFormatter | ||
|
||
/**
 * Spark-side equivalent of ClickHouse's `toYYYYMMDD`: maps a DATE column to
 * the integer `yyyyMMdd` representation, used as a sharding key.
 */
object Days extends UnboundFunction with ScalarFunction[Int] with ClickhouseEquivFunction {

  override def name: String = "clickhouse_days"

  override def canonicalName: String = s"clickhouse.$name"

  override def toString: String = name

  // ClickHouse function names this implementation substitutes for.
  override val ckFuncNames: Array[String] = Array("toYYYYMMDD")

  override def description: String = s"$name: (date: Date) => shard_num: int"

  // Bind only to a single DATE argument; everything else is rejected.
  override def bind(inputType: StructType): BoundFunction =
    inputType.fields match {
      case Array(field) if field.dataType == DateType => this
      // case Array(StructField(_, TimestampType, _, _)) | Array(StructField(_, TimestampNTZType, _, _)) => this
      case _ => throw new UnsupportedOperationException(s"Expect 1 DATE argument. $description")
    }

  override def inputTypes: Array[DataType] = Array(DateType)

  override def resultType: DataType = IntegerType

  override def isResultNullable: Boolean = false

  /**
   * @param days DATE encoded as days since the epoch (Spark internal form)
   * @return the date rendered as the integer yyyyMMdd
   */
  def invoke(days: Int): Int =
    LocalDate.ofEpochDay(days).format(DateTimeFormatter.ofPattern("yyyyMMdd")).toInt
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
51 changes: 51 additions & 0 deletions
51
spark-3.4/clickhouse-spark/src/main/scala/xenon/clickhouse/func/Hours.scala
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,51 @@ | ||
/* | ||
* Licensed under the Apache License, Version 2.0 (the "License"); | ||
* you may not use this file except in compliance with the License. | ||
* You may obtain a copy of the License at | ||
* | ||
* https://www.apache.org/licenses/LICENSE-2.0 | ||
* | ||
* Unless required by applicable law or agreed to in writing, software | ||
* distributed under the License is distributed on an "AS IS" BASIS, | ||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
* See the License for the specific language governing permissions and | ||
* limitations under the License. | ||
*/ | ||
|
||
package xenon.clickhouse.func | ||
|
||
import org.apache.spark.sql.connector.catalog.functions.{BoundFunction, ScalarFunction, UnboundFunction} | ||
import org.apache.spark.sql.types._ | ||
|
||
import java.sql.Timestamp | ||
import java.text.SimpleDateFormat | ||
|
||
/**
 * Spark-side equivalent of ClickHouse's `toHour`: maps a TIMESTAMP column to
 * its hour-of-day, used as a sharding key.
 */
object Hours extends UnboundFunction with ScalarFunction[Int] with ClickhouseEquivFunction {

  override def name: String = "clickhouse_hours"

  override def canonicalName: String = s"clickhouse.$name"

  override def toString: String = name

  // ClickHouse function names this implementation substitutes for.
  override val ckFuncNames: Array[String] = Array("toHour", "HOUR")

  override def description: String = s"$name: (time: timestamp) => shard_num: int"

  // Bind only to a single TIMESTAMP / TIMESTAMP_NTZ argument.
  override def bind(inputType: StructType): BoundFunction = inputType.fields match {
    case Array(StructField(_, TimestampType, _, _)) | Array(StructField(_, TimestampNTZType, _, _)) => this
    case _ => throw new UnsupportedOperationException(s"Expect 1 TIMESTAMP argument. $description")
  }

  override def inputTypes: Array[DataType] = Array(TimestampType)

  override def resultType: DataType = IntegerType

  override def isResultNullable: Boolean = false

  /**
   * @param time timestamp encoded as microseconds since the epoch
   *             (Spark internal form); divided by 1000 for the millisecond
   *             [[java.sql.Timestamp]] constructor
   * @return hour of day in [0, 23]
   */
  def invoke(time: Long): Int = {
    val ts = new Timestamp(time / 1000)
    // BUG FIX: "hh" is the 12-hour clock (01-12), so hour 0 mapped to 12 and
    // 13-23 mapped to 1-11, diverging from ClickHouse toHour (0-23).
    // "HH" is the 0-23 hour-of-day field.
    val formatter: SimpleDateFormat = new SimpleDateFormat("HH")
    // NOTE(review): SimpleDateFormat formats in the JVM default time zone —
    // confirm this matches the time zone ClickHouse's toHour uses server-side.
    formatter.format(ts).toInt
  }
}
63 changes: 63 additions & 0 deletions
63
spark-3.4/clickhouse-spark/src/main/scala/xenon/clickhouse/func/Mod.scala
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,63 @@ | ||
/* | ||
* Licensed under the Apache License, Version 2.0 (the "License"); | ||
* you may not use this file except in compliance with the License. | ||
* You may obtain a copy of the License at | ||
* | ||
* https://www.apache.org/licenses/LICENSE-2.0 | ||
* | ||
* Unless required by applicable law or agreed to in writing, software | ||
* distributed under the License is distributed on an "AS IS" BASIS, | ||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
* See the License for the specific language governing permissions and | ||
* limitations under the License. | ||
*/ | ||
|
||
package xenon.clickhouse.func | ||
|
||
import org.apache.spark.sql.connector.catalog.functions.{BoundFunction, ScalarFunction, UnboundFunction} | ||
import org.apache.spark.sql.types._ | ||
|
||
/**
 * Spark-side equivalent of ClickHouse's `modulo`: computes `a % b` on longs,
 * used as a sharding key expression.
 */
object Mod extends UnboundFunction with ScalarFunction[Long] with ClickhouseEquivFunction {

  override def name: String = "sharding_mod"

  override def canonicalName: String = s"clickhouse.$name"

  override def toString: String = name

  // remainder is not a Clickhouse function, but modulo will be parsed to remainder in the connector.
  // Added remainder as a synonym.
  override val ckFuncNames: Array[String] = Array("modulo", "remainder")

  override def description: String = s"$name: (a: long, b: long) => mod: long"

  // Operand types accepted at bind time (including STRING, which Spark may
  // cast to the declared LongType input at call time).
  private def isSupportedOperand(field: StructField): Boolean =
    field.dataType match {
      case LongType | IntegerType | ShortType | ByteType | StringType => true
      case _ => false
    }

  // Bind only when both operands are of a supported type.
  override def bind(inputType: StructType): BoundFunction =
    inputType.fields match {
      case Array(lhs, rhs) if isSupportedOperand(lhs) && isSupportedOperand(rhs) => this
      case _ => throw new UnsupportedOperationException(s"Expect 2 integer arguments. $description")
    }

  override def inputTypes: Array[DataType] = Array(LongType, LongType)

  override def resultType: DataType = LongType

  override def isResultNullable: Boolean = false

  /** JVM remainder: sign follows the dividend `a`. */
  def invoke(a: Long, b: Long): Long = a % b
}
52 changes: 52 additions & 0 deletions
52
spark-3.4/clickhouse-spark/src/main/scala/xenon/clickhouse/func/Months.scala
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,52 @@ | ||
/* | ||
* Licensed under the Apache License, Version 2.0 (the "License"); | ||
* you may not use this file except in compliance with the License. | ||
* You may obtain a copy of the License at | ||
* | ||
* https://www.apache.org/licenses/LICENSE-2.0 | ||
* | ||
* Unless required by applicable law or agreed to in writing, software | ||
* distributed under the License is distributed on an "AS IS" BASIS, | ||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
* See the License for the specific language governing permissions and | ||
* limitations under the License. | ||
*/ | ||
|
||
package xenon.clickhouse.func | ||
|
||
import org.apache.spark.sql.connector.catalog.functions.{BoundFunction, ScalarFunction, UnboundFunction} | ||
import org.apache.spark.sql.types._ | ||
|
||
import java.time.LocalDate | ||
import java.time.format.DateTimeFormatter | ||
|
||
/**
 * Spark-side equivalent of ClickHouse's `toYYYYMM`: maps a DATE column to
 * the integer `yyyyMM` representation, used as a sharding key.
 */
object Months extends UnboundFunction with ScalarFunction[Int] with ClickhouseEquivFunction {

  override def name: String = "clickhouse_months"

  override def canonicalName: String = s"clickhouse.$name"

  override def toString: String = name

  // ClickHouse function names this implementation substitutes for.
  override val ckFuncNames: Array[String] = Array("toYYYYMM")

  override def description: String = s"$name: (date: Date) => shard_num: int"

  // Bind only to a single DATE argument; everything else is rejected.
  override def bind(inputType: StructType): BoundFunction =
    inputType.fields match {
      case Array(field) if field.dataType == DateType => this
      // case Array(StructField(_, TimestampType, _, _)) | Array(StructField(_, TimestampNTZType, _, _)) => this
      case _ => throw new UnsupportedOperationException(s"Expect 1 DATE argument. $description")
    }

  override def inputTypes: Array[DataType] = Array(DateType)

  override def resultType: DataType = IntegerType

  override def isResultNullable: Boolean = false

  /**
   * @param days DATE encoded as days since the epoch (Spark internal form)
   * @return the date rendered as the integer yyyyMM
   */
  def invoke(days: Int): Int =
    LocalDate.ofEpochDay(days).format(DateTimeFormatter.ofPattern("yyyyMM")).toInt
}
Oops, something went wrong.