From d09a05129a5b9535ca673c08a0e568a114b89305 Mon Sep 17 00:00:00 2001
From: wforget <643348094@qq.com>
Date: Fri, 2 Aug 2024 16:09:20 +0800
Subject: [PATCH] [MINOR] Update velox doc

---
 docs/get-started/Velox.md | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/docs/get-started/Velox.md b/docs/get-started/Velox.md
index 63239c39931e..48fe83e0cf76 100644
--- a/docs/get-started/Velox.md
+++ b/docs/get-started/Velox.md
@@ -230,7 +230,7 @@ mvn clean package -Pbackends-velox -Pspark-3.3 -Pceleborn -DskipTests
 Then add the Gluten and Spark Celeborn Client packages to your Spark application's classpath(usually add them into `$SPARK_HOME/jars`).
 
 - Celeborn: celeborn-client-spark-3-shaded_2.12-[celebornVersion].jar
-- Gluten: gluten-velox-bundle-spark3.x_2.12-xx_xx_xx-SNAPSHOT.jar, gluten-celeborn-package-xx-SNAPSHOT.jar
+- Gluten: gluten-velox-bundle-spark3.x_2.12-xx_xx_xx-SNAPSHOT.jar (When `-Pceleborn` is specified, the Gluten bundle jar includes `gluten-celeborn-package`.)
 
 Currently to use Gluten following configurations are required in `spark-defaults.conf`
 
@@ -279,7 +279,7 @@ mvn clean package -Pbackends-velox -Pspark-3.3 -Puniffle -DskipTests
 Then add the Uniffle and Spark Celeborn Client packages to your Spark application's classpath(usually add them into `$SPARK_HOME/jars`).
 
 - Uniffle: rss-client-spark3-shaded-[uniffleVersion].jar
-- Gluten: gluten-uniffle-velox-xxx-SNAPSHOT-3.x.jar
+- Gluten: gluten-velox-bundle-spark3.x_2.12-xx_xx_xx-SNAPSHOT.jar (When `-Puniffle` is specified, the Gluten bundle jar includes `gluten-uniffle-package`.)
 
 Currently to use Gluten following configurations are required in `spark-defaults.conf`
 
@@ -298,7 +298,7 @@ spark.shuffle.service.enabled false
 spark.rss.storage.type LOCALFILE_HDFS
 
 # If you want to use dynamic resource allocation,
-# please refer to this URL (https://github.com/apache/incubator-uniffle/tree/master/patch/spark) to apply the patch into your own Spark.
+# please refer to this URL (https://uniffle.apache.org/docs/client-guide#support-spark-dynamic-allocation) for more details.
 spark.dynamicAllocation.enabled false
 ```
 
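
Note (not part of the patch): since both updated lines point users at the single Gluten bundle jar instead of separate shuffle-manager jars, a minimal sketch of how that one jar could be placed on the classpath when it is not copied into `$SPARK_HOME/jars`; the path below is an illustrative placeholder.

```
# Illustrative only: reference the single Gluten bundle jar in spark-defaults.conf.
# With -Pceleborn / -Puniffle the bundle already contains the corresponding shuffle package.
spark.driver.extraClassPath   /path/to/gluten-velox-bundle-spark3.x_2.12-xx_xx_xx-SNAPSHOT.jar
spark.executor.extraClassPath /path/to/gluten-velox-bundle-spark3.x_2.12-xx_xx_xx-SNAPSHOT.jar
```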