
Commit 05b828b

[DNM] Bump Spark to 3.5.4
1 parent 5c18acc commit 05b828b

6 files changed, +26 −26 lines changed


.github/workflows/util/install_spark_resources.sh

+10-10
@@ -63,26 +63,26 @@ case "$1" in
   3.5)
     # Spark-3.5
     cd ${INSTALL_DIR} && \
-    wget -nv https://archive.apache.org/dist/spark/spark-3.5.2/spark-3.5.2-bin-hadoop3.tgz && \
-    tar --strip-components=1 -xf spark-3.5.2-bin-hadoop3.tgz spark-3.5.2-bin-hadoop3/jars/ && \
-    rm -rf spark-3.5.2-bin-hadoop3.tgz && \
+    wget -nv https://archive.apache.org/dist/spark/spark-3.5.4/spark-3.5.4-bin-hadoop3.tgz && \
+    tar --strip-components=1 -xf spark-3.5.4-bin-hadoop3.tgz spark-3.5.4-bin-hadoop3/jars/ && \
+    rm -rf spark-3.5.4-bin-hadoop3.tgz && \
     mkdir -p ${INSTALL_DIR}/shims/spark35/spark_home/assembly/target/scala-2.12 && \
     mv jars ${INSTALL_DIR}/shims/spark35/spark_home/assembly/target/scala-2.12 && \
-    wget -nv https://github.com/apache/spark/archive/refs/tags/v3.5.2.tar.gz && \
-    tar --strip-components=1 -xf v3.5.2.tar.gz spark-3.5.2/sql/core/src/test/resources/ && \
+    wget -nv https://github.com/apache/spark/archive/refs/tags/v3.5.4.tar.gz && \
+    tar --strip-components=1 -xf v3.5.4.tar.gz spark-3.5.4/sql/core/src/test/resources/ && \
     mkdir -p shims/spark35/spark_home/ && \
     mv sql shims/spark35/spark_home/
     ;;
   3.5-scala2.13)
     # Spark-3.5, scala 2.13
     cd ${INSTALL_DIR} && \
-    wget -nv https://archive.apache.org/dist/spark/spark-3.5.2/spark-3.5.2-bin-hadoop3.tgz && \
-    tar --strip-components=1 -xf spark-3.5.2-bin-hadoop3.tgz spark-3.5.2-bin-hadoop3/jars/ && \
-    rm -rf spark-3.5.2-bin-hadoop3.tgz && \
+    wget -nv https://archive.apache.org/dist/spark/spark-3.5.4/spark-3.5.4-bin-hadoop3.tgz && \
+    tar --strip-components=1 -xf spark-3.5.4-bin-hadoop3.tgz spark-3.5.4-bin-hadoop3/jars/ && \
+    rm -rf spark-3.5.4-bin-hadoop3.tgz && \
     mkdir -p ${INSTALL_DIR}/shims/spark35-scala2.13/spark_home/assembly/target/scala-2.13 && \
     mv jars ${INSTALL_DIR}/shims/spark35-scala2.13/spark_home/assembly/target/scala-2.13 && \
-    wget -nv https://github.com/apache/spark/archive/refs/tags/v3.5.2.tar.gz && \
-    tar --strip-components=1 -xf v3.5.2.tar.gz spark-3.5.2/sql/core/src/test/resources/ && \
+    wget -nv https://github.com/apache/spark/archive/refs/tags/v3.5.4.tar.gz && \
+    tar --strip-components=1 -xf v3.5.4.tar.gz spark-3.5.4/sql/core/src/test/resources/ && \
     mkdir -p shims/spark35-scala2.13/spark_home/ && \
     mv sql shims/spark35-scala2.13/spark_home/
     ;;
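
For reference, the script dispatches on its first argument (the `3.5` and `3.5-scala2.13` cases above) and stages the matching Spark jars and SQL test resources under `${INSTALL_DIR}/shims/`. A minimal invocation sketch, assuming `INSTALL_DIR` is exported by the caller (the workflow sets the real value, which is not shown in this hunk):

    export INSTALL_DIR=/opt/gluten-ci   # illustrative path; the CI workflow defines the actual location
    bash .github/workflows/util/install_spark_resources.sh 3.5             # Spark 3.5.4 jars + test resources, Scala 2.12 shim
    bash .github/workflows/util/install_spark_resources.sh 3.5-scala2.13   # same distribution, Scala 2.13 shim layout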

.github/workflows/velox_backend.yml

+11-11
@@ -855,9 +855,9 @@ jobs:
           dnf module -y install python39 && \
           alternatives --set python3 /usr/bin/python3.9 && \
           pip3 install setuptools && \
-          pip3 install pyspark==3.5.2 cython && \
+          pip3 install pyspark==3.5.4 cython && \
           pip3 install pandas pyarrow
-      - name: Build and Run unit test for Spark 3.5.2 (other tests)
+      - name: Build and Run unit test for Spark 3.5.4 (other tests)
         run: |
           cd $GITHUB_WORKSPACE/
           export SPARK_SCALA_VERSION=2.12
@@ -898,9 +898,9 @@ jobs:
           dnf module -y install python39 && \
           alternatives --set python3 /usr/bin/python3.9 && \
           pip3 install setuptools && \
-          pip3 install pyspark==3.5.2 cython && \
+          pip3 install pyspark==3.5.4 cython && \
           pip3 install pandas pyarrow
-      - name: Build and Run unit test for Spark 3.5.2 with scala-2.13 (other tests)
+      - name: Build and Run unit test for Spark 3.5.4 with scala-2.13 (other tests)
         run: |
           cd $GITHUB_WORKSPACE/
           export SPARK_SCALA_VERSION=2.13
@@ -930,7 +930,7 @@ jobs:
         with:
           name: arrow-jars-centos-7-${{github.sha}}
           path: /root/.m2/repository/org/apache/arrow/
-      - name: Build and Run unit test for Spark 3.5.2 (slow tests)
+      - name: Build and Run unit test for Spark 3.5.4 (slow tests)
         run: |
           cd $GITHUB_WORKSPACE/
           $MVN_CMD clean test -Pspark-3.5 -Pbackends-velox -Pceleborn -Piceberg -Pdelta -Phudi -Pspark-ut \
@@ -964,9 +964,9 @@ jobs:
           dnf module -y install python39 && \
           alternatives --set python3 /usr/bin/python3.9 && \
           pip3 install setuptools && \
-          pip3 install pyspark==3.5.2 cython && \
+          pip3 install pyspark==3.5.4 cython && \
           pip3 install pandas pyarrow
-      - name: Build and Run unit test for Spark 3.5.2 (other tests)
+      - name: Build and Run unit test for Spark 3.5.4 (other tests)
         run: |
           cd $GITHUB_WORKSPACE/
           export SPARK_SCALA_VERSION=2.12
@@ -995,7 +995,7 @@ jobs:
         with:
           name: arrow-jars-centos-7-${{github.sha}}
           path: /root/.m2/repository/org/apache/arrow/
-      - name: Build and Run unit test for Spark 3.5.2 (slow tests)
+      - name: Build and Run unit test for Spark 3.5.4 (slow tests)
         run: |
           cd $GITHUB_WORKSPACE/
           $MVN_CMD clean test -Pspark-3.5 -Pbackends-velox -Pceleborn -Piceberg -Pdelta -Pspark-ut \
@@ -1028,9 +1028,9 @@ jobs:
           dnf module -y install python39 && \
           alternatives --set python3 /usr/bin/python3.9 && \
           pip3 install setuptools && \
-          pip3 install pyspark==3.5.2 cython && \
+          pip3 install pyspark==3.5.4 cython && \
           pip3 install pandas pyarrow
-      - name: Build and Run unit test for Spark 3.5.2 (other tests)
+      - name: Build and Run unit test for Spark 3.5.4 (other tests)
         run: |
           cd $GITHUB_WORKSPACE/
           export SPARK_SCALA_VERSION=2.12
@@ -1059,7 +1059,7 @@ jobs:
         with:
           name: arrow-jars-centos-7-${{github.sha}}
           path: /root/.m2/repository/org/apache/arrow/
-      - name: Build and Run unit test for Spark 3.5.2 (slow tests)
+      - name: Build and Run unit test for Spark 3.5.4 (slow tests)
         run: |
           cd $GITHUB_WORKSPACE/
           $MVN_CMD clean test -Pspark-3.5 -Pbackends-velox -Pceleborn -Piceberg -Pdelta -Pspark-ut \
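
The updated steps can be approximated locally; a rough sketch using only the options visible in the hunks above (`$MVN_CMD` is the workflow's Maven variable, assumed here to be plain `mvn`, and the continuation flags cut off by the diff are omitted):

    pip3 install pyspark==3.5.4 cython pandas pyarrow   # PySpark pinned to the bumped Spark patch version
    export SPARK_SCALA_VERSION=2.12
    mvn clean test -Pspark-3.5 -Pbackends-velox -Pceleborn -Piceberg -Pdelta -Pspark-ut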

docs/get-started/Velox.md

+2-2
@@ -9,7 +9,7 @@ parent: Getting-Started
 
 | Type  | Version                      |
 |-------|------------------------------|
-| Spark | 3.2.2, 3.3.1, 3.4.4, 3.5.2   |
+| Spark | 3.2.2, 3.3.1, 3.4.4, 3.5.4   |
 | OS    | Ubuntu20.04/22.04, Centos7/8 |
 | jdk   | openjdk8/jdk17               |
 | scala | 2.12                         |
@@ -18,7 +18,7 @@ parent: Getting-Started
 
 Currently, with static build Gluten+Velox backend supports all the Linux OSes, but is only tested on **Ubuntu20.04/Ubuntu22.04/Centos7/Centos8**. With dynamic build, Gluten+Velox backend support **Ubuntu20.04/Ubuntu22.04/Centos7/Centos8** and their variants.
 
-Currently, the officially supported Spark versions are 3.2.2, 3.3.1, 3.4.4 and 3.5.2.
+Currently, the officially supported Spark versions are 3.2.2, 3.3.1, 3.4.4 and 3.5.4.
 
 We need to set up the `JAVA_HOME` env. Currently, Gluten supports **java 8** and **java 17**.
 
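
As the surrounding context notes, `JAVA_HOME` must point at a supported JDK (8 or 17); a sketch with an illustrative Ubuntu OpenJDK 17 path:

    export JAVA_HOME=/usr/lib/jvm/java-17-openjdk-amd64   # illustrative; substitute your local JDK 8 or 17 install
    export PATH=$JAVA_HOME/bin:$PATH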

docs/get-started/build-guide.md

+1-1
@@ -74,4 +74,4 @@ It's name pattern is `gluten-<backend_type>-bundle-spark<spark.bundle.version>_<
 | 3.2.2 | 3.2 | 2.12 |
 | 3.3.1 | 3.3 | 2.12 |
 | 3.4.4 | 3.4 | 2.12 |
-| 3.5.2 | 3.5 | 2.12 |
+| 3.5.4 | 3.5 | 2.12 |

pom.xml

+1-1
@@ -336,7 +336,7 @@
     <properties>
       <sparkbundle.version>3.5</sparkbundle.version>
       <sparkshim.artifactId>spark-sql-columnar-shims-spark35</sparkshim.artifactId>
-      <spark.version>3.5.2</spark.version>
+      <spark.version>3.5.4</spark.version>
       <iceberg.version>1.5.0</iceberg.version>
       <delta.package.name>delta-spark</delta.package.name>
       <delta.version>3.2.0</delta.version>
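
Because `spark.version` is defined inside the `spark-3.5` profile, the bump takes effect whenever that profile is activated; a hedged build sketch (the profile names come from this repository, the remaining flags are standard Maven options):

    mvn clean package -Pspark-3.5 -Pbackends-velox -DskipTests   # resolves Spark dependencies at 3.5.4 via the updated property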

tools/gluten-it/pom.xml

+1-1
@@ -170,7 +170,7 @@
     <profile>
       <id>spark-3.5</id>
       <properties>
-        <spark.version>3.5.2</spark.version>
+        <spark.version>3.5.4</spark.version>
         <scala.library.version>2.12.18</scala.library.version>
       </properties>
     </profile>
