diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml
index bc82025edf3..ad76dfd4ef4 100644
--- a/.github/workflows/master.yml
+++ b/.github/workflows/master.yml
@@ -57,25 +57,25 @@ jobs:
comment: ["normal"]
include:
- java: 8
- spark: '3.4'
+ spark: '3.5'
spark-archive: '-Dspark.archive.mirror=https://archive.apache.org/dist/spark/spark-3.1.3 -Dspark.archive.name=spark-3.1.3-bin-hadoop3.2.tgz -Pzookeeper-3.6'
exclude-tags: '-Dmaven.plugin.scalatest.exclude.tags=org.scalatest.tags.Slow,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.IcebergTest,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.HudiTest,org.apache.kyuubi.tags.SparkLocalClusterTest'
comment: 'verify-on-spark-3.1-binary'
- java: 8
- spark: '3.4'
+ spark: '3.5'
spark-archive: '-Dspark.archive.mirror=https://archive.apache.org/dist/spark/spark-3.2.4 -Dspark.archive.name=spark-3.2.4-bin-hadoop3.2.tgz -Pzookeeper-3.6'
exclude-tags: '-Dmaven.plugin.scalatest.exclude.tags=org.scalatest.tags.Slow,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.IcebergTest,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.HudiTest,org.apache.kyuubi.tags.SparkLocalClusterTest'
comment: 'verify-on-spark-3.2-binary'
- java: 8
- spark: '3.4'
+ spark: '3.5'
spark-archive: '-Dspark.archive.mirror=https://archive.apache.org/dist/spark/spark-3.3.3 -Dspark.archive.name=spark-3.3.3-bin-hadoop3.tgz -Pzookeeper-3.6'
exclude-tags: '-Dmaven.plugin.scalatest.exclude.tags=org.scalatest.tags.Slow,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.IcebergTest,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.HudiTest,org.apache.kyuubi.tags.SparkLocalClusterTest'
comment: 'verify-on-spark-3.3-binary'
- java: 8
- spark: '3.4'
- spark-archive: '-Dspark.archive.mirror=https://archive.apache.org/dist/spark/spark-3.5.1 -Dspark.archive.name=spark-3.5.1-bin-hadoop3.tgz -Pzookeeper-3.6'
+ spark: '3.5'
+ spark-archive: '-Dspark.archive.mirror=https://archive.apache.org/dist/spark/spark-3.4.2 -Dspark.archive.name=spark-3.4.2-bin-hadoop3.tgz -Pzookeeper-3.6'
exclude-tags: '-Dmaven.plugin.scalatest.exclude.tags=org.scalatest.tags.Slow,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.IcebergTest,org.apache.kyuubi.tags.PaimonTest,org.apache.kyuubi.tags.SparkLocalClusterTest'
- comment: 'verify-on-spark-3.5-binary'
+ comment: 'verify-on-spark-3.4-binary'
exclude:
# SPARK-33772: Spark supports JDK 17 since 3.3.0
- java: 17
@@ -105,7 +105,7 @@ jobs:
python-version: '3.9'
- name: Build and test Kyuubi and Spark with maven w/o linters
run: |
- if [[ "${{ matrix.java }}" == "8" && "${{ matrix.spark }}" == "3.4" && "${{ matrix.spark-archive }}" == "" ]]; then
+ if [[ "${{ matrix.java }}" == "8" && "${{ matrix.spark }}" == "3.5" && "${{ matrix.spark-archive }}" == "" ]]; then
MVN_OPT="${MVN_OPT} -Pcodecov"
fi
TEST_MODULES="dev/kyuubi-codecov"
@@ -114,7 +114,7 @@ jobs:
- name: Code coverage
if: |
matrix.java == 8 &&
- matrix.spark == '3.4' &&
+ matrix.spark == '3.5' &&
matrix.spark-archive == ''
uses: codecov/codecov-action@v3
with:
@@ -140,7 +140,7 @@ jobs:
java:
- '8'
spark:
- - '3.4'
+ - '3.5'
steps:
- uses: actions/checkout@v4
- name: Free up disk space
@@ -374,8 +374,8 @@ jobs:
# https://minikube.sigs.k8s.io/docs/handbook/pushing/#7-loading-directly-to-in-cluster-container-runtime
minikube image load apache/kyuubi:latest
# pre-install spark into minikube
- docker pull apache/spark:3.4.2
- minikube image load apache/spark:3.4.2
+ docker pull apache/spark:3.5.1
+ minikube image load apache/spark:3.5.1
- name: kubectl pre-check
run: |
kubectl get nodes
diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml
index 1ba696bbe6f..728c3ad5c08 100644
--- a/.github/workflows/nightly.yml
+++ b/.github/workflows/nightly.yml
@@ -29,7 +29,7 @@ jobs:
strategy:
matrix:
profiles:
- - '-Pspark-master -pl externals/kyuubi-spark-sql-engine -am'
+ - '-Pscala-2.13 -Pspark-master -pl externals/kyuubi-spark-sql-engine -am'
env:
SPARK_LOCAL_IP: localhost
steps:
diff --git a/build/release/release.sh b/build/release/release.sh
index de32a492e2e..e1aaae83f6b 100755
--- a/build/release/release.sh
+++ b/build/release/release.sh
@@ -120,18 +120,18 @@ upload_nexus_staging() {
-s "${KYUUBI_DIR}/build/release/asf-settings.xml" \
-pl extensions/spark/kyuubi-extension-spark-3-3 -am
- # Spark Extension Plugin for Spark 3.5
- ${KYUUBI_DIR}/build/mvn clean deploy -DskipTests -Papache-release,flink-provided,spark-provided,hive-provided,spark-3.5 \
+ # Spark Extension Plugin for Spark 3.4
+ ${KYUUBI_DIR}/build/mvn clean deploy -DskipTests -Papache-release,flink-provided,spark-provided,hive-provided,spark-3.4 \
-s "${KYUUBI_DIR}/build/release/asf-settings.xml" \
- -pl extensions/spark/kyuubi-extension-spark-3-5 -am
+ -pl extensions/spark/kyuubi-extension-spark-3-4 -am
- # Spark Hive/TPC-DS/TPC-H Connector built with default Spark version (3.4) and Scala 2.13
- ${KYUUBI_DIR}/build/mvn clean deploy -DskipTests -Papache-release,flink-provided,spark-provided,hive-provided,spark-3.4,scala-2.13 \
+ # Spark Hive/TPC-DS/TPC-H Connector built with default Spark version (3.5) and Scala 2.13
+ ${KYUUBI_DIR}/build/mvn clean deploy -DskipTests -Papache-release,flink-provided,spark-provided,hive-provided,spark-3.5,scala-2.13 \
-s "${KYUUBI_DIR}/build/release/asf-settings.xml" \
-pl extensions/spark/kyuubi-spark-connector-hive,extensions/spark/kyuubi-spark-connector-tpcds,extensions/spark/kyuubi-spark-connector-tpch -am
- # All modules including Spark Extension Plugin and Connectors built with default Spark version (3.4) and default Scala version (2.12)
- ${KYUUBI_DIR}/build/mvn clean deploy -DskipTests -Papache-release,flink-provided,spark-provided,hive-provided,spark-3.4 \
+ # All modules including Spark Extension Plugin and Connectors built with default Spark version (3.5) and default Scala version (2.12)
+ ${KYUUBI_DIR}/build/mvn clean deploy -DskipTests -Papache-release,flink-provided,spark-provided,hive-provided,spark-3.5 \
-s "${KYUUBI_DIR}/build/release/asf-settings.xml"
}
diff --git a/docs/contributing/code/building.md b/docs/contributing/code/building.md
index 82409fc9e52..502925874a7 100644
--- a/docs/contributing/code/building.md
+++ b/docs/contributing/code/building.md
@@ -65,8 +65,8 @@ Since v1.1.0, Kyuubi support building with different Spark profiles,
|-------------|---------|-------|
| -Pspark-3.2 | | 1.4.0 |
| -Pspark-3.3 | | 1.6.0 |
-| -Pspark-3.4 | ✓ | 1.8.0 |
-| -Pspark-3.5 | | 1.8.0 |
+| -Pspark-3.4 | | 1.8.0 |
+| -Pspark-3.5 | ✓ | 1.8.0 |
## Building Kyuubi Against Different Scala Versions
diff --git a/docs/extensions/engines/spark/lineage.md b/docs/extensions/engines/spark/lineage.md
index 2dbb2a026d3..b634473d49b 100644
--- a/docs/extensions/engines/spark/lineage.md
+++ b/docs/extensions/engines/spark/lineage.md
@@ -117,7 +117,7 @@ Sometimes, it may be incompatible with other Spark distributions, then you may n
For example,
```shell
-build/mvn clean package -pl :kyuubi-spark-lineage_2.12 -am -DskipTests -Dspark.version=3.1.2
+build/mvn clean package -pl :kyuubi-spark-lineage_2.12 -am -DskipTests -Dspark.version=3.5.1
```
The available `spark.version`s are shown in the following table.
@@ -125,11 +125,12 @@ The available `spark.version`s are shown in the following table.
| Spark Version | Supported | Remark |
|:-------------:|:---------:|:------:|
| master | √ | - |
+| 3.5.x | √ | - |
| 3.4.x | √ | - |
| 3.3.x | √ | - |
| 3.2.x | √ | - |
-| 3.1.x | √ | - |
-| 3.0.x | √ | - |
+| 3.1.x | x | - |
+| 3.0.x | x | - |
| 2.4.x | x | - |
Currently, Spark released with Scala 2.12 are supported.
diff --git a/docs/extensions/engines/spark/rules.md b/docs/extensions/engines/spark/rules.md
index c8bd6b4d5d4..986fda14c54 100644
--- a/docs/extensions/engines/spark/rules.md
+++ b/docs/extensions/engines/spark/rules.md
@@ -49,7 +49,7 @@ And don't worry, Kyuubi will support the new Apache Spark version in the future.
| kyuubi-extension-spark-3-2 | 3.2.x | 1.4.0-incubating | N/A | 1.4.0-incubating | spark-3.2 |
| kyuubi-extension-spark-3-3 | 3.3.x | 1.6.0-incubating | N/A | 1.6.0-incubating | spark-3.3 |
| kyuubi-extension-spark-3-4 | 3.4.x | 1.8.0 | N/A | 1.8.0 | spark-3.4 |
-| kyuubi-extension-spark-3-4 | 3.5.x | 1.8.0 | N/A | N/A | spark-3.5 |
+| kyuubi-extension-spark-3-5 | 3.5.x | 1.8.0 | N/A | 1.9.0 | spark-3.5 |
1. Check the matrix that if you are using the supported Spark version, and find the corresponding Kyuubi Spark SQL Extension jar
2. Get the Kyuubi Spark SQL Extension jar
diff --git a/extensions/spark/kyuubi-extension-spark-3-4/src/test/scala/org/apache/spark/sql/ZorderCoreBenchmark.scala b/extensions/spark/kyuubi-extension-spark-3-4/src/test/scala/org/apache/spark/sql/ZorderCoreBenchmark.scala
index b2ce305e435..7af1ca0482b 100644
--- a/extensions/spark/kyuubi-extension-spark-3-4/src/test/scala/org/apache/spark/sql/ZorderCoreBenchmark.scala
+++ b/extensions/spark/kyuubi-extension-spark-3-4/src/test/scala/org/apache/spark/sql/ZorderCoreBenchmark.scala
@@ -29,8 +29,8 @@ import org.apache.kyuubi.sql.zorder.ZorderBytesUtils
*
* {{{
* RUN_BENCHMARK=1 ./build/mvn clean test \
- * -pl extensions/spark/kyuubi-extension-spark-3-4 -am \
- * -Pspark-3.4,kyuubi-extension-spark-3-4 \
+ *   -pl extensions/spark/kyuubi-extension-spark-3-4 -am \
+ *   -Pspark-3.4,kyuubi-extension-spark-3-4 \
* -Dtest=none -DwildcardSuites=org.apache.spark.sql.ZorderCoreBenchmark
* }}}
*/
diff --git a/extensions/spark/kyuubi-spark-authz/README.md b/extensions/spark/kyuubi-spark-authz/README.md
index 43ee45b09a8..eb295c68c5d 100644
--- a/extensions/spark/kyuubi-spark-authz/README.md
+++ b/extensions/spark/kyuubi-spark-authz/README.md
@@ -34,8 +34,8 @@ build/mvn clean package -DskipTests -pl :kyuubi-spark-authz_2.12 -am -Dspark.ver
`-Dspark.version=`
- [x] master
-- [x] 3.5.x
-- [x] 3.4.x (default)
+- [x] 3.5.x (default)
+- [x] 3.4.x
- [x] 3.3.x
- [x] 3.2.x
- [x] 3.1.x
diff --git a/extensions/spark/kyuubi-spark-lineage/README.md b/extensions/spark/kyuubi-spark-lineage/README.md
index 1c42d3736e3..3f24cd1730c 100644
--- a/extensions/spark/kyuubi-spark-lineage/README.md
+++ b/extensions/spark/kyuubi-spark-lineage/README.md
@@ -34,8 +34,9 @@ build/mvn clean package -DskipTests -pl :kyuubi-spark-lineage_2.12 -am -Dspark.v
`-Dspark.version=`
- [x] master
-- [ ] 3.4.x
-- [x] 3.3.x (default)
+- [x] 3.5.x (default)
+- [x] 3.4.x
+- [x] 3.3.x
- [x] 3.2.x
- [x] 3.1.x
diff --git a/integration-tests/kyuubi-kubernetes-it/src/test/scala/org/apache/kyuubi/kubernetes/test/deployment/KyuubiOnKubernetesTestsSuite.scala b/integration-tests/kyuubi-kubernetes-it/src/test/scala/org/apache/kyuubi/kubernetes/test/deployment/KyuubiOnKubernetesTestsSuite.scala
index 9d47ab99815..1256687ae22 100644
--- a/integration-tests/kyuubi-kubernetes-it/src/test/scala/org/apache/kyuubi/kubernetes/test/deployment/KyuubiOnKubernetesTestsSuite.scala
+++ b/integration-tests/kyuubi-kubernetes-it/src/test/scala/org/apache/kyuubi/kubernetes/test/deployment/KyuubiOnKubernetesTestsSuite.scala
@@ -55,7 +55,7 @@ class KyuubiOnKubernetesWithSparkTestsBase extends WithKyuubiServerOnKubernetes
Map(
"spark.master" -> s"k8s://$miniKubeApiMaster",
// We should update spark docker image in ./github/workflows/master.yml at the same time
- "spark.kubernetes.container.image" -> "apache/spark:3.4.2",
+ "spark.kubernetes.container.image" -> "apache/spark:3.5.1",
"spark.kubernetes.container.image.pullPolicy" -> "IfNotPresent",
"spark.executor.memory" -> "512M",
"spark.driver.memory" -> "1024M",
diff --git a/integration-tests/kyuubi-kubernetes-it/src/test/scala/org/apache/kyuubi/kubernetes/test/spark/SparkOnKubernetesTestsSuite.scala b/integration-tests/kyuubi-kubernetes-it/src/test/scala/org/apache/kyuubi/kubernetes/test/spark/SparkOnKubernetesTestsSuite.scala
index a32a45d6c27..cf4b3ff3b5a 100644
--- a/integration-tests/kyuubi-kubernetes-it/src/test/scala/org/apache/kyuubi/kubernetes/test/spark/SparkOnKubernetesTestsSuite.scala
+++ b/integration-tests/kyuubi-kubernetes-it/src/test/scala/org/apache/kyuubi/kubernetes/test/spark/SparkOnKubernetesTestsSuite.scala
@@ -50,7 +50,7 @@ abstract class SparkOnKubernetesSuiteBase
// TODO Support more Spark version
// Spark official docker image: https://hub.docker.com/r/apache/spark/tags
KyuubiConf().set("spark.master", s"k8s://$apiServerAddress")
- .set("spark.kubernetes.container.image", "apache/spark:3.4.2")
+ .set("spark.kubernetes.container.image", "apache/spark:3.5.1")
.set("spark.kubernetes.container.image.pullPolicy", "IfNotPresent")
.set("spark.executor.instances", "1")
.set("spark.executor.memory", "512M")
diff --git a/pom.xml b/pom.xml
index 85b2e023225..7e1d0748d2c 100644
--- a/pom.xml
+++ b/pom.xml
@@ -133,8 +133,8 @@
2.11.0
2.6
3.13.0
-    <delta.artifact>delta-core</delta.artifact>
-    <delta.version>2.4.0</delta.version>
+    <delta.artifact>delta-spark</delta.artifact>
+    <delta.version>3.1.0</delta.version>
3.3.2
0.9.3
0.62.2
@@ -198,10 +198,10 @@
2.2
-    <spark.version>3.4.2</spark.version>
-    <spark.binary.version>3.4</spark.binary.version>
+    <spark.version>3.5.1</spark.version>
+    <spark.binary.version>3.5</spark.binary.version>
spark-${spark.version}-bin-hadoop3${spark.archive.scala.suffix}.tgz
${apache.archive.dist}/spark/spark-${spark.version}
@@ -235,7 +235,7 @@
1.12.1
4.8.0
2.2.0
-    <maven.plugin.scalatest.exclude.tags>org.scalatest.tags.Slow,org.apache.kyuubi.tags.IcebergTest,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.HudiTest</maven.plugin.scalatest.exclude.tags>
+    <maven.plugin.scalatest.exclude.tags>org.scalatest.tags.Slow</maven.plugin.scalatest.exclude.tags>
false
2.30.0