Skip to content

Commit

Permalink
Set default Spark version to 3.5
Browse files Browse the repository at this point in the history
  • Loading branch information
pan3793 committed Mar 12, 2024
1 parent da2f079 commit f386aeb
Show file tree
Hide file tree
Showing 12 changed files with 41 additions and 39 deletions.
22 changes: 11 additions & 11 deletions .github/workflows/master.yml
Original file line number Diff line number Diff line change
Expand Up @@ -57,25 +57,25 @@ jobs:
comment: ["normal"]
include:
- java: 8
spark: '3.4'
spark: '3.5'
spark-archive: '-Dspark.archive.mirror=https://archive.apache.org/dist/spark/spark-3.1.3 -Dspark.archive.name=spark-3.1.3-bin-hadoop3.2.tgz -Pzookeeper-3.6'
exclude-tags: '-Dmaven.plugin.scalatest.exclude.tags=org.scalatest.tags.Slow,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.IcebergTest,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.HudiTest,org.apache.kyuubi.tags.SparkLocalClusterTest'
comment: 'verify-on-spark-3.1-binary'
- java: 8
spark: '3.4'
spark: '3.5'
spark-archive: '-Dspark.archive.mirror=https://archive.apache.org/dist/spark/spark-3.2.4 -Dspark.archive.name=spark-3.2.4-bin-hadoop3.2.tgz -Pzookeeper-3.6'
exclude-tags: '-Dmaven.plugin.scalatest.exclude.tags=org.scalatest.tags.Slow,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.IcebergTest,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.HudiTest,org.apache.kyuubi.tags.SparkLocalClusterTest'
comment: 'verify-on-spark-3.2-binary'
- java: 8
spark: '3.4'
spark: '3.5'
spark-archive: '-Dspark.archive.mirror=https://archive.apache.org/dist/spark/spark-3.3.3 -Dspark.archive.name=spark-3.3.3-bin-hadoop3.tgz -Pzookeeper-3.6'
exclude-tags: '-Dmaven.plugin.scalatest.exclude.tags=org.scalatest.tags.Slow,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.IcebergTest,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.HudiTest,org.apache.kyuubi.tags.SparkLocalClusterTest'
comment: 'verify-on-spark-3.3-binary'
- java: 8
spark: '3.4'
spark-archive: '-Dspark.archive.mirror=https://archive.apache.org/dist/spark/spark-3.5.1 -Dspark.archive.name=spark-3.5.1-bin-hadoop3.tgz -Pzookeeper-3.6'
spark: '3.5'
spark-archive: '-Dspark.archive.mirror=https://archive.apache.org/dist/spark/spark-3.4.2 -Dspark.archive.name=spark-3.4.2-bin-hadoop3.tgz -Pzookeeper-3.6'
exclude-tags: '-Dmaven.plugin.scalatest.exclude.tags=org.scalatest.tags.Slow,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.IcebergTest,org.apache.kyuubi.tags.PaimonTest,org.apache.kyuubi.tags.SparkLocalClusterTest'
comment: 'verify-on-spark-3.5-binary'
comment: 'verify-on-spark-3.4-binary'
exclude:
# SPARK-33772: Spark supports JDK 17 since 3.3.0
- java: 17
Expand Down Expand Up @@ -105,7 +105,7 @@ jobs:
python-version: '3.9'
- name: Build and test Kyuubi and Spark with maven w/o linters
run: |
if [[ "${{ matrix.java }}" == "8" && "${{ matrix.spark }}" == "3.4" && "${{ matrix.spark-archive }}" == "" ]]; then
if [[ "${{ matrix.java }}" == "8" && "${{ matrix.spark }}" == "3.5" && "${{ matrix.spark-archive }}" == "" ]]; then
MVN_OPT="${MVN_OPT} -Pcodecov"
fi
TEST_MODULES="dev/kyuubi-codecov"
Expand All @@ -114,7 +114,7 @@ jobs:
- name: Code coverage
if: |
matrix.java == 8 &&
matrix.spark == '3.4' &&
matrix.spark == '3.5' &&
matrix.spark-archive == ''
uses: codecov/codecov-action@v3
with:
Expand All @@ -140,7 +140,7 @@ jobs:
java:
- '8'
spark:
- '3.4'
- '3.5'
steps:
- uses: actions/checkout@v4
- name: Free up disk space
Expand Down Expand Up @@ -374,8 +374,8 @@ jobs:
# https://minikube.sigs.k8s.io/docs/handbook/pushing/#7-loading-directly-to-in-cluster-container-runtime
minikube image load apache/kyuubi:latest
# pre-install spark into minikube
docker pull apache/spark:3.4.2
minikube image load apache/spark:3.4.2
docker pull apache/spark:3.5.1
minikube image load apache/spark:3.5.1
- name: kubectl pre-check
run: |
kubectl get nodes
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/nightly.yml
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ jobs:
strategy:
matrix:
profiles:
- '-Pspark-master -pl externals/kyuubi-spark-sql-engine -am'
- '-Pscala-2.13 -Pspark-master -pl externals/kyuubi-spark-sql-engine -am'
env:
SPARK_LOCAL_IP: localhost
steps:
Expand Down
14 changes: 7 additions & 7 deletions build/release/release.sh
Original file line number Diff line number Diff line change
Expand Up @@ -120,18 +120,18 @@ upload_nexus_staging() {
-s "${KYUUBI_DIR}/build/release/asf-settings.xml" \
-pl extensions/spark/kyuubi-extension-spark-3-3 -am

# Spark Extension Plugin for Spark 3.5
${KYUUBI_DIR}/build/mvn clean deploy -DskipTests -Papache-release,flink-provided,spark-provided,hive-provided,spark-3.5 \
# Spark Extension Plugin for Spark 3.4
${KYUUBI_DIR}/build/mvn clean deploy -DskipTests -Papache-release,flink-provided,spark-provided,hive-provided,spark-3.4 \
-s "${KYUUBI_DIR}/build/release/asf-settings.xml" \
-pl extensions/spark/kyuubi-extension-spark-3-5 -am
-pl extensions/spark/kyuubi-extension-spark-3-4 -am

# Spark Hive/TPC-DS/TPC-H Connector built with default Spark version (3.4) and Scala 2.13
${KYUUBI_DIR}/build/mvn clean deploy -DskipTests -Papache-release,flink-provided,spark-provided,hive-provided,spark-3.4,scala-2.13 \
# Spark Hive/TPC-DS/TPC-H Connector built with default Spark version (3.5) and Scala 2.13
${KYUUBI_DIR}/build/mvn clean deploy -DskipTests -Papache-release,flink-provided,spark-provided,hive-provided,spark-3.5,scala-2.13 \
-s "${KYUUBI_DIR}/build/release/asf-settings.xml" \
-pl extensions/spark/kyuubi-spark-connector-hive,extensions/spark/kyuubi-spark-connector-tpcds,extensions/spark/kyuubi-spark-connector-tpch -am

# All modules including Spark Extension Plugin and Connectors built with default Spark version (3.4) and default Scala version (2.12)
${KYUUBI_DIR}/build/mvn clean deploy -DskipTests -Papache-release,flink-provided,spark-provided,hive-provided,spark-3.4 \
# All modules including Spark Extension Plugin and Connectors built with default Spark version (3.5) and default Scala version (2.12)
${KYUUBI_DIR}/build/mvn clean deploy -DskipTests -Papache-release,flink-provided,spark-provided,hive-provided,spark-3.5 \
-s "${KYUUBI_DIR}/build/release/asf-settings.xml"
}

Expand Down
4 changes: 2 additions & 2 deletions docs/contributing/code/building.md
Original file line number Diff line number Diff line change
Expand Up @@ -65,8 +65,8 @@ Since v1.1.0, Kyuubi support building with different Spark profiles,
|-------------|---------|-------|
| -Pspark-3.2 | | 1.4.0 |
| -Pspark-3.3 | | 1.6.0 |
| -Pspark-3.4 | | 1.8.0 |
| -Pspark-3.5 | | 1.8.0 |
| -Pspark-3.4 | | 1.8.0 |
| -Pspark-3.5 | | 1.8.0 |

## Building Kyuubi Against Different Scala Versions

Expand Down
7 changes: 4 additions & 3 deletions docs/extensions/engines/spark/lineage.md
Original file line number Diff line number Diff line change
Expand Up @@ -117,19 +117,20 @@ Sometimes, it may be incompatible with other Spark distributions, then you may n
For example,

```shell
build/mvn clean package -pl :kyuubi-spark-lineage_2.12 -am -DskipTests -Dspark.version=3.1.2
build/mvn clean package -pl :kyuubi-spark-lineage_2.12 -am -DskipTests -Dspark.version=3.5.1
```

The available `spark.version`s are shown in the following table.

| Spark Version | Supported | Remark |
|:-------------:|:---------:|:------:|
| master | ✓ | - |
| 3.5.x | ✓ | - |
| 3.4.x | ✓ | - |
| 3.3.x | ✓ | - |
| 3.2.x | ✓ | - |
| 3.1.x | | - |
| 3.0.x | | - |
| 3.1.x | x | - |
| 3.0.x | x | - |
| 2.4.x | x | - |

Currently, Spark released with Scala 2.12 are supported.
Expand Down
2 changes: 1 addition & 1 deletion docs/extensions/engines/spark/rules.md
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ And don't worry, Kyuubi will support the new Apache Spark version in the future.
| kyuubi-extension-spark-3-2 | 3.2.x | 1.4.0-incubating | N/A | 1.4.0-incubating | spark-3.2 |
| kyuubi-extension-spark-3-3 | 3.3.x | 1.6.0-incubating | N/A | 1.6.0-incubating | spark-3.3 |
| kyuubi-extension-spark-3-4 | 3.4.x | 1.8.0 | N/A | 1.8.0 | spark-3.4 |
| kyuubi-extension-spark-3-4 | 3.5.x | 1.8.0 | N/A | N/A | spark-3.5 |
| kyuubi-extension-spark-3-5 | 3.5.x | 1.8.0 | N/A | 1.9.0 | spark-3.5 |

1. Check the matrix that if you are using the supported Spark version, and find the corresponding Kyuubi Spark SQL Extension jar
2. Get the Kyuubi Spark SQL Extension jar
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,8 +29,8 @@ import org.apache.kyuubi.sql.zorder.ZorderBytesUtils
*
* {{{
* RUN_BENCHMARK=1 ./build/mvn clean test \
* -pl extensions/spark/kyuubi-extension-spark-3-4 -am \
* -Pspark-3.4,kyuubi-extension-spark-3-4 \
* -pl extensions/spark/kyuubi-extension-spark-3-5 -am \
* -Pspark-3.5,kyuubi-extension-spark-3-5 \
* -Dtest=none -DwildcardSuites=org.apache.spark.sql.ZorderCoreBenchmark
* }}}
*/
Expand Down
4 changes: 2 additions & 2 deletions extensions/spark/kyuubi-spark-authz/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -34,8 +34,8 @@ build/mvn clean package -DskipTests -pl :kyuubi-spark-authz_2.12 -am -Dspark.ver
`-Dspark.version=`

- [x] master
- [x] 3.5.x
- [x] 3.4.x (default)
- [x] 3.5.x (default)
- [x] 3.4.x
- [x] 3.3.x
- [x] 3.2.x
- [x] 3.1.x
Expand Down
5 changes: 3 additions & 2 deletions extensions/spark/kyuubi-spark-lineage/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -34,8 +34,9 @@ build/mvn clean package -DskipTests -pl :kyuubi-spark-lineage_2.12 -am -Dspark.v
`-Dspark.version=`

- [x] master
- [ ] 3.4.x
- [x] 3.3.x (default)
- [x] 3.5.x (default)
- [x] 3.4.x
- [x] 3.3.x
- [x] 3.2.x
- [x] 3.1.x

Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@ class KyuubiOnKubernetesWithSparkTestsBase extends WithKyuubiServerOnKubernetes
Map(
"spark.master" -> s"k8s://$miniKubeApiMaster",
// We should update spark docker image in ./github/workflows/master.yml at the same time
"spark.kubernetes.container.image" -> "apache/spark:3.4.2",
"spark.kubernetes.container.image" -> "apache/spark:3.5.1",
"spark.kubernetes.container.image.pullPolicy" -> "IfNotPresent",
"spark.executor.memory" -> "512M",
"spark.driver.memory" -> "1024M",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,7 @@ abstract class SparkOnKubernetesSuiteBase
// TODO Support more Spark version
// Spark official docker image: https://hub.docker.com/r/apache/spark/tags
KyuubiConf().set("spark.master", s"k8s://$apiServerAddress")
.set("spark.kubernetes.container.image", "apache/spark:3.4.2")
.set("spark.kubernetes.container.image", "apache/spark:3.5.1")
.set("spark.kubernetes.container.image.pullPolicy", "IfNotPresent")
.set("spark.executor.instances", "1")
.set("spark.executor.memory", "512M")
Expand Down
12 changes: 6 additions & 6 deletions pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -133,8 +133,8 @@
<commons-io.version>2.11.0</commons-io.version>
<commons-lang.version>2.6</commons-lang.version>
<commons-lang3.version>3.13.0</commons-lang3.version>
<delta.artifact>delta-core</delta.artifact>
<delta.version>2.4.0</delta.version>
<delta.artifact>delta-spark</delta.artifact>
<delta.version>3.1.0</delta.version>
<failsafe.verion>3.3.2</failsafe.verion>
<fb303.version>0.9.3</fb303.version>
<flexmark.version>0.62.2</flexmark.version>
Expand Down Expand Up @@ -198,10 +198,10 @@
<snakeyaml.version>2.2</snakeyaml.version>
<!--
DO NOT forget to change the following properties when change the minor version of Spark:
`delta.version`, `maven.plugin.scalatest.exclude.tags`
`delta.version`, `delta.artifact`, `maven.plugin.scalatest.exclude.tags`
-->
<spark.version>3.4.2</spark.version>
<spark.binary.version>3.4</spark.binary.version>
<spark.version>3.5.1</spark.version>
<spark.binary.version>3.5</spark.binary.version>
<spark.archive.scala.suffix></spark.archive.scala.suffix>
<spark.archive.name>spark-${spark.version}-bin-hadoop3${spark.archive.scala.suffix}.tgz</spark.archive.name>
<spark.archive.mirror>${apache.archive.dist}/spark/spark-${spark.version}</spark.archive.mirror>
Expand Down Expand Up @@ -235,7 +235,7 @@
<maven.plugin.frontend.version>1.12.1</maven.plugin.frontend.version>
<maven.plugin.scala.version>4.8.0</maven.plugin.scala.version>
<maven.plugin.scalatest.version>2.2.0</maven.plugin.scalatest.version>
<maven.plugin.scalatest.exclude.tags>org.scalatest.tags.Slow,org.apache.kyuubi.tags.IcebergTest,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.HudiTest</maven.plugin.scalatest.exclude.tags>
<maven.plugin.scalatest.exclude.tags>org.scalatest.tags.Slow</maven.plugin.scalatest.exclude.tags>
<maven.plugin.scalatest.include.tags></maven.plugin.scalatest.include.tags>
<maven.plugin.scalatest.debug.enabled>false</maven.plugin.scalatest.debug.enabled>
<maven.plugin.spotless.version>2.30.0</maven.plugin.spotless.version>
Expand Down

0 comments on commit f386aeb

Please sign in to comment.