Drop spark-3.1.x support for spark-rapids
CI part of issue NVIDIA#10955.

We'll drop spark-3.1.x support from branch-24.08 and change the default Spark version string to 3.2.0.

This change needs to land together with the source-code update that drops spark-3.1.x support.

We'll keep monitoring CI job status after all the related changes are merged.

Signed-off-by: Tim Liu <[email protected]>
NvTimLiu committed Jul 3, 2024
1 parent e92cbd2 commit 830409c
Showing 5 changed files with 11 additions and 10 deletions.
4 changes: 2 additions & 2 deletions jenkins/hadoop-def.sh
@@ -1,6 +1,6 @@
 #!/bin/bash
 #
-# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
+# Copyright (c) 2023-2024, NVIDIA CORPORATION. All rights reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -20,7 +20,7 @@

 set -e
 
-spark_version=${1:-"3.1.1"}
+spark_version=${1:-"3.2.0"}
 scala_version=${2:-"2.12"}
 # Split spark version into base version (e.g. 3.3.0) and suffix (e.g. SNAPSHOT)
 PRE_IFS=$IFS
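
The spark_version=${1:-"3.2.0"} line takes the first positional argument and falls back to 3.2.0 when none is given. The comment above hints at the base/suffix split that follows, which sits below the visible diff; a minimal sketch of how such an IFS-based split typically works (variable names here are illustrative, not from the script):

    PRE_IFS=$IFS
    # Split on "-" so "3.2.0-SNAPSHOT" becomes a base version plus a suffix
    IFS="-" read -r base_version suffix <<< "3.2.0-SNAPSHOT"
    IFS=$PRE_IFS
    echo "$base_version"   # 3.2.0
    echo "$suffix"         # SNAPSHOT
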
5 changes: 3 additions & 2 deletions jenkins/spark-nightly-build.sh
@@ -33,8 +33,9 @@ export M2DIR=${M2DIR:-"$WORKSPACE/.m2"}
 MVN="mvn -Dmaven.wagon.http.retryHandler.count=3 -DretryFailedDeploymentCount=3 ${MVN_OPT} -Psource-javadoc"
 
 DIST_PL="dist"
+## Get the default SPARK_VER from jenkins/version-def.sh
 function mvnEval {
-    $MVN help:evaluate -q -pl $DIST_PL $MVN_URM_MIRROR -Prelease311 -Dmaven.repo.local=$M2DIR -DforceStdout -Dexpression=$1
+    $MVN help:evaluate -q -pl $DIST_PL $MVN_URM_MIRROR -Prelease${SPARK_VER//./} -Dmaven.repo.local=$M2DIR -DforceStdout -Dexpression=$1
 }
 
 ART_ID=$(mvnEval project.artifactId)
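
The new -Prelease${SPARK_VER//./} replaces the hard-coded -Prelease311: the ${VAR//pattern/} expansion with an empty replacement deletes every dot from the version string, so the Maven profile id is derived from whatever SPARK_VER jenkins/version-def.sh provides. A quick illustration:

    SPARK_VER="3.2.0"
    # The expansion strips all dots, yielding the release profile suffix
    echo "-Prelease${SPARK_VER//./}"   # prints -Prelease320
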
@@ -176,7 +177,7 @@ distWithReducedPom "install"
 if [[ $SKIP_DEPLOY != 'true' ]]; then
     distWithReducedPom "deploy"
 
-    # this deploys selected submodules that is unconditionally built with Spark 3.1.1
+    # this deploys selected submodules that is unconditionally built with $SPARK_VER
     $MVN -B deploy -pl $DEPLOY_SUBMODULES \
         -Dbuildver=$SPARK_BASE_SHIM_VERSION \
         -DskipTests \
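
Since mvnEval now keys off SPARK_VER, a nightly run can target another shim purely through the environment. A hypothetical invocation, assuming the script sources jenkins/version-def.sh as the added comment suggests:

    # Left unset, SPARK_VER falls back to the 3.2.0 default in version-def.sh
    SPARK_VER=3.3.0 ./jenkins/spark-nightly-build.sh
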
6 changes: 3 additions & 3 deletions jenkins/spark-premerge-build.sh
@@ -83,12 +83,12 @@ mvn_verify() {
     # The jacoco coverage should have been collected, but because of how the shade plugin
     # works and jacoco we need to clean some things up so jacoco will only report for the
     # things we care about
-    SPK_VER=${JACOCO_SPARK_VER:-"311"}
+    SPK_VER=${JACOCO_SPARK_VER:-"320"}
     mkdir -p target/jacoco_classes/
     FILE=$(ls dist/target/rapids-4-spark_2.12-*.jar | grep -v test | xargs readlink -f)
     UDF_JAR=$(ls ./udf-compiler/target/spark${SPK_VER}/rapids-4-spark-udf_2.12-*-spark${SPK_VER}.jar | grep -v test | xargs readlink -f)
     pushd target/jacoco_classes/
-    jar xf $FILE com org rapids spark-shared "spark${JACOCO_SPARK_VER:-311}/"
+    jar xf $FILE com org rapids spark3xx-common "spark${JACOCO_SPARK_VER:-320}/"
     # extract the .class files in udf jar and replace the existing ones in spark3xx-ommon and spark$SPK_VER
     # because the class files in udf jar will be modified in aggregator's shade phase
     jar xf "$UDF_JAR" com/nvidia/spark/udf
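
With the default flipped to 320, the UDF_JAR glob now resolves under the spark320 output directory. Roughly, given the PROJECT_VER of 24.08.0-SNAPSHOT defined in jenkins/version-def.sh (the exact artifact name is illustrative):

    SPK_VER=${JACOCO_SPARK_VER:-"320"}
    # Matches e.g. ./udf-compiler/target/spark320/rapids-4-spark-udf_2.12-24.08.0-SNAPSHOT-spark320.jar
    ls ./udf-compiler/target/spark${SPK_VER}/rapids-4-spark-udf_2.12-*-spark${SPK_VER}.jar
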
@@ -222,7 +222,7 @@ ci_scala213() {
 }
 
 prepare_spark() {
-    spark_ver=${1:-'3.1.1'}
+    spark_ver=${1:-'3.2.0'}
     scala_ver=${2:-'2.12'}
 
     ARTF_ROOT="$(pwd)/.download"
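
prepare_spark reads both versions positionally via ${1:-...} and ${2:-...}, so the new defaults apply only when callers omit the arguments. Hypothetical calls:

    prepare_spark              # defaults: Spark 3.2.0, Scala 2.12
    prepare_spark 3.3.0 2.13   # explicit Spark and Scala versions
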
2 changes: 1 addition & 1 deletion jenkins/spark-tests.sh
@@ -59,7 +59,7 @@ $MVN_GET_CMD -DremoteRepositories=$PROJECT_TEST_REPO \
     -DgroupId=com.nvidia -DartifactId=rapids-4-spark-integration-tests_$SCALA_BINARY_VER -Dversion=$PROJECT_TEST_VER -Dclassifier=pytest -Dpackaging=tar.gz
 
 RAPIDS_INT_TESTS_HOME="$ARTF_ROOT/integration_tests/"
-# The version of pytest.tar.gz that is uploaded is the one built against spark311 but its being pushed without classifier for now
+# The version of pytest.tar.gz that is uploaded is the one built against spark320 but its being pushed without classifier for now
 RAPIDS_INT_TESTS_TGZ="$ARTF_ROOT/rapids-4-spark-integration-tests_${SCALA_BINARY_VER}-$PROJECT_TEST_VER-pytest.tar.gz"
 
 tmp_info=${TMP_INFO_FILE:-'/tmp/artifacts-build.info'}
4 changes: 2 additions & 2 deletions jenkins/version-def.sh
@@ -32,7 +32,7 @@ CUDA_CLASSIFIER=${CUDA_CLASSIFIER:-"cuda11"}
 CLASSIFIER=${CLASSIFIER:-"$CUDA_CLASSIFIER"} # default as CUDA_CLASSIFIER for compatibility
 PROJECT_VER=${PROJECT_VER:-"24.08.0-SNAPSHOT"}
 PROJECT_TEST_VER=${PROJECT_TEST_VER:-"24.08.0-SNAPSHOT"}
-SPARK_VER=${SPARK_VER:-"3.1.1"}
+SPARK_VER=${SPARK_VER:-"3.2.0"}
 SPARK_VER_213=${SPARK_VER_213:-"3.3.0"}
 # Make a best attempt to set the default value for the shuffle shim.
 # Note that SPARK_VER for non-Apache Spark flavors (i.e. databricks,
@@ -85,7 +85,7 @@ fi
 # PHASE_TYPE: CICD phase at which the script is called, to specify Spark shim versions.
 #   regular: noSnapshots + snapshots
 #   pre-release: noSnapshots only
-#   *: shim versions to build, e.g., PHASE_TYPE="311 321"
+#   *: shim versions to build, e.g., PHASE_TYPE="320 321"
 PHASE_TYPE=${PHASE_TYPE:-"regular"}
 case $PHASE_TYPE in
     # SPARK_SHIM_VERSIONS will be used for nightly artifact build
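
The case dispatch on PHASE_TYPE is truncated in this diff; a minimal sketch of how the three documented modes could select shim lists (every name besides PHASE_TYPE and SPARK_SHIM_VERSIONS is an assumption, not taken from the script):

    case $PHASE_TYPE in
        regular)      # noSnapshots + snapshots
            SPARK_SHIM_VERSIONS=("${NO_SNAPSHOTS[@]}" "${SNAPSHOTS[@]}")
            ;;
        pre-release)  # noSnapshots only
            SPARK_SHIM_VERSIONS=("${NO_SNAPSHOTS[@]}")
            ;;
        *)            # explicit list, e.g. PHASE_TYPE="320 321"
            SPARK_SHIM_VERSIONS=($PHASE_TYPE)
            ;;
    esac
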
