Drop spark-3.1.x support for spark-rapids #11041

Closed
wants to merge 2 commits
4 changes: 2 additions & 2 deletions jenkins/hadoop-def.sh
@@ -1,6 +1,6 @@
#!/bin/bash
#
-# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
+# Copyright (c) 2023-2024, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,7 +20,7 @@

set -e

-spark_version=${1:-"3.1.1"}
+spark_version=${1:-"3.2.0"}
scala_version=${2:-"2.12"}
# Split spark version into base version (e.g. 3.3.0) and suffix (e.g. SNAPSHOT)
PRE_IFS=$IFS
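For context on the lines truncated below this hunk: the script takes the Spark version as an optional first argument (the `${1:-"3.2.0"}` default shown above) and then splits it into a base version and an optional suffix around the PRE_IFS/IFS save-and-restore. A minimal sketch of that pattern, with illustrative names and input rather than the script's exact ones:

```bash
#!/bin/bash
# Sketch of the IFS-based version split; names and input are illustrative.
spark_version=${1:-"3.2.0-SNAPSHOT"}      # hypothetical default with a suffix
PRE_IFS=$IFS
IFS='-'
read -r base_version suffix <<< "$spark_version"
IFS=$PRE_IFS                              # restore the original field separator
echo "base=$base_version suffix=$suffix"  # base=3.2.0 suffix=SNAPSHOT
```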
5 changes: 3 additions & 2 deletions jenkins/spark-nightly-build.sh
@@ -33,8 +33,9 @@ export M2DIR=${M2DIR:-"$WORKSPACE/.m2"}
MVN="mvn -Dmaven.wagon.http.retryHandler.count=3 -DretryFailedDeploymentCount=3 ${MVN_OPT} -Psource-javadoc"

DIST_PL="dist"
+## Get the default SPARK_VER from jenkins/version-def.sh
function mvnEval {
-$MVN help:evaluate -q -pl $DIST_PL $MVN_URM_MIRROR -Prelease311 -Dmaven.repo.local=$M2DIR -DforceStdout -Dexpression=$1
+$MVN help:evaluate -q -pl $DIST_PL $MVN_URM_MIRROR -Prelease${SPARK_VER//./} -Dmaven.repo.local=$M2DIR -DforceStdout -Dexpression=$1
}

ART_ID=$(mvnEval project.artifactId)
@@ -176,7 +177,7 @@ distWithReducedPom "install"
if [[ $SKIP_DEPLOY != 'true' ]]; then
distWithReducedPom "deploy"

-# this deploys selected submodules that is unconditionally built with Spark 3.1.1
+# this deploys selected submodules that are unconditionally built with $SPARK_VER
$MVN -B deploy -pl $DEPLOY_SUBMODULES \
-Dbuildver=$SPARK_BASE_SHIM_VERSION \
-DskipTests \
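The updated mvnEval derives the Maven profile name from SPARK_VER instead of hardcoding -Prelease311: bash's `${VAR//pattern/replacement}` substitution deletes every dot from the version string. A quick illustration of just that expansion:

```bash
#!/bin/bash
# ${SPARK_VER//./} replaces all occurrences of "." with nothing,
# turning a dotted version into the profile suffix the build expects.
SPARK_VER="3.2.0"
echo "-Prelease${SPARK_VER//./}"   # prints: -Prelease320
```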
8 changes: 4 additions & 4 deletions jenkins/spark-premerge-build.sh
@@ -1,6 +1,6 @@
#!/bin/bash
#
-# Copyright (c) 2020-2023, NVIDIA CORPORATION. All rights reserved.
+# Copyright (c) 2020-2024, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -83,12 +83,12 @@ mvn_verify() {
# The jacoco coverage should have been collected, but because of how the shade plugin
# works and jacoco we need to clean some things up so jacoco will only report for the
# things we care about
-SPK_VER=${JACOCO_SPARK_VER:-"311"}
+SPK_VER=${JACOCO_SPARK_VER:-"320"}
mkdir -p target/jacoco_classes/
FILE=$(ls dist/target/rapids-4-spark_2.12-*.jar | grep -v test | xargs readlink -f)
UDF_JAR=$(ls ./udf-compiler/target/spark${SPK_VER}/rapids-4-spark-udf_2.12-*-spark${SPK_VER}.jar | grep -v test | xargs readlink -f)
pushd target/jacoco_classes/
-jar xf $FILE com org rapids spark3xx-common "spark${JACOCO_SPARK_VER:-311}/"
+jar xf $FILE com org rapids spark3xx-common "spark${JACOCO_SPARK_VER:-320}/"
# extract the .class files in udf jar and replace the existing ones in spark3xx-common and spark$SPK_VER
# because the class files in udf jar will be modified in aggregator's shade phase
jar xf "$UDF_JAR" com/nvidia/spark/udf
@@ -219,7 +219,7 @@ ci_scala213() {
}

prepare_spark() {
-spark_ver=${1:-'3.1.1'}
+spark_ver=${1:-'3.2.0'}
scala_ver=${2:-'2.12'}

ARTF_ROOT="$(pwd)/.download"
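For readers skimming the diff: the mvn_verify cleanup above unpacks only the dist-jar packages that coverage should report on, then overwrites the shaded UDF classes with the pre-shade ones so JaCoCo can map coverage back to real sources. A condensed, illustrative version of that flow, with hypothetical jar paths:

```bash
#!/bin/bash
# Condensed sketch of the JaCoCo class cleanup; jar paths are placeholders.
SPK_VER="320"
DIST_JAR="$(pwd)/dist/target/rapids-4-spark_2.12-example.jar"
UDF_JAR="$(pwd)/udf-compiler/target/spark${SPK_VER}/rapids-4-spark-udf_2.12-example-spark${SPK_VER}.jar"
mkdir -p target/jacoco_classes/
pushd target/jacoco_classes/
# unpack only the packages coverage is reported for
jar xf "$DIST_JAR" com org rapids spark3xx-common "spark${SPK_VER}/"
# replace the shaded copies with the pre-shade udf classes
jar xf "$UDF_JAR" com/nvidia/spark/udf
popd
```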
2 changes: 1 addition & 1 deletion jenkins/spark-tests.sh
@@ -59,7 +59,7 @@ $MVN_GET_CMD -DremoteRepositories=$PROJECT_TEST_REPO \
-DgroupId=com.nvidia -DartifactId=rapids-4-spark-integration-tests_$SCALA_BINARY_VER -Dversion=$PROJECT_TEST_VER -Dclassifier=pytest -Dpackaging=tar.gz

RAPIDS_INT_TESTS_HOME="$ARTF_ROOT/integration_tests/"
-# The version of pytest.tar.gz that is uploaded is the one built against spark311 but its being pushed without classifier for now
+# The version of pytest.tar.gz that is uploaded is the one built against spark320, but it's being pushed without a classifier for now
RAPIDS_INT_TESTS_TGZ="$ARTF_ROOT/rapids-4-spark-integration-tests_${SCALA_BINARY_VER}-$PROJECT_TEST_VER-pytest.tar.gz"

tmp_info=${TMP_INFO_FILE:-'/tmp/artifacts-build.info'}
4 changes: 2 additions & 2 deletions jenkins/version-def.sh
@@ -32,7 +32,7 @@ CUDA_CLASSIFIER=${CUDA_CLASSIFIER:-"cuda11"}
CLASSIFIER=${CLASSIFIER:-"$CUDA_CLASSIFIER"} # default as CUDA_CLASSIFIER for compatibility
PROJECT_VER=${PROJECT_VER:-"24.08.0-SNAPSHOT"}
PROJECT_TEST_VER=${PROJECT_TEST_VER:-"24.08.0-SNAPSHOT"}
-SPARK_VER=${SPARK_VER:-"3.1.1"}
+SPARK_VER=${SPARK_VER:-"3.2.0"}
SPARK_VER_213=${SPARK_VER_213:-"3.3.0"}

Review comment (Collaborator): BTW, do you know if anyone relies on SPARK_VER_213 somewhere? It seems to be unused.

Reply (Collaborator Author): Yes, it seems unused.

# Make a best attempt to set the default value for the shuffle shim.
# Note that SPARK_VER for non-Apache Spark flavors (i.e. databricks,
@@ -85,7 +85,7 @@ fi
# PHASE_TYPE: CICD phase at which the script is called, to specify Spark shim versions.
# regular: noSnapshots + snapshots
# pre-release: noSnapshots only
-# *: shim versions to build, e.g., PHASE_TYPE="311 321"
+# *: shim versions to build, e.g., PHASE_TYPE="320 321"
PHASE_TYPE=${PHASE_TYPE:-"regular"}
case $PHASE_TYPE in
# SPARK_SHIM_VERSIONS will be used for nightly artifact build
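The case statement above is truncated in this view. To make the PHASE_TYPE contract concrete: the three comment cases map to a shell case that either picks a predefined shim list or treats the value itself as the list. A minimal sketch, where the version lists are hypothetical placeholders rather than the repo's actual ones:

```bash
#!/bin/bash
# Illustrative PHASE_TYPE dispatch; the shim lists here are hypothetical.
PHASE_TYPE=${PHASE_TYPE:-"regular"}
case $PHASE_TYPE in
    regular)     SPARK_SHIM_VERSIONS="320 321 330-SNAPSHOT" ;;  # noSnapshots + snapshots
    pre-release) SPARK_SHIM_VERSIONS="320 321" ;;               # noSnapshots only
    *)           SPARK_SHIM_VERSIONS=$PHASE_TYPE ;;             # explicit list, e.g. "320 321"
esac
echo "building shims: $SPARK_SHIM_VERSIONS"
```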
3 changes: 0 additions & 3 deletions pom.xml
@@ -811,9 +811,6 @@

<spark.shim.dest>${project.basedir}/target/${spark.version.classifier}/generated/src</spark.shim.dest>
<noSnapshot.buildvers>
-311,
-312,
-313,
320,
321,
321cdh,
3 changes: 0 additions & 3 deletions scala2.13/pom.xml
@@ -811,9 +811,6 @@

<spark.shim.dest>${project.basedir}/target/${spark.version.classifier}/generated/src</spark.shim.dest>
<noSnapshot.buildvers>
-311,
-312,
-313,
320,
321,
321cdh,