Skip to content

Commit

Permalink
Deploy the sources and Javadoc JARs in the nightly CICD
Browse files Browse the repository at this point in the history
Deploy the sources and Javadoc JARs to make sure the nightly CICD includes all JARs required by the Sonatype release

Deploy the dist JARs in the final step to ensure that the POM files are not overwritten

Signed-off-by: Tim Liu <[email protected]>
  • Loading branch information
NvTimLiu committed Jan 6, 2025
1 parent 4df6d60 commit 718bcc1
Showing 1 changed file with 15 additions and 2 deletions.
17 changes: 15 additions & 2 deletions jenkins/spark-nightly-build.sh
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
#!/bin/bash
#
# Copyright (c) 2020-2024, NVIDIA CORPORATION. All rights reserved.
# Copyright (c) 2020-2025, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
Expand All @@ -24,6 +24,7 @@ MVN="mvn -Dmaven.wagon.http.retryHandler.count=3 -DretryFailedDeploymentCount=3

DIST_PL="dist"
DIST_PATH="$DIST_PL" # The path of the dist module is used only outside of the mvn cmd
SQL_PLGUIN_PATH="sql-plugin"
SCALA_BINARY_VER=${SCALA_BINARY_VER:-"2.12"}
if [ $SCALA_BINARY_VER == "2.13" ]; then
# Run scala2.13 build and test against JDK17
Expand All @@ -33,6 +34,7 @@ if [ $SCALA_BINARY_VER == "2.13" ]; then

MVN="$MVN -f scala2.13/"
DIST_PATH="scala2.13/$DIST_PL"
SQL_PLGUIN_PATH="scala2.13/$SQL_PLGUIN_PATH"
fi

WORKSPACE=${WORKSPACE:-$(pwd)}
Expand Down Expand Up @@ -180,7 +182,6 @@ installDistArtifact ${DEFAULT_CUDA_CLASSIFIER}
distWithReducedPom "install"

if [[ $SKIP_DEPLOY != 'true' ]]; then
distWithReducedPom "deploy"

# this deploys selected submodules that is unconditionally built with Spark 3.2.0
$MVN -B deploy -pl "!${DIST_PL}" \
Expand All @@ -189,6 +190,18 @@ if [[ $SKIP_DEPLOY != 'true' ]]; then
-Dmaven.scaladoc.skip -Dmaven.scalastyle.skip=true \
$MVN_URM_MIRROR -Dmaven.repo.local=$M2DIR \
-Dcuda.version=$DEFAULT_CUDA_CLASSIFIER

# The dist module does not produce javadoc and sources JARs; use the 'sql-plugin' ones instead
SQL_ART_PATH="$(echo -n $SQL_PLGUIN_PATH/target/spark*)/rapids-4-spark-sql_${SCALA_BINARY_VER}-${ART_VER}"
cp $SQL_ART_PATH-sources.jar $DIST_PATH/target/${ART_ID}-${ART_VER}-sources.jar
cp $SQL_ART_PATH-javadoc.jar $DIST_PATH/target/${ART_ID}-${ART_VER}-javadoc.jar
# Deploy the sources and javadoc JARs to make sure the nightly CICD includes all JARs required by the Sonatype release
DEPLOY_TYPES="${DEPLOY_TYPES},jar,jar"
DEPLOY_FILES="${DEPLOY_FILES},$DIST_PL/target/${ART_ID}-${ART_VER}-sources.jar,$DIST_PL/target/${ART_ID}-${ART_VER}-javadoc.jar"
DEPLOY_CLASSIFIERS="${DEPLOY_CLASSIFIERS},sources,javadoc"
ls ${DIST_PATH}/target/
# Deploy dist jars in the final step to ensure that the POM files are not overwritten
distWithReducedPom "deploy"
fi

# Parse Spark files from local mvn repo
Expand Down

0 comments on commit 718bcc1

Please sign in to comment.