From 24d7d2e453ab5eef6099a32fb9e8ed60f6ada93a Mon Sep 17 00:00:00 2001
From: Marcelo Vanzin
Date: Mon, 4 Apr 2016 16:52:21 -0700
Subject: [SPARK-13579][BUILD] Stop building the main Spark assembly.

This change modifies the "assembly/" module to just copy needed
dependencies to its build directory, and modifies the packaging script
to pick those up (and remove duplicate jars packaged in the examples
module).

I also made some minor adjustments to dependencies to remove some test
jars from the final packaging, and remove jars that conflict with each
other when packaged separately (e.g. servlet api).

Also note that this change restores guava in applications' classpaths,
even though it's still shaded inside Spark. This is now needed for the
Hadoop libraries that are packaged with Spark, which now are not
processed by the shade plugin.

Author: Marcelo Vanzin

Closes #11796 from vanzin/SPARK-13579.
---
 dev/make-distribution.sh | 25 ++++++++++++++++---------
 1 file changed, 16 insertions(+), 9 deletions(-)

(limited to 'dev/make-distribution.sh')

diff --git a/dev/make-distribution.sh b/dev/make-distribution.sh
index dbdd42ff9e..4f7544f6ea 100755
--- a/dev/make-distribution.sh
+++ b/dev/make-distribution.sh
@@ -160,28 +160,35 @@ echo -e "\$ ${BUILD_COMMAND[@]}\n"

 # Make directories
 rm -rf "$DISTDIR"
-mkdir -p "$DISTDIR/lib"
+mkdir -p "$DISTDIR/jars"
 echo "Spark $VERSION$GITREVSTRING built for Hadoop $SPARK_HADOOP_VERSION" > "$DISTDIR/RELEASE"
 echo "Build flags: $@" >> "$DISTDIR/RELEASE"

 # Copy jars
-cp "$SPARK_HOME"/assembly/target/scala*/*assembly*hadoop*.jar "$DISTDIR/lib/"
-# This will fail if the -Pyarn profile is not provided
-# In this case, silence the error and ignore the return code of this command
-cp "$SPARK_HOME"/common/network-yarn/target/scala*/spark-*-yarn-shuffle.jar "$DISTDIR/lib/" &> /dev/null || :
+cp "$SPARK_HOME"/assembly/target/scala*/jars/* "$DISTDIR/jars/"
+
+# Only create the yarn directory if the yarn artifacts were built.
+if [ -f "$SPARK_HOME"/common/network-yarn/target/scala*/spark-*-yarn-shuffle.jar ]; then
+  mkdir "$DISTDIR"/yarn
+  cp "$SPARK_HOME"/common/network-yarn/target/scala*/spark-*-yarn-shuffle.jar "$DISTDIR/yarn"
+fi

 # Copy examples and dependencies
 mkdir -p "$DISTDIR/examples/jars"
 cp "$SPARK_HOME"/examples/target/scala*/jars/* "$DISTDIR/examples/jars"

+# Deduplicate jars that have already been packaged as part of the main Spark dependencies.
+for f in "$DISTDIR/examples/jars/"*; do
+  name=$(basename "$f")
+  if [ -f "$DISTDIR/jars/$name" ]; then
+    rm "$DISTDIR/examples/jars/$name"
+  fi
+done
+
 # Copy example sources (needed for python and SQL)
 mkdir -p "$DISTDIR/examples/src/main"
 cp -r "$SPARK_HOME"/examples/src/main "$DISTDIR/examples/src/"

-if [ "$SPARK_HIVE" == "1" ]; then
-  cp "$SPARK_HOME"/lib_managed/jars/datanucleus*.jar "$DISTDIR/lib/"
-fi
-
 # Copy license and ASF files
 cp "$SPARK_HOME/LICENSE" "$DISTDIR"
 cp -r "$SPARK_HOME/licenses" "$DISTDIR"
--
cgit v1.2.3
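
For context, a rough sketch (not part of the commit) of how the modified packaging script might be invoked and the distribution layout it produces after this change; the --name/--tgz flags and Maven profiles shown are assumptions based on typical Spark builds of this era, not taken from this patch:

    # Hypothetical invocation of the packaging script (flags and profiles are assumptions).
    ./dev/make-distribution.sh --name custom --tgz -Pyarn -Phive -Phadoop-2.7

    # Expected layout under the resulting distribution directory, per the diff above:
    #   jars/            Spark and dependency jars copied from assembly/target (replaces the old lib/)
    #   yarn/            spark-*-yarn-shuffle.jar, created only when the yarn artifacts were built
    #   examples/jars/   example jars, minus any duplicates already present under jars/
    #   examples/src/    example sources (needed for python and SQL)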