diff options
author | Marcelo Vanzin <vanzin@cloudera.com> | 2016-04-04 16:52:21 -0700 |
---|---|---|
committer | Josh Rosen <joshrosen@databricks.com> | 2016-04-04 16:52:22 -0700 |
commit | 24d7d2e453ab5eef6099a32fb9e8ed60f6ada93a (patch) | |
tree | 2069beb0e471afa4e1b1867efe786100b7f77f79 /pom.xml | |
parent | 400b2f863ffaa01a34a8dae1541c61526fef908b (diff) | |
download | spark-24d7d2e453ab5eef6099a32fb9e8ed60f6ada93a.tar.gz spark-24d7d2e453ab5eef6099a32fb9e8ed60f6ada93a.tar.bz2 spark-24d7d2e453ab5eef6099a32fb9e8ed60f6ada93a.zip |
[SPARK-13579][BUILD] Stop building the main Spark assembly.
This change modifies the "assembly/" module to just copy needed
dependencies to its build directory, and modifies the packaging
script to pick those up (and remove duplicate jar packages in the
examples module).
I also made some minor adjustments to dependencies to remove some
test jars from the final packaging, and remove jars that conflict with each
other when packaged separately (e.g. servlet api).
Also note that this change restores guava in applications' classpaths, even
though it's still shaded inside Spark. This is now needed for the Hadoop
libraries that are packaged with Spark, which now are not processed by
the shade plugin.
Author: Marcelo Vanzin <vanzin@cloudera.com>
Closes #11796 from vanzin/SPARK-13579.
Diffstat (limited to 'pom.xml')
-rw-r--r-- | pom.xml | 44 |
1 file changed, 31 insertions, 13 deletions
@@ -185,6 +185,10 @@ <!-- Modules that copy jars to the build directory should do so under this location. --> <jars.target.dir>${project.build.directory}/scala-${scala.binary.version}/jars</jars.target.dir> + <!-- Allow modules to enable / disable certain build plugins easily. --> + <build.testJarPhase>prepare-package</build.testJarPhase> + <build.copyDependenciesPhase>none</build.copyDependenciesPhase> + <!-- Dependency scopes that can be overridden by enabling certain profiles. These profiles are declared in the projects that build assemblies. @@ -238,15 +242,6 @@ </pluginRepositories> <dependencies> <!-- - This is a dummy dependency that is used along with the shading plug-in - to create effective poms on publishing (see SPARK-3812). - --> - <dependency> - <groupId>org.spark-project.spark</groupId> - <artifactId>unused</artifactId> - <version>1.0.0</version> - </dependency> - <!-- This is needed by the scalatest plugin, and so is declared here to be available in all child modules, just as scalatest is run in all children --> @@ -833,6 +828,14 @@ </exclusion> </exclusions> </dependency> + <!-- avro-mapred for some reason depends on avro-ipc's test jar, so undo that. 
--> + <dependency> + <groupId>org.apache.avro</groupId> + <artifactId>avro-ipc</artifactId> + <classifier>tests</classifier> + <version>${avro.version}</version> + <scope>test</scope> + </dependency> <dependency> <groupId>org.apache.avro</groupId> <artifactId>avro-mapred</artifactId> @@ -1521,6 +1524,10 @@ <groupId>org.codehaus.groovy</groupId> <artifactId>groovy-all</artifactId> </exclusion> + <exclusion> + <groupId>javax.servlet</groupId> + <artifactId>servlet-api</artifactId> + </exclusion> </exclusions> </dependency> @@ -1916,6 +1923,7 @@ --> <SPARK_DIST_CLASSPATH>${test_classpath}</SPARK_DIST_CLASSPATH> <SPARK_PREPEND_CLASSES>1</SPARK_PREPEND_CLASSES> + <SPARK_SCALA_VERSION>${scala.binary.version}</SPARK_SCALA_VERSION> <SPARK_TESTING>1</SPARK_TESTING> <JAVA_HOME>${test.java.home}</JAVA_HOME> </environmentVariables> @@ -1964,6 +1972,7 @@ --> <SPARK_DIST_CLASSPATH>${test_classpath}</SPARK_DIST_CLASSPATH> <SPARK_PREPEND_CLASSES>1</SPARK_PREPEND_CLASSES> + <SPARK_SCALA_VERSION>${scala.binary.version}</SPARK_SCALA_VERSION> <SPARK_TESTING>1</SPARK_TESTING> <JAVA_HOME>${test.java.home}</JAVA_HOME> </environmentVariables> @@ -2146,6 +2155,7 @@ <version>2.10</version> <executions> <execution> + <id>generate-test-classpath</id> <phase>test-compile</phase> <goals> <goal>build-classpath</goal> @@ -2155,6 +2165,17 @@ <outputProperty>test_classpath</outputProperty> </configuration> </execution> + <execution> + <id>copy-module-dependencies</id> + <phase>${build.copyDependenciesPhase}</phase> + <goals> + <goal>copy-dependencies</goal> + </goals> + <configuration> + <includeScope>runtime</includeScope> + <outputDirectory>${jars.target.dir}</outputDirectory> + </configuration> + </execution> </executions> </plugin> @@ -2169,9 +2190,6 @@ <shadedArtifactAttached>false</shadedArtifactAttached> <artifactSet> <includes> - <!-- At a minimum we must include this to force effective pom generation --> - <include>org.spark-project.spark:unused</include> - 
<include>org.eclipse.jetty:jetty-io</include> <include>org.eclipse.jetty:jetty-http</include> <include>org.eclipse.jetty:jetty-continuation</include> @@ -2302,7 +2320,7 @@ <executions> <execution> <id>prepare-test-jar</id> - <phase>prepare-package</phase> + <phase>${build.testJarPhase}</phase> <goals> <goal>test-jar</goal> </goals> |