author     Josh Rosen <joshrosen@databricks.com>   2016-03-31 13:52:59 -0700
committer  Josh Rosen <joshrosen@databricks.com>   2016-03-31 13:52:59 -0700
commit     a7af6cd2eaf9f6ff491b9e1fabfc9c6f3d0f54bf (patch)
tree       ed2f42c05660c73bec8a1e396780d91205883bd8
parent     b11887c086974dbab18b9f53e99a26bbe06e9c86 (diff)
[SPARK-14281][TESTS] Fix java8-tests and simplify their build
This patch fixes a compilation / build break in Spark's `java8-tests` and refactors their POM to simplify the build. See individual commit messages for more details.

Author: Josh Rosen <joshrosen@databricks.com>

Closes #12073 from JoshRosen/fix-java8-tests.
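For context, the compilation break comes from the Spark 2.0 Java API change in which FlatMapFunction (and the related flatMapToPair / mapPartitions functional interfaces) return an Iterator rather than an Iterable, which is why the test changes below append `.iterator()` to the collections they build. A minimal standalone sketch of the new lambda style (the class name and the local[2] master are illustrative, not part of this patch):

    import java.util.Arrays;

    import org.apache.spark.SparkConf;
    import org.apache.spark.api.java.JavaRDD;
    import org.apache.spark.api.java.JavaSparkContext;

    public class FlatMapIteratorSketch {
      public static void main(String[] args) {
        JavaSparkContext sc = new JavaSparkContext(
            new SparkConf().setMaster("local[2]").setAppName("flatmap-sketch"));
        JavaRDD<String> lines = sc.parallelize(Arrays.asList("Hello World!"));
        // Pre-2.0 style no longer compiles, because the lambda returned an Iterable:
        //   lines.flatMap(x -> Arrays.asList(x.split(" ")));
        // 2.0 style: FlatMapFunction#call returns an Iterator, hence the .iterator() wrapping
        // used throughout this patch.
        JavaRDD<String> words = lines.flatMap(x -> Arrays.asList(x.split(" ")).iterator());
        System.out.println(words.collect());  // [Hello, World!]
        sc.stop();
      }
    }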
-rw-r--r--  docs/building-spark.md                                                              8
-rw-r--r--  external/java8-tests/README.md                                                      8
-rw-r--r--  external/java8-tests/pom.xml                                                       75
-rw-r--r--  external/java8-tests/src/test/java/org/apache/spark/Java8APISuite.java             10
-rw-r--r--  external/java8-tests/src/test/java/org/apache/spark/streaming/Java8APISuite.java   18
-rw-r--r--  external/java8-tests/src/test/resources/log4j.properties                            1
-rw-r--r--  pom.xml                                                                            31
7 files changed, 46 insertions(+), 105 deletions(-)
diff --git a/docs/building-spark.md b/docs/building-spark.md
index 1e202acb9e..13aa80496e 100644
--- a/docs/building-spark.md
+++ b/docs/building-spark.md
@@ -180,14 +180,14 @@ For help in setting up IntelliJ IDEA or Eclipse for Spark development, and troub
Running only Java 8 tests and nothing else.
- mvn install -DskipTests -Pjava8-tests
+ mvn install -DskipTests
+ mvn -pl :java8-tests_2.11 test
or
- sbt -Pjava8-tests java8-tests/test
+ sbt java8-tests/test
-Java 8 tests are run when `-Pjava8-tests` profile is enabled, they will run in spite of `-DskipTests`.
-For these tests to run your system must have a JDK 8 installation.
+Java 8 tests are automatically enabled when a Java 8 JDK is detected.
If you have JDK 8 installed but it is not the system default, you can set JAVA_HOME to point to JDK 8 before running the tests.
# Building for PySpark on YARN
diff --git a/external/java8-tests/README.md b/external/java8-tests/README.md
index dc9e87f2ee..aa87901695 100644
--- a/external/java8-tests/README.md
+++ b/external/java8-tests/README.md
@@ -8,16 +8,14 @@ to your Java location. The set-up depends a bit on the build system:
`-java-home` to the sbt launch script. If a Java 8 JDK is detected sbt will automatically
include the Java 8 test project.
- `$ JAVA_HOME=/opt/jdk1.8.0/ build/sbt clean "test-only org.apache.spark.Java8APISuite"`
+ `$ JAVA_HOME=/opt/jdk1.8.0/ build/sbt clean java8-tests/test`
* For Maven users,
- Maven users can also refer to their Java 8 directory using JAVA_HOME. However, Maven will not
- automatically detect the presence of a Java 8 JDK, so a special build profile `-Pjava8-tests`
- must be used.
+ Maven users can also refer to their Java 8 directory using JAVA_HOME.
`$ JAVA_HOME=/opt/jdk1.8.0/ mvn clean install -DskipTests`
- `$ JAVA_HOME=/opt/jdk1.8.0/ mvn test -Pjava8-tests -DwildcardSuites=org.apache.spark.Java8APISuite`
+ `$ JAVA_HOME=/opt/jdk1.8.0/ mvn -pl :java8-tests_2.11 test`
Note that the above command can only be run from project root directory since this module
depends on core and the test-jars of core and streaming. This means an install step is
diff --git a/external/java8-tests/pom.xml b/external/java8-tests/pom.xml
index 0ad9c5303a..f5a06467ee 100644
--- a/external/java8-tests/pom.xml
+++ b/external/java8-tests/pom.xml
@@ -27,7 +27,7 @@
<groupId>org.apache.spark</groupId>
<artifactId>java8-tests_2.11</artifactId>
<packaging>pom</packaging>
- <name>Spark Project Java8 Tests POM</name>
+ <name>Spark Project Java 8 Tests</name>
<properties>
<sbt.project.name>java8-tests</sbt.project.name>
@@ -64,11 +64,6 @@
</dependency>
</dependencies>
- <profiles>
- <profile>
- <id>java8-tests</id>
- </profile>
- </profiles>
<build>
<plugins>
<plugin>
@@ -87,74 +82,26 @@
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-surefire-plugin</artifactId>
- <executions>
- <execution>
- <id>test</id>
- <goals>
- <goal>test</goal>
- </goals>
- </execution>
- </executions>
- <configuration>
- <systemPropertyVariables>
- <!-- For some reason surefire isn't setting this log4j file on the
- test classpath automatically. So we add it manually. -->
- <log4j.configuration>
- file:src/test/resources/log4j.properties
- </log4j.configuration>
- </systemPropertyVariables>
- <skipTests>false</skipTests>
- <includes>
- <include>**/Suite*.java</include>
- <include>**/*Suite.java</include>
- </includes>
- </configuration>
- </plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
- <executions>
- <execution>
- <id>test-compile-first</id>
- <phase>process-test-resources</phase>
- <goals>
- <goal>testCompile</goal>
- </goals>
- </execution>
- </executions>
<configuration>
- <fork>true</fork>
- <verbose>true</verbose>
<forceJavacCompilerUse>true</forceJavacCompilerUse>
<source>1.8</source>
- <compilerVersion>1.8</compilerVersion>
<target>1.8</target>
- <encoding>UTF-8</encoding>
- <maxmem>1024m</maxmem>
+ <compilerVersion>1.8</compilerVersion>
</configuration>
</plugin>
<plugin>
- <!-- disabled -->
<groupId>net.alchim31.maven</groupId>
<artifactId>scala-maven-plugin</artifactId>
- <executions>
- <execution>
- <phase>none</phase>
- </execution>
- <execution>
- <id>scala-compile-first</id>
- <phase>none</phase>
- </execution>
- <execution>
- <id>scala-test-compile-first</id>
- <phase>none</phase>
- </execution>
- <execution>
- <id>attach-scaladocs</id>
- <phase>none</phase>
- </execution>
- </executions>
+ <configuration>
+ <javacArgs>
+ <javacArg>-source</javacArg>
+ <javacArg>1.8</javacArg>
+ <javacArg>-target</javacArg>
+ <javacArg>1.8</javacArg>
+ <javacArg>-Xlint:all,-serial,-path</javacArg>
+ </javacArgs>
+ </configuration>
</plugin>
</plugins>
</build>
diff --git a/external/java8-tests/src/test/java/org/apache/spark/Java8APISuite.java b/external/java8-tests/src/test/java/org/apache/spark/Java8APISuite.java
index c0b58e713f..6ac5ca9cf5 100644
--- a/external/java8-tests/src/test/java/org/apache/spark/Java8APISuite.java
+++ b/external/java8-tests/src/test/java/org/apache/spark/Java8APISuite.java
@@ -188,7 +188,7 @@ public class Java8APISuite implements Serializable {
public void flatMap() {
JavaRDD<String> rdd = sc.parallelize(Arrays.asList("Hello World!",
"The quick brown fox jumps over the lazy dog."));
- JavaRDD<String> words = rdd.flatMap(x -> Arrays.asList(x.split(" ")));
+ JavaRDD<String> words = rdd.flatMap(x -> Arrays.asList(x.split(" ")).iterator());
Assert.assertEquals("Hello", words.first());
Assert.assertEquals(11, words.count());
@@ -198,7 +198,7 @@ public class Java8APISuite implements Serializable {
for (String word : s.split(" ")) {
pairs2.add(new Tuple2<>(word, word));
}
- return pairs2;
+ return pairs2.iterator();
});
Assert.assertEquals(new Tuple2<>("Hello", "Hello"), pairs.first());
@@ -209,7 +209,7 @@ public class Java8APISuite implements Serializable {
for (String word : s.split(" ")) {
lengths.add((double) word.length());
}
- return lengths;
+ return lengths.iterator();
});
Assert.assertEquals(5.0, doubles.first(), 0.01);
@@ -227,7 +227,7 @@ public class Java8APISuite implements Serializable {
// Regression test for SPARK-668:
JavaPairRDD<String, Integer> swapped =
- pairRDD.flatMapToPair(x -> Collections.singletonList(x.swap()));
+ pairRDD.flatMapToPair(x -> Collections.singletonList(x.swap()).iterator());
swapped.collect();
// There was never a bug here, but it's worth testing:
@@ -242,7 +242,7 @@ public class Java8APISuite implements Serializable {
while (iter.hasNext()) {
sum += iter.next();
}
- return Collections.singletonList(sum);
+ return Collections.singletonList(sum).iterator();
});
Assert.assertEquals("[3, 7]", partitionSums.collect().toString());
diff --git a/external/java8-tests/src/test/java/org/apache/spark/streaming/Java8APISuite.java b/external/java8-tests/src/test/java/org/apache/spark/streaming/Java8APISuite.java
index 604d818ef1..67bc64a444 100644
--- a/external/java8-tests/src/test/java/org/apache/spark/streaming/Java8APISuite.java
+++ b/external/java8-tests/src/test/java/org/apache/spark/streaming/Java8APISuite.java
@@ -29,6 +29,7 @@ import org.junit.Test;
import org.apache.spark.Accumulator;
import org.apache.spark.HashPartitioner;
+import org.apache.spark.api.java.Optional;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.PairFunction;
@@ -95,7 +96,7 @@ public class Java8APISuite extends LocalJavaStreamingContext implements Serializ
while (in.hasNext()) {
out = out + in.next().toUpperCase();
}
- return Lists.newArrayList(out);
+ return Lists.newArrayList(out).iterator();
});
JavaTestUtils.attachTestOutputStream(mapped);
List<List<String>> result = JavaTestUtils.runStreams(ssc, 2, 2);
@@ -351,7 +352,8 @@ public class Java8APISuite extends LocalJavaStreamingContext implements Serializ
Arrays.asList("a", "t", "h", "l", "e", "t", "i", "c", "s"));
JavaDStream<String> stream = JavaTestUtils.attachTestInputStream(ssc, inputData, 1);
- JavaDStream<String> flatMapped = stream.flatMap(s -> Lists.newArrayList(s.split("(?!^)")));
+ JavaDStream<String> flatMapped = stream.flatMap(
+ s -> Lists.newArrayList(s.split("(?!^)")).iterator());
JavaTestUtils.attachTestOutputStream(flatMapped);
List<List<String>> result = JavaTestUtils.runStreams(ssc, 3, 3);
@@ -360,8 +362,8 @@ public class Java8APISuite extends LocalJavaStreamingContext implements Serializ
@Test
public void testForeachRDD() {
- final Accumulator<Integer> accumRdd = ssc.sc().accumulator(0);
- final Accumulator<Integer> accumEle = ssc.sc().accumulator(0);
+ final Accumulator<Integer> accumRdd = ssc.sparkContext().accumulator(0);
+ final Accumulator<Integer> accumEle = ssc.sparkContext().accumulator(0);
List<List<Integer>> inputData = Arrays.asList(
Arrays.asList(1,1,1),
Arrays.asList(1,1,1));
@@ -375,7 +377,7 @@ public class Java8APISuite extends LocalJavaStreamingContext implements Serializ
});
// This is a test to make sure foreachRDD(VoidFunction2) can be called from Java
- stream.foreachRDD((rdd, time) -> null);
+ stream.foreachRDD((rdd, time) -> { return; });
JavaTestUtils.runStreams(ssc, 2, 2);
@@ -423,7 +425,7 @@ public class Java8APISuite extends LocalJavaStreamingContext implements Serializ
for (String letter : s.split("(?!^)")) {
out.add(new Tuple2<>(s.length(), letter));
}
- return out;
+ return out.iterator();
});
JavaTestUtils.attachTestOutputStream(flatMapped);
@@ -541,7 +543,7 @@ public class Java8APISuite extends LocalJavaStreamingContext implements Serializ
Tuple2<String, Integer> next = in.next();
out.add(next.swap());
}
- return out;
+ return out.iterator();
});
JavaTestUtils.attachTestOutputStream(reversed);
@@ -598,7 +600,7 @@ public class Java8APISuite extends LocalJavaStreamingContext implements Serializ
for (Character s : in._1().toCharArray()) {
out.add(new Tuple2<>(in._2(), s.toString()));
}
- return out;
+ return out.iterator();
});
JavaTestUtils.attachTestOutputStream(flatMapped);
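One subtlety in the streaming suite changes above: the two-argument foreachRDD now takes a VoidFunction2, whose call method returns void. An expression lambda such as `(rdd, time) -> null` is not void-compatible in Java 8 (null is not a statement expression), so the test switches to a block body. A small sketch of the distinction, using only the functional interfaces (the class name is illustrative):

    import org.apache.spark.api.java.JavaRDD;
    import org.apache.spark.api.java.function.VoidFunction2;
    import org.apache.spark.streaming.Time;

    public class VoidLambdaSketch {
      public static void main(String[] args) {
        // A block-bodied lambda is void-compatible, so it can target VoidFunction2,
        // whose call(...) returns void -- this mirrors the fixed test above:
        VoidFunction2<JavaRDD<Integer>, Time> ok = (rdd, time) -> { return; };
        // An expression lambda that yields a value is not void-compatible, so the old
        // form only matched the former Function2<..., Void> overload and no longer compiles:
        //   VoidFunction2<JavaRDD<Integer>, Time> bad = (rdd, time) -> null;
        System.out.println("void-compatible lambda assigned: " + (ok != null));
      }
    }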
diff --git a/external/java8-tests/src/test/resources/log4j.properties b/external/java8-tests/src/test/resources/log4j.properties
index eb3b1999eb..edbecdae92 100644
--- a/external/java8-tests/src/test/resources/log4j.properties
+++ b/external/java8-tests/src/test/resources/log4j.properties
@@ -25,4 +25,3 @@ log4j.appender.file.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss.SSS} %t %p %c{
# Ignore messages below warning level from Jetty, because it's a bit verbose
log4j.logger.org.spark-project.jetty=WARN
-org.spark-project.jetty.LEVEL=WARN
diff --git a/pom.xml b/pom.xml
index 25d6136421..37606926d7 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1920,6 +1920,7 @@
<JAVA_HOME>${test.java.home}</JAVA_HOME>
</environmentVariables>
<systemProperties>
+ <log4j.configuration>file:src/test/resources/log4j.properties</log4j.configuration>
<derby.system.durability>test</derby.system.durability>
<java.awt.headless>true</java.awt.headless>
<java.io.tmpdir>${project.build.directory}/tmp</java.io.tmpdir>
@@ -1935,6 +1936,14 @@
<failIfNoTests>false</failIfNoTests>
<excludedGroups>${test.exclude.tags}</excludedGroups>
</configuration>
+ <executions>
+ <execution>
+ <id>test</id>
+ <goals>
+ <goal>test</goal>
+ </goals>
+ </execution>
+ </executions>
</plugin>
<!-- Scalatest runs all Scala tests -->
<plugin>
@@ -1959,6 +1968,7 @@
<JAVA_HOME>${test.java.home}</JAVA_HOME>
</environmentVariables>
<systemProperties>
+ <log4j.configuration>file:src/test/resources/log4j.properties</log4j.configuration>
<derby.system.durability>test</derby.system.durability>
<java.awt.headless>true</java.awt.headless>
<java.io.tmpdir>${project.build.directory}/tmp</java.io.tmpdir>
@@ -2343,27 +2353,12 @@
<profile>
<id>java8-tests</id>
- <build>
- <plugins>
- <!-- Needed for publishing test jars as it is needed by java8-tests -->
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-jar-plugin</artifactId>
- <executions>
- <execution>
- <goals>
- <goal>test-jar</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
- </plugins>
- </build>
-
+ <activation>
+ <jdk>[1.8,)</jdk>
+ </activation>
<modules>
<module>external/java8-tests</module>
</modules>
-
</profile>
<profile>