From a7af6cd2eaf9f6ff491b9e1fabfc9c6f3d0f54bf Mon Sep 17 00:00:00 2001
From: Josh Rosen
Date: Thu, 31 Mar 2016 13:52:59 -0700
Subject: [SPARK-14281][TESTS] Fix java8-tests and simplify their build

This patch fixes a compilation / build break in Spark's `java8-tests` and
refactors their POM to simplify the build. See individual commit messages for
more details.

Author: Josh Rosen

Closes #12073 from JoshRosen/fix-java8-tests.
---
 .../src/test/java/org/apache/spark/Java8APISuite.java  | 10 +++++-----
 .../java/org/apache/spark/streaming/Java8APISuite.java | 18 ++++++++++--------
 .../java8-tests/src/test/resources/log4j.properties    |  1 -
 3 files changed, 15 insertions(+), 14 deletions(-)

(limited to 'external/java8-tests/src')

diff --git a/external/java8-tests/src/test/java/org/apache/spark/Java8APISuite.java b/external/java8-tests/src/test/java/org/apache/spark/Java8APISuite.java
index c0b58e713f..6ac5ca9cf5 100644
--- a/external/java8-tests/src/test/java/org/apache/spark/Java8APISuite.java
+++ b/external/java8-tests/src/test/java/org/apache/spark/Java8APISuite.java
@@ -188,7 +188,7 @@ public class Java8APISuite implements Serializable {
   public void flatMap() {
     JavaRDD<String> rdd = sc.parallelize(Arrays.asList("Hello World!",
       "The quick brown fox jumps over the lazy dog."));
-    JavaRDD<String> words = rdd.flatMap(x -> Arrays.asList(x.split(" ")));
+    JavaRDD<String> words = rdd.flatMap(x -> Arrays.asList(x.split(" ")).iterator());

     Assert.assertEquals("Hello", words.first());
     Assert.assertEquals(11, words.count());
@@ -198,7 +198,7 @@ public class Java8APISuite implements Serializable {
       for (String word : s.split(" ")) {
         pairs2.add(new Tuple2<>(word, word));
       }
-      return pairs2;
+      return pairs2.iterator();
     });

     Assert.assertEquals(new Tuple2<>("Hello", "Hello"), pairs.first());
@@ -209,7 +209,7 @@ public class Java8APISuite implements Serializable {
       for (String word : s.split(" ")) {
         lengths.add((double) word.length());
       }
-      return lengths;
+      return lengths.iterator();
     });

     Assert.assertEquals(5.0, doubles.first(), 0.01);
@@ -227,7 +227,7 @@ public class Java8APISuite implements Serializable {

     // Regression test for SPARK-668:
     JavaPairRDD<String, Integer> swapped =
-      pairRDD.flatMapToPair(x -> Collections.singletonList(x.swap()));
+      pairRDD.flatMapToPair(x -> Collections.singletonList(x.swap()).iterator());
     swapped.collect();

     // There was never a bug here, but it's worth testing:
@@ -242,7 +242,7 @@ public class Java8APISuite implements Serializable {
       while (iter.hasNext()) {
         sum += iter.next();
       }
-      return Collections.singletonList(sum);
+      return Collections.singletonList(sum).iterator();
     });

     Assert.assertEquals("[3, 7]", partitionSums.collect().toString());
diff --git a/external/java8-tests/src/test/java/org/apache/spark/streaming/Java8APISuite.java b/external/java8-tests/src/test/java/org/apache/spark/streaming/Java8APISuite.java
index 604d818ef1..67bc64a444 100644
--- a/external/java8-tests/src/test/java/org/apache/spark/streaming/Java8APISuite.java
+++ b/external/java8-tests/src/test/java/org/apache/spark/streaming/Java8APISuite.java
@@ -29,6 +29,7 @@ import org.junit.Test;

 import org.apache.spark.Accumulator;
 import org.apache.spark.HashPartitioner;
+import org.apache.spark.api.java.Optional;
 import org.apache.spark.api.java.JavaPairRDD;
 import org.apache.spark.api.java.JavaRDD;
 import org.apache.spark.api.java.function.PairFunction;
@@ -95,7 +96,7 @@ public class Java8APISuite extends LocalJavaStreamingContext implements Serializ
       while (in.hasNext()) {
         out = out + in.next().toUpperCase();
       }
-      return Lists.newArrayList(out);
+      return Lists.newArrayList(out).iterator();
     });
     JavaTestUtils.attachTestOutputStream(mapped);
     List<List<String>> result = JavaTestUtils.runStreams(ssc, 2, 2);
@@ -351,7 +352,8 @@ public class Java8APISuite extends LocalJavaStreamingContext implements Serializ
       Arrays.asList("a", "t", "h", "l", "e", "t", "i", "c", "s"));

     JavaDStream<String> stream = JavaTestUtils.attachTestInputStream(ssc, inputData, 1);
-    JavaDStream<String> flatMapped = stream.flatMap(s -> Lists.newArrayList(s.split("(?!^)")));
+    JavaDStream<String> flatMapped = stream.flatMap(
+        s -> Lists.newArrayList(s.split("(?!^)")).iterator());
     JavaTestUtils.attachTestOutputStream(flatMapped);
     List<List<String>> result = JavaTestUtils.runStreams(ssc, 3, 3);

@@ -360,8 +362,8 @@ public class Java8APISuite extends LocalJavaStreamingContext implements Serializ

   @Test
   public void testForeachRDD() {
-    final Accumulator<Integer> accumRdd = ssc.sc().accumulator(0);
-    final Accumulator<Integer> accumEle = ssc.sc().accumulator(0);
+    final Accumulator<Integer> accumRdd = ssc.sparkContext().accumulator(0);
+    final Accumulator<Integer> accumEle = ssc.sparkContext().accumulator(0);
     List<List<Integer>> inputData = Arrays.asList(
         Arrays.asList(1,1,1),
         Arrays.asList(1,1,1));
@@ -375,7 +377,7 @@ public class Java8APISuite extends LocalJavaStreamingContext implements Serializ
     });

     // This is a test to make sure foreachRDD(VoidFunction2) can be called from Java
-    stream.foreachRDD((rdd, time) -> null);
+    stream.foreachRDD((rdd, time) -> { return; });

     JavaTestUtils.runStreams(ssc, 2, 2);

@@ -423,7 +425,7 @@ public class Java8APISuite extends LocalJavaStreamingContext implements Serializ
       for (String letter : s.split("(?!^)")) {
         out.add(new Tuple2<>(s.length(), letter));
       }
-      return out;
+      return out.iterator();
     });

     JavaTestUtils.attachTestOutputStream(flatMapped);
@@ -541,7 +543,7 @@ public class Java8APISuite extends LocalJavaStreamingContext implements Serializ
         Tuple2<String, Integer> next = in.next();
         out.add(next.swap());
       }
-      return out;
+      return out.iterator();
     });

     JavaTestUtils.attachTestOutputStream(reversed);
@@ -598,7 +600,7 @@ public class Java8APISuite extends LocalJavaStreamingContext implements Serializ
       for (Character s : in._1().toCharArray()) {
         out.add(new Tuple2<>(in._2(), s.toString()));
       }
-      return out;
+      return out.iterator();
     });

     JavaTestUtils.attachTestOutputStream(flatMapped);
diff --git a/external/java8-tests/src/test/resources/log4j.properties b/external/java8-tests/src/test/resources/log4j.properties
index eb3b1999eb..edbecdae92 100644
--- a/external/java8-tests/src/test/resources/log4j.properties
+++ b/external/java8-tests/src/test/resources/log4j.properties
@@ -25,4 +25,3 @@ log4j.appender.file.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss.SSS} %t %p %c{

 # Ignore messages below warning level from Jetty, because it's a bit verbose
 log4j.logger.org.spark-project.jetty=WARN
-org.spark-project.jetty.LEVEL=WARN
--
cgit v1.2.3
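
Background for the `.iterator()` edits above: in the Spark 2.0 Java API, `FlatMapFunction`, `DoubleFlatMapFunction`, and `PairFlatMapFunction` (and the functions passed to `mapPartitions`) return an `Iterator` rather than an `Iterable`, so Java 8 lambdas that previously returned a collection now need a trailing `.iterator()`. The sketch below shows the new calling convention in isolation; it is a minimal illustration only, and the local master URL, application name, and class name are assumptions, not taken from the patch.

    import java.util.Arrays;

    import org.apache.spark.api.java.JavaRDD;
    import org.apache.spark.api.java.JavaSparkContext;

    public class FlatMapIteratorExample {
      public static void main(String[] args) {
        // Assumed local configuration, for illustration only.
        JavaSparkContext sc = new JavaSparkContext("local[2]", "flatmap-iterator-example");
        JavaRDD<String> lines = sc.parallelize(
            Arrays.asList("Hello World!", "The quick brown fox jumps over the lazy dog."));

        // Spark 1.x accepted: lines.flatMap(x -> Arrays.asList(x.split(" ")))
        // Spark 2.x expects the lambda to return an Iterator, hence the trailing .iterator().
        JavaRDD<String> words = lines.flatMap(x -> Arrays.asList(x.split(" ")).iterator());

        System.out.println(words.collect());
        sc.stop();
      }
    }

The same pattern accounts for the flatMapToPair, flatMapToDouble, mapPartitions, and streaming flatMap changes in the diff above.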