diff options
 assembly/pom.xml                                      | 4 +++-
 core/pom.xml                                          | 2 ++
 core/src/test/java/org/apache/spark/JavaAPISuite.java | 26 ++++++++++++++++++++++++++
 3 files changed, 31 insertions(+), 1 deletion(-)
diff --git a/assembly/pom.xml b/assembly/pom.xml
index 604b1ab3de..5ec9da22ae 100644
--- a/assembly/pom.xml
+++ b/assembly/pom.xml
@@ -141,7 +141,9 @@
                 <include>com.google.common.**</include>
               </includes>
               <excludes>
-                <exclude>com.google.common.base.Optional**</exclude>
+                <exclude>com/google/common/base/Absent*</exclude>
+                <exclude>com/google/common/base/Optional*</exclude>
+                <exclude>com/google/common/base/Present*</exclude>
               </excludes>
             </relocation>
           </relocations>
diff --git a/core/pom.xml b/core/pom.xml
index 2a81f6df28..e012c5e673 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -343,7 +343,9 @@
             <filter>
               <artifact>com.google.guava:guava</artifact>
               <includes>
+                <include>com/google/common/base/Absent*</include>
                 <include>com/google/common/base/Optional*</include>
+                <include>com/google/common/base/Present*</include>
               </includes>
             </filter>
           </filters>
diff --git a/core/src/test/java/org/apache/spark/JavaAPISuite.java b/core/src/test/java/org/apache/spark/JavaAPISuite.java
index b8574dfb42..b8c23d524e 100644
--- a/core/src/test/java/org/apache/spark/JavaAPISuite.java
+++ b/core/src/test/java/org/apache/spark/JavaAPISuite.java
@@ -1307,4 +1307,30 @@ public class JavaAPISuite implements Serializable {
     SomeCustomClass[] collected = (SomeCustomClass[]) rdd.rdd().retag(SomeCustomClass.class).collect();
     Assert.assertEquals(data.size(), collected.length);
   }
+
+  /**
+   * Test for SPARK-3647. This test needs to use the maven-built assembly to trigger the issue,
+   * since that's the only artifact where Guava classes have been relocated.
+   */
+  @Test
+  public void testGuavaOptional() {
+    // Stop the context created in setUp() and start a local-cluster one, to force usage of the
+    // assembly.
+    sc.stop();
+    JavaSparkContext localCluster = new JavaSparkContext("local-cluster[1,1,512]", "JavaAPISuite");
+    try {
+      JavaRDD<Integer> rdd1 = localCluster.parallelize(Arrays.asList(1, 2, null), 3);
+      JavaRDD<Optional<Integer>> rdd2 = rdd1.map(
+        new Function<Integer, Optional<Integer>>() {
+          @Override
+          public Optional<Integer> call(Integer i) {
+            return Optional.fromNullable(i);
+          }
+        });
+      rdd2.collect();
+    } finally {
+      localCluster.stop();
+    }
+  }
+
 }