author:    Sean Owen <sowen@cloudera.com>    2015-08-04 12:02:26 +0100
committer: Sean Owen <sowen@cloudera.com>    2015-08-04 12:02:26 +0100
commit:    76d74090d60f74412bd45487e8db6aff2e8343a2
tree:      df06579d8c0ab184fe17e1e1c611e01fcf4242a0 /core
parent:    9e952ecbce670e9b532a1c664a4d03b66e404112
[SPARK-9534] [BUILD] Enable javac lint for scalac parity; fix a lot of build warnings, 1.5.0 edition
Enable most javac lint warnings; fix a lot of build warnings. In a few cases, touch up surrounding code in the process.
I'll explain several of the changes inline in comments.
Author: Sean Owen <sowen@cloudera.com>
Closes #7862 from srowen/SPARK-9534 and squashes the following commits:
ea51618 [Sean Owen] Enable most javac lint warnings; fix a lot of build warnings. In a few cases, touch up surrounding code in the process.
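A note on the most visible change below: javac's -Xlint:unchecked warns about possible heap pollution wherever a generic varargs method is declared or called. The union(...) methods in JavaSparkContextVarargsWorkaround gain @SafeVarargs to suppress this, and become final because, as of Java 7/8, @SafeVarargs is only permitted on constructors and on static or final methods. A minimal sketch of the pattern, with illustrative names rather than Spark's API:

```java
import java.util.ArrayList;
import java.util.List;

// Minimal sketch of the @SafeVarargs pattern; class and method names are
// illustrative, not part of Spark's API.
class SafeVarargsSketch {

  // A generic varargs parameter makes javac synthesize a T[] at each call site,
  // which -Xlint:unchecked reports as unchecked generic array creation.
  // @SafeVarargs suppresses the warning; since it is only allowed on methods
  // that cannot be overridden, the method must be final (or static).
  @SafeVarargs
  public final <T> List<T> toList(T... items) {
    List<T> result = new ArrayList<>(items.length);
    for (T item : items) {
      result.add(item);
    }
    return result;
  }
}
```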
Diffstat (limited to 'core')
 core/src/main/java/org/apache/spark/api/java/JavaSparkContextVarargsWorkaround.java
 core/src/main/scala/org/apache/spark/storage/TachyonBlockManager.scala
 core/src/test/scala/org/apache/spark/deploy/master/PersistenceEngineSuite.scala
 core/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerUtilsSuite.scala
4 files changed, 28 insertions(+), 16 deletions(-)
diff --git a/core/src/main/java/org/apache/spark/api/java/JavaSparkContextVarargsWorkaround.java b/core/src/main/java/org/apache/spark/api/java/JavaSparkContextVarargsWorkaround.java
index 2090efd3b9..d4c42b38ac 100644
--- a/core/src/main/java/org/apache/spark/api/java/JavaSparkContextVarargsWorkaround.java
+++ b/core/src/main/java/org/apache/spark/api/java/JavaSparkContextVarargsWorkaround.java
@@ -23,11 +23,13 @@ import java.util.List;
 // See
 // http://scala-programming-language.1934581.n4.nabble.com/Workaround-for-implementing-java-varargs-in-2-7-2-final-tp1944767p1944772.html
 abstract class JavaSparkContextVarargsWorkaround {
-  public <T> JavaRDD<T> union(JavaRDD<T>... rdds) {
+
+  @SafeVarargs
+  public final <T> JavaRDD<T> union(JavaRDD<T>... rdds) {
     if (rdds.length == 0) {
       throw new IllegalArgumentException("Union called on empty list");
     }
-    ArrayList<JavaRDD<T>> rest = new ArrayList<JavaRDD<T>>(rdds.length - 1);
+    List<JavaRDD<T>> rest = new ArrayList<>(rdds.length - 1);
     for (int i = 1; i < rdds.length; i++) {
       rest.add(rdds[i]);
     }
@@ -38,18 +40,19 @@ abstract class JavaSparkContextVarargsWorkaround {
     if (rdds.length == 0) {
       throw new IllegalArgumentException("Union called on empty list");
     }
-    ArrayList<JavaDoubleRDD> rest = new ArrayList<JavaDoubleRDD>(rdds.length - 1);
+    List<JavaDoubleRDD> rest = new ArrayList<>(rdds.length - 1);
     for (int i = 1; i < rdds.length; i++) {
       rest.add(rdds[i]);
     }
     return union(rdds[0], rest);
   }

-  public <K, V> JavaPairRDD<K, V> union(JavaPairRDD<K, V>... rdds) {
+  @SafeVarargs
+  public final <K, V> JavaPairRDD<K, V> union(JavaPairRDD<K, V>... rdds) {
     if (rdds.length == 0) {
       throw new IllegalArgumentException("Union called on empty list");
     }
-    ArrayList<JavaPairRDD<K, V>> rest = new ArrayList<JavaPairRDD<K, V>>(rdds.length - 1);
+    List<JavaPairRDD<K, V>> rest = new ArrayList<>(rdds.length - 1);
     for (int i = 1; i < rdds.length; i++) {
       rest.add(rdds[i]);
     }
@@ -57,7 +60,7 @@ abstract class JavaSparkContextVarargsWorkaround {
   }

   // These methods take separate "first" and "rest" elements to avoid having the same type erasure
-  abstract public <T> JavaRDD<T> union(JavaRDD<T> first, List<JavaRDD<T>> rest);
-  abstract public JavaDoubleRDD union(JavaDoubleRDD first, List<JavaDoubleRDD> rest);
-  abstract public <K, V> JavaPairRDD<K, V> union(JavaPairRDD<K, V> first, List<JavaPairRDD<K, V>> rest);
+  public abstract <T> JavaRDD<T> union(JavaRDD<T> first, List<JavaRDD<T>> rest);
+  public abstract JavaDoubleRDD union(JavaDoubleRDD first, List<JavaDoubleRDD> rest);
+  public abstract <K, V> JavaPairRDD<K, V> union(JavaPairRDD<K, V> first, List<JavaPairRDD<K, V>> rest);
 }

diff --git a/core/src/main/scala/org/apache/spark/storage/TachyonBlockManager.scala b/core/src/main/scala/org/apache/spark/storage/TachyonBlockManager.scala
index b53c86e89a..ebad5bc5ab 100644
--- a/core/src/main/scala/org/apache/spark/storage/TachyonBlockManager.scala
+++ b/core/src/main/scala/org/apache/spark/storage/TachyonBlockManager.scala
@@ -27,9 +27,10 @@ import scala.util.control.NonFatal
 import com.google.common.io.ByteStreams
 import tachyon.client.{ReadType, WriteType, TachyonFS, TachyonFile}
+import tachyon.conf.TachyonConf
 import tachyon.TachyonURI

-import org.apache.spark.{SparkException, SparkConf, Logging}
+import org.apache.spark.Logging
 import org.apache.spark.executor.ExecutorExitCode
 import org.apache.spark.util.Utils

@@ -60,7 +61,11 @@ private[spark] class TachyonBlockManager() extends ExternalBlockManager with Log
     rootDirs = s"$storeDir/$appFolderName/$executorId"
     master = blockManager.conf.get(ExternalBlockStore.MASTER_URL, "tachyon://localhost:19998")
-    client = if (master != null && master != "") TachyonFS.get(new TachyonURI(master)) else null
+    client = if (master != null && master != "") {
+      TachyonFS.get(new TachyonURI(master), new TachyonConf())
+    } else {
+      null
+    }
     // original implementation call System.exit, we change it to run without extblkstore support
     if (client == null) {
       logError("Failed to connect to the Tachyon as the master address is not configured")
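The comment retained in the JavaSparkContextVarargsWorkaround hunk above, that the abstract methods "take separate 'first' and 'rest' elements to avoid having the same type erasure", is worth unpacking: overloads that differ only in a generic type argument erase to identical signatures and cannot coexist. A minimal sketch, with illustrative names:

```java
import java.util.List;

// Why the abstract union(first, rest) overloads cannot simply take a single
// List parameter: after type erasure, both would have the raw signature
// union(List) and javac rejects them as duplicates. Names are illustrative.
abstract class ErasureSketch {

  // These two would NOT compile side by side -- same erasure, union(List):
  //   abstract void union(List<String> items);
  //   abstract void union(List<Integer> items);

  // Splitting out a leading parameter of the concrete element type keeps the
  // erased signatures distinct: union(String, List) vs. union(Integer, List).
  abstract void union(String first, List<String> rest);
  abstract void union(Integer first, List<Integer> rest);
}
```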
diff --git a/core/src/test/scala/org/apache/spark/deploy/master/PersistenceEngineSuite.scala b/core/src/test/scala/org/apache/spark/deploy/master/PersistenceEngineSuite.scala
index 11e87bd1dd..34775577de 100644
--- a/core/src/test/scala/org/apache/spark/deploy/master/PersistenceEngineSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/master/PersistenceEngineSuite.scala
@@ -73,11 +73,11 @@ class PersistenceEngineSuite extends SparkFunSuite {
     assert(persistenceEngine.read[String]("test_").isEmpty)

     // Test deserializing objects that contain RpcEndpointRef
-    val rpcEnv = RpcEnv.create("test", "localhost", 12345, conf, new SecurityManager(conf))
+    val testRpcEnv = RpcEnv.create("test", "localhost", 12345, conf, new SecurityManager(conf))
     try {
       // Create a real endpoint so that we can test RpcEndpointRef deserialization
-      val workerEndpoint = rpcEnv.setupEndpoint("worker", new RpcEndpoint {
-        override val rpcEnv: RpcEnv = rpcEnv
+      val workerEndpoint = testRpcEnv.setupEndpoint("worker", new RpcEndpoint {
+        override val rpcEnv: RpcEnv = testRpcEnv
       })

       val workerToPersist = new WorkerInfo(
@@ -93,7 +93,8 @@ class PersistenceEngineSuite extends SparkFunSuite {

       persistenceEngine.addWorker(workerToPersist)

-      val (storedApps, storedDrivers, storedWorkers) = persistenceEngine.readPersistedData(rpcEnv)
+      val (storedApps, storedDrivers, storedWorkers) =
+        persistenceEngine.readPersistedData(testRpcEnv)

       assert(storedApps.isEmpty)
       assert(storedDrivers.isEmpty)
@@ -110,8 +111,8 @@ class PersistenceEngineSuite extends SparkFunSuite {
       assert(workerToPersist.webUiPort === recoveryWorkerInfo.webUiPort)
       assert(workerToPersist.publicAddress === recoveryWorkerInfo.publicAddress)
     } finally {
-      rpcEnv.shutdown()
-      rpcEnv.awaitTermination()
+      testRpcEnv.shutdown()
+      testRpcEnv.awaitTermination()
     }
   }

diff --git a/core/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerUtilsSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerUtilsSuite.scala
index b354914b6f..2eb43b7313 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerUtilsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerUtilsSuite.scala
@@ -17,10 +17,13 @@

 package org.apache.spark.scheduler.cluster.mesos

+import scala.language.reflectiveCalls
+
 import org.apache.mesos.Protos.Value
 import org.mockito.Mockito._
 import org.scalatest._
 import org.scalatest.mock.MockitoSugar
+
 import org.apache.spark.{SparkConf, SparkContext, SparkFunSuite}

 class MesosSchedulerUtilsSuite extends SparkFunSuite with Matchers with MockitoSugar {
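A note on the PersistenceEngineSuite change above: the rename from rpcEnv to testRpcEnv is not cosmetic. Inside the anonymous RpcEndpoint, the right-hand side of "override val rpcEnv: RpcEnv = rpcEnv" resolved to the member being defined rather than to the enclosing value, so renaming the outer binding makes the intended capture unambiguous. A minimal Java sketch of the same shadowing pitfall, with hypothetical names throughout:

```java
// Sketch of the shadowing pitfall behind the rpcEnv -> testRpcEnv rename.
// All names here are hypothetical, not Spark's.
interface Endpoint {
  Object env();
}

class ShadowingSketch {

  Endpoint makeEndpoint() {
    final Object env = new Object();
    return new Endpoint() {
      // Had this field also been named "env", every read of "env" inside the
      // anonymous class would resolve to the field, not the enclosing local --
      // exactly the ambiguity the commit removes by renaming one of the two.
      private final Object capturedEnv = env;

      @Override
      public Object env() {
        return capturedEnv;
      }
    };
  }
}
```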