aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authortedyu <yuzhihong@gmail.com>2015-11-10 16:52:26 -0800
committerAndrew Or <andrew@databricks.com>2015-11-10 16:52:59 -0800
commit900917541651abe7125f0d205085d2ab6a00d92c (patch)
treed63e9539267638227ee546f3964ca8c660cd22ee
parent3e0a6cf1e02a19b37c68d3026415d53bb57a576b (diff)
downloadspark-900917541651abe7125f0d205085d2ab6a00d92c.tar.gz
spark-900917541651abe7125f0d205085d2ab6a00d92c.tar.bz2
spark-900917541651abe7125f0d205085d2ab6a00d92c.zip
[SPARK-11615] Drop @VisibleForTesting annotation
See http://search-hadoop.com/m/q3RTtjpe8r1iRbTj2 for discussion. Summary: addition of VisibleForTesting annotation resulted in spark-shell malfunctioning. Author: tedyu <yuzhihong@gmail.com> Closes #9585 from tedyu/master.
-rw-r--r--core/src/main/scala/org/apache/spark/rpc/netty/Inbox.scala | 8
-rw-r--r--core/src/main/scala/org/apache/spark/ui/jobs/JobProgressListener.scala | 2
-rw-r--r--core/src/main/scala/org/apache/spark/util/AsynchronousListenerBus.scala | 5
-rw-r--r--core/src/main/scala/org/apache/spark/util/collection/ExternalSorter.scala | 3
-rw-r--r--scalastyle-config.xml | 7
-rw-r--r--sql/core/src/main/scala/org/apache/spark/sql/execution/QueryExecution.scala | 3
-rw-r--r--yarn/src/test/scala/org/apache/spark/network/shuffle/ShuffleTestAccessor.scala | 1
7 files changed, 14 insertions, 15 deletions
diff --git a/core/src/main/scala/org/apache/spark/rpc/netty/Inbox.scala b/core/src/main/scala/org/apache/spark/rpc/netty/Inbox.scala
index c72b588db5..464027f07c 100644
--- a/core/src/main/scala/org/apache/spark/rpc/netty/Inbox.scala
+++ b/core/src/main/scala/org/apache/spark/rpc/netty/Inbox.scala
@@ -21,8 +21,6 @@ import javax.annotation.concurrent.GuardedBy
import scala.util.control.NonFatal
-import com.google.common.annotations.VisibleForTesting
-
import org.apache.spark.{Logging, SparkException}
import org.apache.spark.rpc.{RpcAddress, RpcEndpoint, ThreadSafeRpcEndpoint}
@@ -193,8 +191,10 @@ private[netty] class Inbox(
def isEmpty: Boolean = inbox.synchronized { messages.isEmpty }
- /** Called when we are dropping a message. Test cases override this to test message dropping. */
- @VisibleForTesting
+ /**
+ * Called when we are dropping a message. Test cases override this to test message dropping.
+ * Exposed for testing.
+ */
protected def onDrop(message: InboxMessage): Unit = {
logWarning(s"Drop $message because $endpointRef is stopped")
}
diff --git a/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressListener.scala b/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressListener.scala
index 77d034fa5b..ca37829216 100644
--- a/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressListener.scala
+++ b/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressListener.scala
@@ -21,8 +21,6 @@ import java.util.concurrent.TimeoutException
import scala.collection.mutable.{HashMap, HashSet, ListBuffer}
-import com.google.common.annotations.VisibleForTesting
-
import org.apache.spark._
import org.apache.spark.annotation.DeveloperApi
import org.apache.spark.executor.TaskMetrics
diff --git a/core/src/main/scala/org/apache/spark/util/AsynchronousListenerBus.scala b/core/src/main/scala/org/apache/spark/util/AsynchronousListenerBus.scala
index b8481eabc7..b3b54af972 100644
--- a/core/src/main/scala/org/apache/spark/util/AsynchronousListenerBus.scala
+++ b/core/src/main/scala/org/apache/spark/util/AsynchronousListenerBus.scala
@@ -20,7 +20,6 @@ package org.apache.spark.util
import java.util.concurrent._
import java.util.concurrent.atomic.AtomicBoolean
-import com.google.common.annotations.VisibleForTesting
import org.apache.spark.SparkContext
/**
@@ -119,8 +118,8 @@ private[spark] abstract class AsynchronousListenerBus[L <: AnyRef, E](name: Stri
* For testing only. Wait until there are no more events in the queue, or until the specified
* time has elapsed. Throw `TimeoutException` if the specified time elapsed before the queue
* emptied.
+ * Exposed for testing.
*/
- @VisibleForTesting
@throws(classOf[TimeoutException])
def waitUntilEmpty(timeoutMillis: Long): Unit = {
val finishTime = System.currentTimeMillis + timeoutMillis
@@ -137,8 +136,8 @@ private[spark] abstract class AsynchronousListenerBus[L <: AnyRef, E](name: Stri
/**
* For testing only. Return whether the listener daemon thread is still alive.
+ * Exposed for testing.
*/
- @VisibleForTesting
def listenerThreadIsAlive: Boolean = listenerThread.isAlive
/**
diff --git a/core/src/main/scala/org/apache/spark/util/collection/ExternalSorter.scala b/core/src/main/scala/org/apache/spark/util/collection/ExternalSorter.scala
index a44e72b7c1..bd6844d045 100644
--- a/core/src/main/scala/org/apache/spark/util/collection/ExternalSorter.scala
+++ b/core/src/main/scala/org/apache/spark/util/collection/ExternalSorter.scala
@@ -23,7 +23,6 @@ import java.util.Comparator
import scala.collection.mutable.ArrayBuffer
import scala.collection.mutable
-import com.google.common.annotations.VisibleForTesting
import com.google.common.io.ByteStreams
import org.apache.spark._
@@ -608,8 +607,8 @@ private[spark] class ExternalSorter[K, V, C](
*
* For now, we just merge all the spilled files in once pass, but this can be modified to
* support hierarchical merging.
+ * Exposed for testing.
*/
- @VisibleForTesting
def partitionedIterator: Iterator[(Int, Iterator[Product2[K, C]])] = {
val usingMap = aggregator.isDefined
val collection: WritablePartitionedPairCollection[K, C] = if (usingMap) map else buffer
diff --git a/scalastyle-config.xml b/scalastyle-config.xml
index 64a0c71bbe..050c3f3604 100644
--- a/scalastyle-config.xml
+++ b/scalastyle-config.xml
@@ -150,6 +150,13 @@ This file is divided into 3 sections:
// scalastyle:on println]]></customMessage>
</check>
+ <check customId="visiblefortesting" level="error" class="org.scalastyle.file.RegexChecker" enabled="true">
+ <parameters><parameter name="regex">@VisibleForTesting</parameter></parameters>
+ <customMessage><![CDATA[
+ @VisibleForTesting causes classpath issues. Please note this in the java doc instead (SPARK-11615).
+ ]]></customMessage>
+ </check>
+
<check customId="classforname" level="error" class="org.scalastyle.file.RegexChecker" enabled="true">
<parameters><parameter name="regex">Class\.forName</parameter></parameters>
<customMessage><![CDATA[
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/QueryExecution.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/QueryExecution.scala
index c2142d03f4..77843f53b9 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/QueryExecution.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/QueryExecution.scala
@@ -17,8 +17,6 @@
package org.apache.spark.sql.execution
-import com.google.common.annotations.VisibleForTesting
-
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.catalyst.InternalRow
@@ -33,7 +31,6 @@ import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
*/
class QueryExecution(val sqlContext: SQLContext, val logical: LogicalPlan) {
- @VisibleForTesting
def assertAnalyzed(): Unit = sqlContext.analyzer.checkAnalysis(analyzed)
lazy val analyzed: LogicalPlan = sqlContext.analyzer.execute(logical)
diff --git a/yarn/src/test/scala/org/apache/spark/network/shuffle/ShuffleTestAccessor.scala b/yarn/src/test/scala/org/apache/spark/network/shuffle/ShuffleTestAccessor.scala
index aa46ec5100..94bf579dc8 100644
--- a/yarn/src/test/scala/org/apache/spark/network/shuffle/ShuffleTestAccessor.scala
+++ b/yarn/src/test/scala/org/apache/spark/network/shuffle/ShuffleTestAccessor.scala
@@ -19,7 +19,6 @@ package org.apache.spark.network.shuffle
import java.io.{IOException, File}
import java.util.concurrent.ConcurrentMap
-import com.google.common.annotations.VisibleForTesting
import org.apache.hadoop.yarn.api.records.ApplicationId
import org.fusesource.leveldbjni.JniDBFactory
import org.iq80.leveldb.{DB, Options}