-rw-r--r--  core/src/main/scala/org/apache/spark/network/SecurityMessage.scala      | 1 -
-rw-r--r--  core/src/main/scala/org/apache/spark/partial/GroupedMeanEvaluator.scala | 1 -
-rw-r--r--  core/src/main/scala/org/apache/spark/storage/BlockFetcherIterator.scala | 2 --
-rw-r--r--  core/src/main/scala/org/apache/spark/util/random/XORShiftRandom.scala   | 2 +-
-rw-r--r--  examples/src/main/java/org/apache/spark/examples/JavaLogQuery.java      | 1 -
-rw-r--r--  examples/src/main/java/org/apache/spark/examples/JavaTC.java            | 2 +-
-rw-r--r--  examples/src/main/java/org/apache/spark/examples/sql/JavaSparkSQL.java  | 1 -
7 files changed, 2 insertions(+), 8 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/network/SecurityMessage.scala b/core/src/main/scala/org/apache/spark/network/SecurityMessage.scala
index a1dfc4094c..9af9e2e8e9 100644
--- a/core/src/main/scala/org/apache/spark/network/SecurityMessage.scala
+++ b/core/src/main/scala/org/apache/spark/network/SecurityMessage.scala
@@ -106,7 +106,6 @@ private[spark] class SecurityMessage() extends Logging {
    * @return BufferMessage
    */
   def toBufferMessage: BufferMessage = {
-    val startTime = System.currentTimeMillis
     val buffers = new ArrayBuffer[ByteBuffer]()
 
     // 4 bytes for the length of the connectionId
diff --git a/core/src/main/scala/org/apache/spark/partial/GroupedMeanEvaluator.scala b/core/src/main/scala/org/apache/spark/partial/GroupedMeanEvaluator.scala
index b5111891ed..af26c3d59a 100644
--- a/core/src/main/scala/org/apache/spark/partial/GroupedMeanEvaluator.scala
+++ b/core/src/main/scala/org/apache/spark/partial/GroupedMeanEvaluator.scala
@@ -61,7 +61,6 @@ private[spark] class GroupedMeanEvaluator[T](totalOutputs: Int, confidence: Doub
     } else if (outputsMerged == 0) {
       new HashMap[T, BoundedDouble]
     } else {
-      val p = outputsMerged.toDouble / totalOutputs
       val studentTCacher = new StudentTCacher(confidence)
       val result = new JHashMap[T, BoundedDouble](sums.size)
       val iter = sums.entrySet.iterator()
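Note: this hunk sits in the branch of GroupedMeanEvaluator that turns partially merged per-group sums into approximate means with confidence bounds; the deleted p (the fraction of outputs merged) was computed but never read there. A rough, self-contained Scala sketch of that style of estimate, with invented names and a fixed normal quantile standing in for the StudentTCacher lookup, not the actual Spark code:

    // Illustrative sketch only; RunningStats, BoundedEstimate and the 1.96 quantile
    // are assumptions, not the GroupedMeanEvaluator/StudentTCacher implementation.
    case class RunningStats(count: Long, sum: Double, sumSq: Double) {
      def mean: Double = sum / count
      def variance: Double = sumSq / count - mean * mean
    }

    case class BoundedEstimate(mean: Double, low: Double, high: Double)

    def approximateMean(stats: RunningStats): BoundedEstimate = {
      val m = stats.mean
      val stdErr = math.sqrt(stats.variance / stats.count) // standard error of the mean so far
      val z = 1.96                                         // 95% normal quantile in place of Student's t
      BoundedEstimate(m, m - z * stdErr, m + z * stdErr)
    }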
diff --git a/core/src/main/scala/org/apache/spark/storage/BlockFetcherIterator.scala b/core/src/main/scala/org/apache/spark/storage/BlockFetcherIterator.scala
index a02dd9441d..408a797088 100644
--- a/core/src/main/scala/org/apache/spark/storage/BlockFetcherIterator.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockFetcherIterator.scala
@@ -118,11 +118,9 @@ object BlockFetcherIterator {
       })
       bytesInFlight += req.size
       val sizeMap = req.blocks.toMap // so we can look up the size of each blockID
-      val fetchStart = System.currentTimeMillis()
       val future = connectionManager.sendMessageReliably(cmId, blockMessageArray.toBufferMessage)
       future.onSuccess {
         case Some(message) => {
-          val fetchDone = System.currentTimeMillis()
           val bufferMessage = message.asInstanceOf[BufferMessage]
           val blockMessageArray = BlockMessageArray.fromBufferMessage(bufferMessage)
           for (blockMessage <- blockMessageArray) {
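Note: the two timestamps removed here (fetchStart, fetchDone) were never read, so they only cost a System.currentTimeMillis() call each. For reference, a minimal generic sketch of how elapsed time around a future-based call can be captured when it actually is needed; this uses plain scala.concurrent, not Spark's ConnectionManager API:

    import scala.concurrent.{ExecutionContext, Future}
    import ExecutionContext.Implicits.global

    // Hypothetical helper: record the start time and report the elapsed time once the
    // future completes, instead of leaving unused vals behind.
    def timedFetch[T](fetch: => Future[T]): Future[T] = {
      val start = System.currentTimeMillis()
      val f = fetch
      f.onComplete { _ =>
        println(s"fetch finished in ${System.currentTimeMillis() - start} ms")
      }
      f
    }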
diff --git a/core/src/main/scala/org/apache/spark/util/random/XORShiftRandom.scala b/core/src/main/scala/org/apache/spark/util/random/XORShiftRandom.scala
index 7f220383f9..55b5713706 100644
--- a/core/src/main/scala/org/apache/spark/util/random/XORShiftRandom.scala
+++ b/core/src/main/scala/org/apache/spark/util/random/XORShiftRandom.scala
@@ -91,7 +91,7 @@ private[spark] object XORShiftRandom {
     val xorRand = new XORShiftRandom(seed)
 
     // this is just to warm up the JIT - we're not timing anything
-    timeIt(1e6.toInt) {
+    timeIt(million) {
       javaRand.nextInt()
       xorRand.nextInt()
     }
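Note: this hunk swaps the bare 1e6.toInt literal for a named constant, million, which is presumably defined earlier in the benchmark method (not shown in the hunk). A sketch of the shape involved, with a stand-in by-name timeIt helper; the real helper lives elsewhere in Spark's util code and may differ:

    // Assumed shape of the timing helper and the named constant; sketch only.
    val million = 1e6.toInt

    def timeIt(numIters: Int)(f: => Unit): Long = {
      val start = System.currentTimeMillis()
      var i = 0
      while (i < numIters) { f; i += 1 }
      System.currentTimeMillis() - start
    }

    // Warm-up usage as in the hunk (generators stubbed with java.util.Random here):
    val javaRand = new java.util.Random(1L)
    val xorRand = new java.util.Random(1L)
    timeIt(million) { javaRand.nextInt(); xorRand.nextInt() }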
diff --git a/examples/src/main/java/org/apache/spark/examples/JavaLogQuery.java b/examples/src/main/java/org/apache/spark/examples/JavaLogQuery.java
index 2a4278d3c3..3f7a879538 100644
--- a/examples/src/main/java/org/apache/spark/examples/JavaLogQuery.java
+++ b/examples/src/main/java/org/apache/spark/examples/JavaLogQuery.java
@@ -75,7 +75,6 @@ public final class JavaLogQuery {
 
   public static Tuple3<String, String, String> extractKey(String line) {
     Matcher m = apacheLogRegex.matcher(line);
-    List<String> key = Collections.emptyList();
     if (m.find()) {
       String ip = m.group(1);
       String user = m.group(3);
diff --git a/examples/src/main/java/org/apache/spark/examples/JavaTC.java b/examples/src/main/java/org/apache/spark/examples/JavaTC.java
index 1d776940f0..d66b9ba265 100644
--- a/examples/src/main/java/org/apache/spark/examples/JavaTC.java
+++ b/examples/src/main/java/org/apache/spark/examples/JavaTC.java
@@ -85,7 +85,7 @@ public final class JavaTC {
         }
     });
 
-    long oldCount = 0;
+    long oldCount;
     long nextCount = tc.count();
     do {
       oldCount = nextCount;
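Note: dropping the "= 0" initializer is safe because Java's definite-assignment analysis sees oldCount assigned at the top of the do-while body before the loop condition ever compares it to nextCount. For context, a small Scala sketch of the same fixpoint loop JavaTC runs, written over an in-memory Set instead of RDD joins (Scala vars still need an initializer, so only the loop shape carries over):

    // Grow the closure until the number of edges stops changing between iterations.
    def transitiveClosure(edges: Set[(Int, Int)]): Set[(Int, Int)] = {
      var tc = edges
      var oldCount = 0L
      var nextCount = tc.size.toLong
      do {
        oldCount = nextCount
        // join tc with the original edges: (x -> y) and (y -> z) yields (x -> z)
        val newPaths = for ((x, y) <- tc; (y2, z) <- edges if y == y2) yield (x, z)
        tc = tc ++ newPaths
        nextCount = tc.size.toLong
      } while (nextCount != oldCount)
      tc
    }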
diff --git a/examples/src/main/java/org/apache/spark/examples/sql/JavaSparkSQL.java b/examples/src/main/java/org/apache/spark/examples/sql/JavaSparkSQL.java
index b5b438e975..d62a72f534 100644
--- a/examples/src/main/java/org/apache/spark/examples/sql/JavaSparkSQL.java
+++ b/examples/src/main/java/org/apache/spark/examples/sql/JavaSparkSQL.java
@@ -23,7 +23,6 @@ import java.util.List;
 import org.apache.spark.api.java.JavaRDD;
 import org.apache.spark.api.java.JavaSparkContext;
 import org.apache.spark.api.java.function.Function;
-import org.apache.spark.api.java.function.VoidFunction;
 
 import org.apache.spark.sql.api.java.JavaSQLContext;
 import org.apache.spark.sql.api.java.JavaSchemaRDD;