diff options
author | Ismael Juma <ismael@juma.me.uk> | 2011-08-02 10:25:16 +0100 |
---|---|---|
committer | Ismael Juma <ismael@juma.me.uk> | 2011-08-02 10:25:16 +0100 |
commit | 620de2dd1da3d176078e6a7fa700db20f36db379 (patch) | |
tree | cdfec88691daf4f9aa61f8d5b33fbee71f6c101a /core | |
parent | 0fba22b3d216548e5e47a23a1b2e84e0e46835e9 (diff) | |
download | spark-620de2dd1da3d176078e6a7fa700db20f36db379.tar.gz spark-620de2dd1da3d176078e6a7fa700db20f36db379.tar.bz2 spark-620de2dd1da3d176078e6a7fa700db20f36db379.zip |
Change currentThread to Thread.currentThread as the former is deprecated.
Diffstat (limited to 'core')
-rw-r--r-- | core/src/main/scala/spark/Accumulators.scala | 6 | ||||
-rw-r--r-- | core/src/main/scala/spark/JavaSerializer.scala | 2 | ||||
-rw-r--r-- | core/src/main/scala/spark/LocalScheduler.scala | 2 | ||||
-rw-r--r-- | core/src/main/scala/spark/SimpleJob.scala | 2 | ||||
-rw-r--r-- | core/src/main/scala/spark/broadcast/Broadcast.scala | 2 |
5 files changed, 7 insertions, 7 deletions
diff --git a/core/src/main/scala/spark/Accumulators.scala b/core/src/main/scala/spark/Accumulators.scala
index a808536146..094a95d70e 100644
--- a/core/src/main/scala/spark/Accumulators.scala
+++ b/core/src/main/scala/spark/Accumulators.scala
@@ -52,20 +52,20 @@ private object Accumulators
     if (original) {
       originals(a.id) = a
     } else {
-      val accums = localAccums.getOrElseUpdate(currentThread, Map())
+      val accums = localAccums.getOrElseUpdate(Thread.currentThread, Map())
       accums(a.id) = a
     }
   }

   // Clear the local (non-original) accumulators for the current thread
   def clear: Unit = synchronized {
-    localAccums.remove(currentThread)
+    localAccums.remove(Thread.currentThread)
   }

   // Get the values of the local accumulators for the current thread (by ID)
   def values: Map[Long, Any] = synchronized {
     val ret = Map[Long, Any]()
-    for ((id, accum) <- localAccums.getOrElse(currentThread, Map()))
+    for ((id, accum) <- localAccums.getOrElse(Thread.currentThread, Map()))
       ret(id) = accum.value
     return ret
   }
diff --git a/core/src/main/scala/spark/JavaSerializer.scala b/core/src/main/scala/spark/JavaSerializer.scala
index af390d55d8..e7cd4364ee 100644
--- a/core/src/main/scala/spark/JavaSerializer.scala
+++ b/core/src/main/scala/spark/JavaSerializer.scala
@@ -12,7 +12,7 @@ class JavaSerializationStream(out: OutputStream) extends SerializationStream {
 class JavaDeserializationStream(in: InputStream) extends DeserializationStream {
   val objIn = new ObjectInputStream(in) {
     override def resolveClass(desc: ObjectStreamClass) =
-      Class.forName(desc.getName, false, currentThread.getContextClassLoader)
+      Class.forName(desc.getName, false, Thread.currentThread.getContextClassLoader)
   }

   def readObject[T](): T = objIn.readObject().asInstanceOf[T]
diff --git a/core/src/main/scala/spark/LocalScheduler.scala b/core/src/main/scala/spark/LocalScheduler.scala
index e43516c84b..1044bf18aa 100644
--- a/core/src/main/scala/spark/LocalScheduler.scala
+++ b/core/src/main/scala/spark/LocalScheduler.scala
@@ -33,7 +33,7 @@ private class LocalScheduler(threads: Int) extends DAGScheduler with Logging {
         val bytes = Utils.serialize(tasks(i))
         logInfo("Size of task " + i + " is " + bytes.size + " bytes")
         val deserializedTask = Utils.deserialize[Task[_]](
-            bytes, currentThread.getContextClassLoader)
+            bytes, Thread.currentThread.getContextClassLoader)
         val result: Any = deserializedTask.run(myAttemptId)
         val accumUpdates = Accumulators.values
         logInfo("Finished task " + i)
diff --git a/core/src/main/scala/spark/SimpleJob.scala b/core/src/main/scala/spark/SimpleJob.scala
index 2001205878..9eee747cfd 100644
--- a/core/src/main/scala/spark/SimpleJob.scala
+++ b/core/src/main/scala/spark/SimpleJob.scala
@@ -27,7 +27,7 @@ extends Job(jobId) with Logging
   // Maximum times a task is allowed to fail before failing the job
   val MAX_TASK_FAILURES = 4

-  val callingThread = currentThread
+  val callingThread = Thread.currentThread
   val tasks = tasksSeq.toArray
   val numTasks = tasks.length
   val launched = new Array[Boolean](numTasks)
diff --git a/core/src/main/scala/spark/broadcast/Broadcast.scala b/core/src/main/scala/spark/broadcast/Broadcast.scala
index 837129c665..f492ca762c 100644
--- a/core/src/main/scala/spark/broadcast/Broadcast.scala
+++ b/core/src/main/scala/spark/broadcast/Broadcast.scala
@@ -182,7 +182,7 @@ extends Logging with Serializable {
   private def byteArrayToObject[OUT](bytes: Array[Byte]): OUT = {
     val in = new ObjectInputStream (new ByteArrayInputStream (bytes)){
       override def resolveClass(desc: ObjectStreamClass) =
-        Class.forName(desc.getName, false, currentThread.getContextClassLoader)
+        Class.forName(desc.getName, false, Thread.currentThread.getContextClassLoader)
     }
     val retVal = in.readObject.asInstanceOf[OUT]
     in.close()