author    Mosharaf Chowdhury <mosharaf@mosharaf-ubuntu.(none)>  2010-11-29 20:35:47 -0800
committer Mosharaf Chowdhury <mosharaf@mosharaf-ubuntu.(none)>  2010-11-29 20:35:47 -0800
commit    191af65fb74459031bf1c77f389b41da835e1cf3 (patch)
tree      ff3b990c8f9f561076d4d48215876aef30a24a35
parent    c62f2fefa50cc18d4e42d88205618089f9b225d3 (diff)
- Using the new Cache implementation.
-rw-r--r--  src/scala/spark/BitTorrentBroadcast.scala  6
-rw-r--r--  src/scala/spark/DfsBroadcast.scala          4
2 files changed, 3 insertions(+), 7 deletions(-)
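
The diff below drops the per-object Guava MapMaker soft-value maps and stores broadcast values through Cache.newKeySpace() instead. The spark.Cache class itself is not part of this commit, so the following is only a minimal sketch of the key-space idea it appears to rely on, assuming one process-wide soft-value map partitioned by a per-caller id; the names and signatures here are assumptions, not the actual API.

import java.util.concurrent.ConcurrentHashMap
import java.lang.ref.SoftReference

// Sketch only: a single shared cache, soft references so entries can be
// reclaimed under memory pressure, and a KeySpace wrapper that prefixes
// every key with a unique id so independent callers never collide.
object Cache {
  private val map = new ConcurrentHashMap[(Long, Any), SoftReference[Any]]()
  private var nextKeySpaceId = 0L

  def newKeySpace(): KeySpace = synchronized {
    nextKeySpaceId += 1
    new KeySpace(nextKeySpaceId)
  }

  class KeySpace private[Cache] (id: Long) {
    def put(key: Any, value: Any): Unit =
      map.put((id, key), new SoftReference[Any](value))

    def get(key: Any): Any = {
      val ref = map.get((id, key))
      if (ref == null) null else ref.get
    }
  }
}
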
diff --git a/src/scala/spark/BitTorrentBroadcast.scala b/src/scala/spark/BitTorrentBroadcast.scala
index 7dc376a3c5..60bed8511d 100644
--- a/src/scala/spark/BitTorrentBroadcast.scala
+++ b/src/scala/spark/BitTorrentBroadcast.scala
@@ -1,7 +1,5 @@
package spark
-import com.google.common.collect.MapMaker
-
import java.io._
import java.net._
import java.util.{BitSet, Comparator, Random, Timer, TimerTask, UUID}
@@ -150,7 +148,7 @@ extends Broadcast with Logging {
BitTorrentBroadcast.values.put (uuid, value_)
} else {
// TODO: This part won't work, cause HDFS writing is turned OFF
- val fileIn = new ObjectInputStream(BroadcastCH.openFileForReading(uuid))
+ val fileIn = new ObjectInputStream(DfsBroadcast.openFileForReading(uuid))
value_ = fileIn.readObject.asInstanceOf[T]
BitTorrentBroadcast.values.put(uuid, value_)
fileIn.close
@@ -1028,7 +1026,7 @@ extends Broadcast with Logging {
private object BitTorrentBroadcast
extends Logging {
- val values = new MapMaker ().softValues ().makeMap[UUID, Any]
+ val values = Cache.newKeySpace()
var valueToGuideMap = Map[UUID, SourceInfo] ()
diff --git a/src/scala/spark/DfsBroadcast.scala b/src/scala/spark/DfsBroadcast.scala
index 5be5f98e8c..a249961fd5 100644
--- a/src/scala/spark/DfsBroadcast.scala
+++ b/src/scala/spark/DfsBroadcast.scala
@@ -1,7 +1,5 @@
package spark
-import com.google.common.collect.MapMaker
-
import java.io._
import java.net._
import java.util.UUID
@@ -56,7 +54,7 @@ extends Broadcast with Logging {
private object DfsBroadcast
extends Logging {
- val values = new MapMaker ().softValues ().makeMap[UUID, Any]
+ val values = Cache.newKeySpace()
private var initialized = false
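
In both files the pattern of use stays the same: each broadcast object holds one key space and stores deserialized values under their UUID. A hypothetical usage mirroring the calls visible in the hunks above (values.put on the write path; a corresponding values.get would sit on the read path):

import java.util.UUID

val values = Cache.newKeySpace()
val uuid = UUID.randomUUID()
values.put(uuid, Array(1, 2, 3))   // cache the broadcast value under its UUID
val cached = values.get(uuid)      // null if never stored or already collected
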