about summary refs log tree commit diff
diff options
context:
space:
mode:
author	Mosharaf Chowdhury <mosharaf@mosharaf-ubuntu.(none)>	2010-11-29 20:30:38 -0800
committer	Mosharaf Chowdhury <mosharaf@mosharaf-ubuntu.(none)>	2010-11-29 20:30:38 -0800
commit	c9cad03c319d950d8e8c4c34e7474c170b4c3aac (patch)
tree	cd74b67cdb2dea83dd17f68dd3d19995e6ef57fe
parent	73714da568f172af073f003740700e2bee07837b (diff)
download	spark-c9cad03c319d950d8e8c4c34e7474c170b4c3aac.tar.gz
spark-c9cad03c319d950d8e8c4c34e7474c170b4c3aac.tar.bz2
spark-c9cad03c319d950d8e8c4c34e7474c170b4c3aac.zip
- Using the new Cache implementation.
- Removed unused code related to dualMode (deprecated).
-rw-r--r--	src/scala/spark/Broadcast.scala	| 2
-rw-r--r--	src/scala/spark/ChainedBroadcast.scala	| 10
-rw-r--r--	src/scala/spark/DfsBroadcast.scala	| 4
3 files changed, 6 insertions, 10 deletions
diff --git a/src/scala/spark/Broadcast.scala b/src/scala/spark/Broadcast.scala
index afff500bb0..b4e896f096 100644
--- a/src/scala/spark/Broadcast.scala
+++ b/src/scala/spark/Broadcast.scala
@@ -67,7 +67,7 @@ extends Logging {
@serializable
case class SourceInfo (val hostAddress: String, val listenPort: Int,
- val totalBlocks: Int, val totalBytes: Int, val replicaID: Int)
+ val totalBlocks: Int, val totalBytes: Int)
extends Comparable [SourceInfo] with Logging {
var currentLeechers = 0
diff --git a/src/scala/spark/ChainedBroadcast.scala b/src/scala/spark/ChainedBroadcast.scala
index 3fc6a0fc90..ddeb93a8da 100644
--- a/src/scala/spark/ChainedBroadcast.scala
+++ b/src/scala/spark/ChainedBroadcast.scala
@@ -1,7 +1,5 @@
package spark
-import com.google.common.collect.MapMaker
-
import java.io._
import java.net._
import java.util.{Comparator, PriorityQueue, Random, UUID}
@@ -84,7 +82,7 @@ extends Broadcast with Logging {
pqOfSources = new PriorityQueue[SourceInfo]
val masterSource_0 =
- SourceInfo (hostAddress, listenPort, totalBlocks, totalBytes, 0)
+ SourceInfo (hostAddress, listenPort, totalBlocks, totalBytes)
pqOfSources.add (masterSource_0)
// Register with the Tracker
@@ -288,7 +286,7 @@ extends Broadcast with Logging {
logInfo ("Connected to Master's guiding object")
// Send local source information
- oosMaster.writeObject(SourceInfo (hostAddress, listenPort, -1, -1, 0))
+ oosMaster.writeObject(SourceInfo (hostAddress, listenPort, -1, -1))
oosMaster.flush
// Receive source information from Master
@@ -520,7 +518,7 @@ extends Broadcast with Logging {
// Add this new (if it can finish) source to the PQ of sources
thisWorkerInfo = SourceInfo (sourceInfo.hostAddress,
- sourceInfo.listenPort, totalBlocks, totalBytes, 0)
+ sourceInfo.listenPort, totalBlocks, totalBytes)
logInfo ("Adding possible new source to pqOfSources: " + thisWorkerInfo)
pqOfSources.add (thisWorkerInfo)
}
@@ -713,7 +711,7 @@ extends Broadcast with Logging {
private object ChainedBroadcast
extends Logging {
- val values = new MapMaker ().softValues ().makeMap[UUID, Any]
+ val values = Cache.newKeySpace()
var valueToGuidePortMap = Map[UUID, Int] ()
diff --git a/src/scala/spark/DfsBroadcast.scala b/src/scala/spark/DfsBroadcast.scala
index 5be5f98e8c..a249961fd5 100644
--- a/src/scala/spark/DfsBroadcast.scala
+++ b/src/scala/spark/DfsBroadcast.scala
@@ -1,7 +1,5 @@
package spark
-import com.google.common.collect.MapMaker
-
import java.io._
import java.net._
import java.util.UUID
@@ -56,7 +54,7 @@ extends Broadcast with Logging {
private object DfsBroadcast
extends Logging {
- val values = new MapMaker ().softValues ().makeMap[UUID, Any]
+ val values = Cache.newKeySpace()
private var initialized = false