diff options
author | Patrick Wendell <pwendell@gmail.com> | 2014-05-12 17:27:28 -0700 |
---|---|---|
committer | Patrick Wendell <pwendell@gmail.com> | 2014-05-12 17:27:28 -0700 |
commit | 925d8b249b84d2706c52f0d1e29fb8dcd6de452e (patch) | |
tree | 1e89d83275556cc759c95e8427a83c65e5ef2ca8 /core | |
parent | 3ce526b168050c572a1feee8e0121e1426f7d9ee (diff) | |
download | spark-925d8b249b84d2706c52f0d1e29fb8dcd6de452e.tar.gz spark-925d8b249b84d2706c52f0d1e29fb8dcd6de452e.tar.bz2 spark-925d8b249b84d2706c52f0d1e29fb8dcd6de452e.zip |
SPARK-1623: Use File objects instead of Strings in HTTPBroadcast
This seems strictly better, and I think it's justified only on the grounds of
clean-up. It might also fix issues with path conversions, but I haven't
yet isolated any instance of that happening.
/cc @srowen @tdas
Author: Patrick Wendell <pwendell@gmail.com>
Closes #749 from pwendell/broadcast-cleanup and squashes the following commits:
d6d54f2 [Patrick Wendell] SPARK-1623: Use File objects instead of Strings in HTTPBroadcast
Diffstat (limited to 'core')
-rw-r--r-- | core/src/main/scala/org/apache/spark/broadcast/HttpBroadcast.scala | 8 |
1 file changed, 4 insertions, 4 deletions
diff --git a/core/src/main/scala/org/apache/spark/broadcast/HttpBroadcast.scala b/core/src/main/scala/org/apache/spark/broadcast/HttpBroadcast.scala index 78fc286e51..4f6cabaff2 100644 --- a/core/src/main/scala/org/apache/spark/broadcast/HttpBroadcast.scala +++ b/core/src/main/scala/org/apache/spark/broadcast/HttpBroadcast.scala @@ -112,7 +112,7 @@ private[spark] object HttpBroadcast extends Logging { private var securityManager: SecurityManager = null // TODO: This shouldn't be a global variable so that multiple SparkContexts can coexist - private val files = new TimeStampedHashSet[String] + private val files = new TimeStampedHashSet[File] private val httpReadTimeout = TimeUnit.MILLISECONDS.convert(5, TimeUnit.MINUTES).toInt private var compressionCodec: CompressionCodec = null private var cleaner: MetadataCleaner = null @@ -173,7 +173,7 @@ private[spark] object HttpBroadcast extends Logging { val serOut = ser.serializeStream(out) serOut.writeObject(value) serOut.close() - files += file.getAbsolutePath + files += file } def read[T: ClassTag](id: Long): T = { @@ -216,7 +216,7 @@ private[spark] object HttpBroadcast extends Logging { SparkEnv.get.blockManager.master.removeBroadcast(id, removeFromDriver, blocking) if (removeFromDriver) { val file = getFile(id) - files.remove(file.toString) + files.remove(file) deleteBroadcastFile(file) } } @@ -232,7 +232,7 @@ private[spark] object HttpBroadcast extends Logging { val (file, time) = (entry.getKey, entry.getValue) if (time < cleanupTime) { iterator.remove() - deleteBroadcastFile(new File(file.toString)) + deleteBroadcastFile(file) } } } |