about summary refs log tree commit diff
path: root/core
diff options
context:
space:
mode:
authorDavies Liu <davies@databricks.com>2015-08-14 20:56:55 -0700
committerReynold Xin <rxin@databricks.com>2015-08-14 20:56:55 -0700
commit37586e5449ff8f892d41f0b6b8fa1de83dd3849e (patch)
tree40ae7de2cc5c3f64891aed0c2e96d994398d34e2 /core
parente5fd60415fbfea2c5c02602f7ddbc999dd058064 (diff)
downloadspark-37586e5449ff8f892d41f0b6b8fa1de83dd3849e.tar.gz
spark-37586e5449ff8f892d41f0b6b8fa1de83dd3849e.tar.bz2
spark-37586e5449ff8f892d41f0b6b8fa1de83dd3849e.zip
[HOTFIX] fix duplicated braces
Author: Davies Liu <davies@databricks.com> Closes #8219 from davies/fix_typo.
Diffstat (limited to 'core')
-rw-r--r-- core/src/main/scala/org/apache/spark/storage/BlockManager.scala | 2
-rw-r--r-- core/src/main/scala/org/apache/spark/storage/BlockManagerMaster.scala | 6
-rw-r--r-- core/src/main/scala/org/apache/spark/util/ClosureCleaner.scala | 2
-rw-r--r-- core/src/main/scala/org/apache/spark/util/Utils.scala | 2
4 files changed, 6 insertions, 6 deletions
diff --git a/core/src/main/scala/org/apache/spark/storage/BlockManager.scala b/core/src/main/scala/org/apache/spark/storage/BlockManager.scala
index 86493673d9..eedb27942e 100644
--- a/core/src/main/scala/org/apache/spark/storage/BlockManager.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockManager.scala
@@ -222,7 +222,7 @@ private[spark] class BlockManager(
return
} catch {
case e: Exception if i < MAX_ATTEMPTS =>
- logError(s"Failed to connect to external shuffle server, will retry ${MAX_ATTEMPTS - i}}"
+ logError(s"Failed to connect to external shuffle server, will retry ${MAX_ATTEMPTS - i}"
+ s" more times after waiting $SLEEP_TIME_SECS seconds...", e)
Thread.sleep(SLEEP_TIME_SECS * 1000)
}
diff --git a/core/src/main/scala/org/apache/spark/storage/BlockManagerMaster.scala b/core/src/main/scala/org/apache/spark/storage/BlockManagerMaster.scala
index f70f701494..2a11f371b9 100644
--- a/core/src/main/scala/org/apache/spark/storage/BlockManagerMaster.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockManagerMaster.scala
@@ -103,7 +103,7 @@ class BlockManagerMaster(
val future = driverEndpoint.askWithRetry[Future[Seq[Int]]](RemoveRdd(rddId))
future.onFailure {
case e: Exception =>
- logWarning(s"Failed to remove RDD $rddId - ${e.getMessage}}", e)
+ logWarning(s"Failed to remove RDD $rddId - ${e.getMessage}", e)
}(ThreadUtils.sameThread)
if (blocking) {
timeout.awaitResult(future)
@@ -115,7 +115,7 @@ class BlockManagerMaster(
val future = driverEndpoint.askWithRetry[Future[Seq[Boolean]]](RemoveShuffle(shuffleId))
future.onFailure {
case e: Exception =>
- logWarning(s"Failed to remove shuffle $shuffleId - ${e.getMessage}}", e)
+ logWarning(s"Failed to remove shuffle $shuffleId - ${e.getMessage}", e)
}(ThreadUtils.sameThread)
if (blocking) {
timeout.awaitResult(future)
@@ -129,7 +129,7 @@ class BlockManagerMaster(
future.onFailure {
case e: Exception =>
logWarning(s"Failed to remove broadcast $broadcastId" +
- s" with removeFromMaster = $removeFromMaster - ${e.getMessage}}", e)
+ s" with removeFromMaster = $removeFromMaster - ${e.getMessage}", e)
}(ThreadUtils.sameThread)
if (blocking) {
timeout.awaitResult(future)
diff --git a/core/src/main/scala/org/apache/spark/util/ClosureCleaner.scala b/core/src/main/scala/org/apache/spark/util/ClosureCleaner.scala
index ebead830c6..150d82b393 100644
--- a/core/src/main/scala/org/apache/spark/util/ClosureCleaner.scala
+++ b/core/src/main/scala/org/apache/spark/util/ClosureCleaner.scala
@@ -181,7 +181,7 @@ private[spark] object ClosureCleaner extends Logging {
return
}
- logDebug(s"+++ Cleaning closure $func (${func.getClass.getName}}) +++")
+ logDebug(s"+++ Cleaning closure $func (${func.getClass.getName}) +++")
// A list of classes that represents closures enclosed in the given one
val innerClasses = getInnerClosureClasses(func)
diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index f2abf227dc..fddc24dbfc 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -1366,7 +1366,7 @@ private[spark] object Utils extends Logging {
file.getAbsolutePath, effectiveStartIndex, effectiveEndIndex))
}
sum += fileToLength(file)
- logDebug(s"After processing file $file, string built is ${stringBuffer.toString}}")
+ logDebug(s"After processing file $file, string built is ${stringBuffer.toString}")
}
stringBuffer.toString
}