author    Yin Huai <huai@cse.ohio-state.edu>    2014-07-25 11:14:51 -0700
committer Matei Zaharia <matei@databricks.com>  2014-07-25 11:14:51 -0700
commit    32bcf9af94b39f2c509eb54f8565fb659c70ca97 (patch)
tree      2a54692d53a786a71726d45b007f0adf21339e06 /core/src/main/scala
parent    a2715ccd9437fcdfa0b15e85ab4d0cec91aadf07 (diff)
[SPARK-2683] unidoc failed because org.apache.spark.util.CallSite uses Java keywords as value names
Renaming `short` to `shortForm` and `long` to `longForm`.

JIRA: https://issues.apache.org/jira/browse/SPARK-2683

Author: Yin Huai <huai@cse.ohio-state.edu>

Closes #1585 from yhuai/SPARK-2683 and squashes the following commits:

5ddb843 [Yin Huai] "short" and "long" are Java keywords. In order to generate javadoc, renaming "short" to "shortForm" and "long" to "longForm".
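For context: `short` and `long` are legal identifiers in Scala, but case-class fields compile to Java accessor methods of the same names, and `short` and `long` are reserved words in Java, so javadoc/unidoc generation fails on the generated signatures. A minimal sketch of the rename this patch applies:

// Before (broke unidoc): the case-class accessors compile to the Java
// methods short() and long(), both of which are Java reserved words.
//   private[spark] case class CallSite(short: String, long: String)

// After: keyword-free names yield the legal Java accessors
// shortForm() and longForm().
private[spark] case class CallSite(shortForm: String, longForm: String)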
Diffstat (limited to 'core/src/main/scala')
-rw-r--r--  core/src/main/scala/org/apache/spark/SparkContext.scala           | 12
-rw-r--r--  core/src/main/scala/org/apache/spark/rdd/RDD.scala                |  2
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/DAGScheduler.scala |  4
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/Stage.scala        |  4
-rw-r--r--  core/src/main/scala/org/apache/spark/util/Utils.scala             |  6
5 files changed, 15 insertions(+), 13 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 8052499ab7..3e6addeaf0 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -1037,7 +1037,7 @@ class SparkContext(config: SparkConf) extends Logging {
*/
private[spark] def getCallSite(): CallSite = {
Option(getLocalProperty("externalCallSite")) match {
- case Some(callSite) => CallSite(callSite, long = "")
+ case Some(callSite) => CallSite(callSite, longForm = "")
case None => Utils.getCallSite
}
}
@@ -1059,11 +1059,12 @@ class SparkContext(config: SparkConf) extends Logging {
}
val callSite = getCallSite
val cleanedFunc = clean(func)
- logInfo("Starting job: " + callSite.short)
+ logInfo("Starting job: " + callSite.shortForm)
val start = System.nanoTime
dagScheduler.runJob(rdd, cleanedFunc, partitions, callSite, allowLocal,
resultHandler, localProperties.get)
- logInfo("Job finished: " + callSite.short + ", took " + (System.nanoTime - start) / 1e9 + " s")
+ logInfo(
+ "Job finished: " + callSite.shortForm + ", took " + (System.nanoTime - start) / 1e9 + " s")
rdd.doCheckpoint()
}
@@ -1144,11 +1145,12 @@ class SparkContext(config: SparkConf) extends Logging {
evaluator: ApproximateEvaluator[U, R],
timeout: Long): PartialResult[R] = {
val callSite = getCallSite
- logInfo("Starting job: " + callSite.short)
+ logInfo("Starting job: " + callSite.shortForm)
val start = System.nanoTime
val result = dagScheduler.runApproximateJob(rdd, func, evaluator, callSite, timeout,
localProperties.get)
- logInfo("Job finished: " + callSite.short + ", took " + (System.nanoTime - start) / 1e9 + " s")
+ logInfo(
+ "Job finished: " + callSite.shortForm + ", took " + (System.nanoTime - start) / 1e9 + " s")
result
}
diff --git a/core/src/main/scala/org/apache/spark/rdd/RDD.scala b/core/src/main/scala/org/apache/spark/rdd/RDD.scala
index b1c965a790..a6abc49c53 100644
--- a/core/src/main/scala/org/apache/spark/rdd/RDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/RDD.scala
@@ -1225,7 +1225,7 @@ abstract class RDD[T: ClassTag](
/** User code that created this RDD (e.g. `textFile`, `parallelize`). */
@transient private[spark] val creationSite = Utils.getCallSite
- private[spark] def getCreationSite: String = Option(creationSite).map(_.short).getOrElse("")
+ private[spark] def getCreationSite: String = Option(creationSite).map(_.shortForm).getOrElse("")
private[spark] def elementClassTag: ClassTag[T] = classTag[T]
diff --git a/core/src/main/scala/org/apache/spark/scheduler/DAGScheduler.scala b/core/src/main/scala/org/apache/spark/scheduler/DAGScheduler.scala
index ede3c7d9f0..acb4c4946e 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/DAGScheduler.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/DAGScheduler.scala
@@ -455,7 +455,7 @@ class DAGScheduler(
waiter.awaitResult() match {
case JobSucceeded => {}
case JobFailed(exception: Exception) =>
- logInfo("Failed to run " + callSite.short)
+ logInfo("Failed to run " + callSite.shortForm)
throw exception
}
}
@@ -679,7 +679,7 @@ class DAGScheduler(
val job = new ActiveJob(jobId, finalStage, func, partitions, callSite, listener, properties)
clearCacheLocs()
logInfo("Got job %s (%s) with %d output partitions (allowLocal=%s)".format(
- job.jobId, callSite.short, partitions.length, allowLocal))
+ job.jobId, callSite.shortForm, partitions.length, allowLocal))
logInfo("Final stage: " + finalStage + "(" + finalStage.name + ")")
logInfo("Parents of final stage: " + finalStage.parents)
logInfo("Missing parents: " + getMissingParentStages(finalStage))
diff --git a/core/src/main/scala/org/apache/spark/scheduler/Stage.scala b/core/src/main/scala/org/apache/spark/scheduler/Stage.scala
index 8ec482a6f6..798cbc598d 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/Stage.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/Stage.scala
@@ -108,8 +108,8 @@ private[spark] class Stage(
def attemptId: Int = nextAttemptId
- val name = callSite.short
- val details = callSite.long
+ val name = callSite.shortForm
+ val details = callSite.longForm
override def toString = "Stage " + id
diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index 5784e974fb..1a4f4eba98 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -44,7 +44,7 @@ import org.apache.spark.executor.ExecutorUncaughtExceptionHandler
import org.apache.spark.serializer.{DeserializationStream, SerializationStream, SerializerInstance}
/** CallSite represents a place in user code. It can have a short and a long form. */
-private[spark] case class CallSite(short: String, long: String)
+private[spark] case class CallSite(shortForm: String, longForm: String)
/**
* Various utility methods used by Spark.
@@ -848,8 +848,8 @@ private[spark] object Utils extends Logging {
}
val callStackDepth = System.getProperty("spark.callstack.depth", "20").toInt
CallSite(
- short = "%s at %s:%s".format(lastSparkMethod, firstUserFile, firstUserLine),
- long = callStack.take(callStackDepth).mkString("\n"))
+ shortForm = "%s at %s:%s".format(lastSparkMethod, firstUserFile, firstUserLine),
+ longForm = callStack.take(callStackDepth).mkString("\n"))
}
/** Return a string containing part of a file from byte 'start' to 'end'. */
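For illustration, a small self-contained sketch of the renamed fields in use, mirroring the construction in Utils.getCallSite and the logging calls updated above. The object name CallSiteExample and the location string are hypothetical, and the private[spark] modifier is dropped so the snippet compiles on its own:

// Hypothetical usage of the renamed CallSite fields. The short form
// identifies the user-facing call site; the long form carries a
// captured stack trace (the patch's default depth is 20 frames).
case class CallSite(shortForm: String, longForm: String)

object CallSiteExample extends App {
  val callStack = Thread.currentThread.getStackTrace.map(_.toString)
  val callSite = CallSite(
    shortForm = "count at MyApp.scala:42", // hypothetical location
    longForm = callStack.take(20).mkString("\n"))
  println("Starting job: " + callSite.shortForm)
}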