aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorerenavsarogullari <erenavsarogullari@gmail.com>2016-09-21 14:47:18 +0100
committerSean Owen <sowen@cloudera.com>2016-09-21 14:47:18 +0100
commitdd7561d33761d119ded09cfba072147292bf6964 (patch)
treeacc5dc274d558df3076be9be957065fb54e2dbb3
parent25a020be99b6a540e4001e59e40d5d1c8aa53812 (diff)
downloadspark-dd7561d33761d119ded09cfba072147292bf6964.tar.gz
spark-dd7561d33761d119ded09cfba072147292bf6964.tar.bz2
spark-dd7561d33761d119ded09cfba072147292bf6964.zip
[CORE][MINOR] Add minor code change to TaskState and Task
## What changes were proposed in this pull request? - TaskState and ExecutorState expose isFailed and isFinished functions. It can be useful to add test coverage for different states. Currently, other enums do not expose any functions, so this PR covers just these two enums. - `private` access modifier is added for the Finished Task States Set - A minor doc change is added. ## How was this patch tested? New unit tests are added and run locally. Author: erenavsarogullari <erenavsarogullari@gmail.com> Closes #15143 from erenavsarogullari/SPARK-17584.
-rw-r--r--core/src/main/scala/org/apache/spark/TaskState.scala2
-rw-r--r--core/src/main/scala/org/apache/spark/scheduler/Task.scala2
2 files changed, 2 insertions, 2 deletions
diff --git a/core/src/main/scala/org/apache/spark/TaskState.scala b/core/src/main/scala/org/apache/spark/TaskState.scala
index cbace7b5f9..596ce67d4c 100644
--- a/core/src/main/scala/org/apache/spark/TaskState.scala
+++ b/core/src/main/scala/org/apache/spark/TaskState.scala
@@ -21,7 +21,7 @@ private[spark] object TaskState extends Enumeration {
val LAUNCHING, RUNNING, FINISHED, FAILED, KILLED, LOST = Value
- val FINISHED_STATES = Set(FINISHED, FAILED, KILLED, LOST)
+ private val FINISHED_STATES = Set(FINISHED, FAILED, KILLED, LOST)
type TaskState = Value
diff --git a/core/src/main/scala/org/apache/spark/scheduler/Task.scala b/core/src/main/scala/org/apache/spark/scheduler/Task.scala
index 1ed36bf069..ea9dc3988d 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/Task.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/Task.scala
@@ -239,7 +239,7 @@ private[spark] object Task {
* and return the task itself as a serialized ByteBuffer. The caller can then update its
* ClassLoaders and deserialize the task.
*
- * @return (taskFiles, taskJars, taskBytes)
+ * @return (taskFiles, taskJars, taskProps, taskBytes)
*/
def deserializeWithDependencies(serializedTask: ByteBuffer)
: (HashMap[String, Long], HashMap[String, Long], Properties, ByteBuffer) = {