about summary refs log tree commit diff
diff options
context:
space:
mode:
authorMatei Zaharia <matei@eecs.berkeley.edu>2013-10-15 23:14:27 -0700
committerMatei Zaharia <matei@eecs.berkeley.edu>2013-10-15 23:14:27 -0700
commit4e46fde818a9b94960ab83fa6390952e2d5dd3e6 (patch)
tree6f5e20a2e5a12526532ec51d1b9a34faf393cb34
parentb5346064d6a2b6858e77b718d2fbadd691374282 (diff)
parent65b46236e7bb8273ab99252322ffff84752bb763 (diff)
downloadspark-4e46fde818a9b94960ab83fa6390952e2d5dd3e6.tar.gz
spark-4e46fde818a9b94960ab83fa6390952e2d5dd3e6.tar.bz2
spark-4e46fde818a9b94960ab83fa6390952e2d5dd3e6.zip
Merge pull request #62 from harveyfeng/master
Make TaskContext's stageId publicly accessible.
-rw-r--r-- core/src/main/scala/org/apache/spark/SparkHadoopWriter.scala | 5
-rw-r--r-- core/src/main/scala/org/apache/spark/TaskContext.scala | 2
2 files changed, 5 insertions, 2 deletions
diff --git a/core/src/main/scala/org/apache/spark/SparkHadoopWriter.scala b/core/src/main/scala/org/apache/spark/SparkHadoopWriter.scala
index 2bab9d6e3d..afa76a4a76 100644
--- a/core/src/main/scala/org/apache/spark/SparkHadoopWriter.scala
+++ b/core/src/main/scala/org/apache/spark/SparkHadoopWriter.scala
@@ -36,7 +36,10 @@ import org.apache.spark.SerializableWritable
* Saves the RDD using a JobConf, which should contain an output key class, an output value class,
* a filename to write to, etc, exactly like in a Hadoop MapReduce job.
*/
-class SparkHadoopWriter(@transient jobConf: JobConf) extends Logging with SparkHadoopMapRedUtil with Serializable {
+class SparkHadoopWriter(@transient jobConf: JobConf)
+ extends Logging
+ with SparkHadoopMapRedUtil
+ with Serializable {
private val now = new Date()
private val conf = new SerializableWritable(jobConf)
diff --git a/core/src/main/scala/org/apache/spark/TaskContext.scala b/core/src/main/scala/org/apache/spark/TaskContext.scala
index 51584d686d..cae983ed4c 100644
--- a/core/src/main/scala/org/apache/spark/TaskContext.scala
+++ b/core/src/main/scala/org/apache/spark/TaskContext.scala
@@ -22,7 +22,7 @@ import scala.collection.mutable.ArrayBuffer
import org.apache.spark.executor.TaskMetrics
class TaskContext(
- private[spark] val stageId: Int,
+ val stageId: Int,
val partitionId: Int,
val attemptId: Long,
val runningLocally: Boolean = false,