author    Reynold Xin <rxin@apache.org>  2014-09-30 15:55:04 -0700
committer Reynold Xin <rxin@apache.org>  2014-09-30 15:55:04 -0700
commit    6c696d7da64e764111b680b1eee040a61f944c26 (patch)
tree      b901c4db68e335892858988cfb48959bf104fa1a /core
parent    8764fe368bbd72fe76ed318faad0e97a7279e2fe (diff)
Remove compiler warning from TaskContext change.
Author: Reynold Xin <rxin@apache.org>

Closes #2602 from rxin/warning and squashes the following commits:

130186b [Reynold Xin] Remove compiler warning from TaskContext change.
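The patch simply swaps the old TaskContext accessors for the getter methods (getAttemptId, getStageId, getPartitionId), which silences the compiler warning left over from the parent TaskContext change. A minimal sketch of the resulting call pattern (the object and helper names are hypothetical; only the getters shown in the diff below are assumed):

    import org.apache.spark.TaskContext

    object AttemptNumberSketch {
      // Hadoop wants a 32-bit task attempt ID; Spark's attempt ID is a Long,
      // so it is rolled around with a mod before being narrowed to an Int.
      def hadoopAttemptNumber(context: TaskContext): Int =
        (context.getAttemptId % Int.MaxValue).toInt
    }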
Diffstat (limited to 'core')
-rw-r--r--  core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala b/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala
index 929ded58a3..0d97506450 100644
--- a/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala
@@ -956,9 +956,9 @@ class PairRDDFunctions[K, V](self: RDD[(K, V)])
val writeShard = (context: TaskContext, iter: Iterator[(K,V)]) => {
// Hadoop wants a 32-bit task attempt ID, so if ours is bigger than Int.MaxValue, roll it
// around by taking a mod. We expect that no task will be attempted 2 billion times.
- val attemptNumber = (context.attemptId % Int.MaxValue).toInt
+ val attemptNumber = (context.getAttemptId % Int.MaxValue).toInt
/* "reduce task" <split #> <attempt # = spark task #> */
- val attemptId = newTaskAttemptID(jobtrackerID, stageId, isMap = false, context.partitionId,
+ val attemptId = newTaskAttemptID(jobtrackerID, stageId, isMap = false, context.getPartitionId,
attemptNumber)
val hadoopContext = newTaskAttemptContext(wrappedConf.value, attemptId)
val format = outfmt.newInstance
@@ -1027,9 +1027,9 @@ class PairRDDFunctions[K, V](self: RDD[(K, V)])
val writeToFile = (context: TaskContext, iter: Iterator[(K, V)]) => {
// Hadoop wants a 32-bit task attempt ID, so if ours is bigger than Int.MaxValue, roll it
// around by taking a mod. We expect that no task will be attempted 2 billion times.
- val attemptNumber = (context.attemptId % Int.MaxValue).toInt
+ val attemptNumber = (context.getAttemptId % Int.MaxValue).toInt
- writer.setup(context.stageId, context.partitionId, attemptNumber)
+ writer.setup(context.getStageId, context.getPartitionId, attemptNumber)
writer.open()
try {
var count = 0