author     Reynold Xin <rxin@cs.berkeley.edu>    2013-08-05 22:33:00 -0700
committer  Reynold Xin <rxin@cs.berkeley.edu>    2013-08-05 22:33:00 -0700
commit     d031f73679db02e57e70a0ac1caf6a597f8e76c0 (patch)
tree       85d8c054f7f64a4ef005c6ddac649c4d858719b2 /core
parent     1b63dea816fd56837ac99181e4b90ebf9cd4aa28 (diff)
parent     a30866438bf71c83575a3e83887bd4bf33c7cdde (diff)
Merge pull request #782 from WANdisco/master
SHARK-94 Log the files computed by HadoopRDD and NewHadoopRDD
Diffstat (limited to 'core')
 core/src/main/scala/spark/rdd/HadoopRDD.scala    | 1 +
 core/src/main/scala/spark/rdd/NewHadoopRDD.scala | 1 +
 2 files changed, 2 insertions(+), 0 deletions(-)
diff --git a/core/src/main/scala/spark/rdd/HadoopRDD.scala b/core/src/main/scala/spark/rdd/HadoopRDD.scala
index d0fdeb741e..fd00d59c77 100644
--- a/core/src/main/scala/spark/rdd/HadoopRDD.scala
+++ b/core/src/main/scala/spark/rdd/HadoopRDD.scala
@@ -88,6 +88,7 @@ class HadoopRDD[K, V](
   override def compute(theSplit: Partition, context: TaskContext) = new NextIterator[(K, V)] {
     val split = theSplit.asInstanceOf[HadoopPartition]
+    logInfo("Input split: " + split.inputSplit)
     var reader: RecordReader[K, V] = null
     val conf = confBroadcast.value.value
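
The logged text is whatever the split's toString produces; split.inputSplit here is the partition's SerializableWritable wrapper, which delegates toString to the underlying Hadoop split, and for the common FileSplit case that string is the file path plus byte range. A minimal sketch of that behavior, assuming the old mapred API (the object name and HDFS path are made up for illustration):

import org.apache.hadoop.fs.Path
import org.apache.hadoop.mapred.FileSplit

object SplitLogDemo {
  def main(args: Array[String]): Unit = {
    // FileSplit.toString renders as "path:start+length", so the added
    // logInfo line records exactly which file and byte range a task reads.
    val split = new FileSplit(
      new Path("hdfs://namenode:8020/data/part-00000"), // hypothetical path
      0L, 64L * 1024 * 1024, Array.empty[String])
    println("Input split: " + split)
    // prints: Input split: hdfs://namenode:8020/data/part-00000:0+67108864
  }
}
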
diff --git a/core/src/main/scala/spark/rdd/NewHadoopRDD.scala b/core/src/main/scala/spark/rdd/NewHadoopRDD.scala
index 17fe805fd4..0b71608169 100644
--- a/core/src/main/scala/spark/rdd/NewHadoopRDD.scala
+++ b/core/src/main/scala/spark/rdd/NewHadoopRDD.scala
@@ -73,6 +73,7 @@ class NewHadoopRDD[K, V](
   override def compute(theSplit: Partition, context: TaskContext) = new Iterator[(K, V)] {
     val split = theSplit.asInstanceOf[NewHadoopPartition]
+    logInfo("Input split: " + split.serializableHadoopSplit)
     val conf = confBroadcast.value.value
     val attemptId = newTaskAttemptID(jobtrackerId, id, true, split.index, 0)
     val hadoopAttemptContext = newTaskAttemptContext(conf, attemptId)
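
The NewHadoopRDD change applies the same pattern to the new mapreduce API: split.serializableHadoopSplit is again a SerializableWritable wrapper that delegates toString to the wrapped split, so the log line comes out in the same form. A parallel sketch with the new-API FileSplit (object name and path again hypothetical):

import org.apache.hadoop.fs.Path
import org.apache.hadoop.mapreduce.lib.input.FileSplit

object NewApiSplitLogDemo {
  def main(args: Array[String]): Unit = {
    // The mapreduce-API FileSplit prints the identical "path:start+length" form.
    val split = new FileSplit(
      new Path("hdfs://namenode:8020/data/part-00001"), // hypothetical path
      0L, 128L * 1024 * 1024, Array.empty[String])
    println("Input split: " + split)
    // prints: Input split: hdfs://namenode:8020/data/part-00001:0+134217728
  }
}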