about | summary | refs | log | tree | commit | diff
diff options
context:
space:
mode:
author    Neville Li <neville@spotify.com>  2014-06-07 16:22:26 -0700
committer Patrick Wendell <pwendell@gmail.com>  2014-06-07 16:22:26 -0700
commit    7b877b27053bfb7092e250e01a3b887e1b50a109 (patch)
tree      0ae8ba61425d8489f396282798bed81c9c4077c7
parent    3ace10dc91e72ebe5013d5106eb0968a77c99d8d (diff)
download  spark-7b877b27053bfb7092e250e01a3b887e1b50a109.tar.gz
          spark-7b877b27053bfb7092e250e01a3b887e1b50a109.tar.bz2
          spark-7b877b27053bfb7092e250e01a3b887e1b50a109.zip
SPARK-2056 Set RDD name to input path
Author: Neville Li <neville@spotify.com>

Closes #992 from nevillelyh/master and squashes the following commits:

3011739 [Neville Li] [SPARK-2056] Set RDD name to input path
-rw-r--r--  core/src/main/scala/org/apache/spark/SparkContext.scala | 8
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index d941aea9d7..d721aba709 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -455,7 +455,7 @@ class SparkContext(config: SparkConf) extends Logging {
*/
def textFile(path: String, minPartitions: Int = defaultMinPartitions): RDD[String] = {
hadoopFile(path, classOf[TextInputFormat], classOf[LongWritable], classOf[Text],
- minPartitions).map(pair => pair._2.toString)
+ minPartitions).map(pair => pair._2.toString).setName(path)
}
/**
@@ -496,7 +496,7 @@ class SparkContext(config: SparkConf) extends Logging {
classOf[String],
classOf[String],
updateConf,
- minPartitions)
+ minPartitions).setName(path)
}
/**
@@ -551,7 +551,7 @@ class SparkContext(config: SparkConf) extends Logging {
inputFormatClass,
keyClass,
valueClass,
- minPartitions)
+ minPartitions).setName(path)
}
/**
@@ -623,7 +623,7 @@ class SparkContext(config: SparkConf) extends Logging {
val job = new NewHadoopJob(conf)
NewFileInputFormat.addInputPath(job, new Path(path))
val updatedConf = job.getConfiguration
- new NewHadoopRDD(this, fClass, kClass, vClass, updatedConf)
+ new NewHadoopRDD(this, fClass, kClass, vClass, updatedConf).setName(path)
}
/**