diff options
author | Matei Zaharia <matei@eecs.berkeley.edu> | 2011-05-30 23:54:14 -0700 |
---|---|---|
committer | Matei Zaharia <matei@eecs.berkeley.edu> | 2011-05-30 23:54:14 -0700 |
commit | 8b0390d344e7ddcc574fc58f4f90f96555e53878 (patch) | |
tree | 3e3c048c094a8a357844f50ca7d8310595bdbf2c | |
parent | c501cff9243895bcbf7e5905965e22f05ba2132d (diff) | |
download | spark-8b0390d344e7ddcc574fc58f4f90f96555e53878.tar.gz spark-8b0390d344e7ddcc574fc58f4f90f96555e53878.tar.bz2 spark-8b0390d344e7ddcc574fc58f4f90f96555e53878.zip |
Instantiate NullWritable properly in HadoopFile
-rw-r--r-- | core/src/main/scala/spark/HadoopFile.scala | 14 |
1 file changed, 12 insertions, 2 deletions
diff --git a/core/src/main/scala/spark/HadoopFile.scala b/core/src/main/scala/spark/HadoopFile.scala index 0a7996c7bd..beb53ce1a5 100644 --- a/core/src/main/scala/spark/HadoopFile.scala +++ b/core/src/main/scala/spark/HadoopFile.scala @@ -3,6 +3,7 @@ package spark import mesos.SlaveOffer import org.apache.hadoop.io.LongWritable +import org.apache.hadoop.io.NullWritable import org.apache.hadoop.io.Text import org.apache.hadoop.mapred.FileInputFormat import org.apache.hadoop.mapred.InputFormat @@ -51,6 +52,15 @@ extends RDD[(K, V)](sc) { .asInstanceOf[InputFormat[K, V]] } + // Helper method for creating a Hadoop Writable, because the commonly used + // NullWritable class has no constructor + def createWritable[T](clazz: Class[T]): T = { + if (clazz == classOf[NullWritable]) + NullWritable.get().asInstanceOf[T] + else + clazz.newInstance() + } + override def splits = splits_ override def compute(theSplit: Split) = new Iterator[(K, V)] { @@ -63,8 +73,8 @@ extends RDD[(K, V)](sc) { val fmt = createInputFormat(conf) reader = fmt.getRecordReader(split.inputSplit.value, conf, Reporter.NULL) - val key: K = keyClass.newInstance() - val value: V = valueClass.newInstance() + val key: K = createWritable(keyClass) + val value: V = createWritable(valueClass) var gotNext = false var finished = false |