diff options
author | Reynold Xin <rxin@apache.org> | 2013-10-20 12:14:59 -0700 |
---|---|---|
committer | Reynold Xin <rxin@apache.org> | 2013-10-20 12:22:07 -0700 |
commit | 8e1937f8ba999c875cc0d4403c4aa92d0a045de4 (patch) | |
tree | 84e27a30aa822790a99b78e03a41d07df5079eaf /core/src/main/scala/org/apache/hadoop/mapred/SparkHadoopMapRedUtil.scala | |
parent | e4abb75d70aa08377829f635fe6135d94e28f434 (diff) | |
download | spark-8e1937f8ba999c875cc0d4403c4aa92d0a045de4.tar.gz spark-8e1937f8ba999c875cc0d4403c4aa92d0a045de4.tar.bz2 spark-8e1937f8ba999c875cc0d4403c4aa92d0a045de4.zip |
Made the following traits/interfaces/classes non-public:
SparkHadoopWriter
SparkHadoopMapRedUtil
SparkHadoopMapReduceUtil
SparkHadoopUtil
PythonAccumulatorParam
JobLogger
BlockManagerSlaveActor
Diffstat (limited to 'core/src/main/scala/org/apache/hadoop/mapred/SparkHadoopMapRedUtil.scala')
-rw-r--r-- | core/src/main/scala/org/apache/hadoop/mapred/SparkHadoopMapRedUtil.scala | 17 |
1 file changed, 13 insertions, 4 deletions
diff --git a/core/src/main/scala/org/apache/hadoop/mapred/SparkHadoopMapRedUtil.scala b/core/src/main/scala/org/apache/hadoop/mapred/SparkHadoopMapRedUtil.scala index f87460039b..0c47afae54 100644 --- a/core/src/main/scala/org/apache/hadoop/mapred/SparkHadoopMapRedUtil.scala +++ b/core/src/main/scala/org/apache/hadoop/mapred/SparkHadoopMapRedUtil.scala @@ -17,20 +17,29 @@ package org.apache.hadoop.mapred +private[apache] trait SparkHadoopMapRedUtil { def newJobContext(conf: JobConf, jobId: JobID): JobContext = { - val klass = firstAvailableClass("org.apache.hadoop.mapred.JobContextImpl", "org.apache.hadoop.mapred.JobContext"); - val ctor = klass.getDeclaredConstructor(classOf[JobConf], classOf[org.apache.hadoop.mapreduce.JobID]) + val klass = firstAvailableClass("org.apache.hadoop.mapred.JobContextImpl", + "org.apache.hadoop.mapred.JobContext") + val ctor = klass.getDeclaredConstructor(classOf[JobConf], + classOf[org.apache.hadoop.mapreduce.JobID]) ctor.newInstance(conf, jobId).asInstanceOf[JobContext] } def newTaskAttemptContext(conf: JobConf, attemptId: TaskAttemptID): TaskAttemptContext = { - val klass = firstAvailableClass("org.apache.hadoop.mapred.TaskAttemptContextImpl", "org.apache.hadoop.mapred.TaskAttemptContext") + val klass = firstAvailableClass("org.apache.hadoop.mapred.TaskAttemptContextImpl", + "org.apache.hadoop.mapred.TaskAttemptContext") val ctor = klass.getDeclaredConstructor(classOf[JobConf], classOf[TaskAttemptID]) ctor.newInstance(conf, attemptId).asInstanceOf[TaskAttemptContext] } - def newTaskAttemptID(jtIdentifier: String, jobId: Int, isMap: Boolean, taskId: Int, attemptId: Int) = { + def newTaskAttemptID( + jtIdentifier: String, + jobId: Int, + isMap: Boolean, + taskId: Int, + attemptId: Int) = { new TaskAttemptID(jtIdentifier, jobId, isMap, taskId, attemptId) } |