author    Reynold Xin <rxin@databricks.com>  2015-12-21 22:15:52 -0800
committer Reynold Xin <rxin@databricks.com>  2015-12-21 22:15:52 -0800
commit    0a38637d05d2338503ecceacfb911a6da6d49538 (patch)
tree      080b3f2485cd1e2d93f23b764181415df795719d /core
parent    29cecd4a42f6969613e5b2a40f2724f99e7eec01 (diff)
[SPARK-11807] Remove support for Hadoop < 2.2
i.e. Hadoop 1 and Hadoop 2.0

Author: Reynold Xin <rxin@databricks.com>

Closes #10404 from rxin/SPARK-11807.
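For context, the pattern this commit deletes is the classic reflective fallback: probe for the Hadoop 2 class first and fall back to the Hadoop 1 name if it is absent. A minimal sketch of that shape, mirroring the firstAvailableClass helper removed below (Class.forName stands in for Spark's Utils.classForName):

// Sketch of the removed pattern: try the Hadoop 2 class name first,
// fall back to the Hadoop 1 name on ClassNotFoundException.
// With Hadoop < 2.2 unsupported, the fallback branch is dead code.
def firstAvailableClass(hadoop2Name: String, hadoop1Name: String): Class[_] =
  try {
    Class.forName(hadoop2Name)
  } catch {
    case _: ClassNotFoundException => Class.forName(hadoop1Name)
  }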
Diffstat (limited to 'core')
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala    10
-rw-r--r--  core/src/main/scala/org/apache/spark/mapreduce/SparkHadoopMapReduceUtil.scala  17
2 files changed, 3 insertions, 24 deletions
diff --git a/core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala b/core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala
index 718efc4f3b..6e91d73b6e 100644
--- a/core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala
@@ -663,16 +663,8 @@ private[history] class FsHistoryProvider(conf: SparkConf, clock: Clock)
 
   // For testing.
   private[history] def isFsInSafeMode(dfs: DistributedFileSystem): Boolean = {
-    val hadoop1Class = "org.apache.hadoop.hdfs.protocol.FSConstants$SafeModeAction"
     val hadoop2Class = "org.apache.hadoop.hdfs.protocol.HdfsConstants$SafeModeAction"
-    val actionClass: Class[_] =
-      try {
-        getClass().getClassLoader().loadClass(hadoop2Class)
-      } catch {
-        case _: ClassNotFoundException =>
-          getClass().getClassLoader().loadClass(hadoop1Class)
-      }
-
+    val actionClass: Class[_] = getClass().getClassLoader().loadClass(hadoop2Class)
     val action = actionClass.getField("SAFEMODE_GET").get(null)
     val method = dfs.getClass().getMethod("setSafeMode", action.getClass())
     method.invoke(dfs, action).asInstanceOf[Boolean]
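Read on its own, the simplified method still resolves the HDFS class at runtime; only the Hadoop 1 fallback is gone. A self-contained sketch of the resulting check, with Class.forName standing in for the classloader lookup above:

import org.apache.hadoop.hdfs.DistributedFileSystem

// Matches the post-patch logic above: load the Hadoop 2 SafeModeAction enum,
// fetch its SAFEMODE_GET constant, and invoke setSafeMode reflectively.
def isFsInSafeMode(dfs: DistributedFileSystem): Boolean = {
  val actionClass: Class[_] =
    Class.forName("org.apache.hadoop.hdfs.protocol.HdfsConstants$SafeModeAction")
  val action = actionClass.getField("SAFEMODE_GET").get(null)  // enum constant, read as a static field
  val method = dfs.getClass().getMethod("setSafeMode", action.getClass())
  method.invoke(dfs, action).asInstanceOf[Boolean]             // true if the NameNode is in safe mode
}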
diff --git a/core/src/main/scala/org/apache/spark/mapreduce/SparkHadoopMapReduceUtil.scala b/core/src/main/scala/org/apache/spark/mapreduce/SparkHadoopMapReduceUtil.scala
index 943ebcb7bd..82d807fad8 100644
--- a/core/src/main/scala/org/apache/spark/mapreduce/SparkHadoopMapReduceUtil.scala
+++ b/core/src/main/scala/org/apache/spark/mapreduce/SparkHadoopMapReduceUtil.scala
@@ -26,17 +26,13 @@ import org.apache.spark.util.Utils
 private[spark]
 trait SparkHadoopMapReduceUtil {
   def newJobContext(conf: Configuration, jobId: JobID): JobContext = {
-    val klass = firstAvailableClass(
-        "org.apache.hadoop.mapreduce.task.JobContextImpl",  // hadoop2, hadoop2-yarn
-        "org.apache.hadoop.mapreduce.JobContext")            // hadoop1
+    val klass = Utils.classForName("org.apache.hadoop.mapreduce.task.JobContextImpl")
     val ctor = klass.getDeclaredConstructor(classOf[Configuration], classOf[JobID])
     ctor.newInstance(conf, jobId).asInstanceOf[JobContext]
   }
 
   def newTaskAttemptContext(conf: Configuration, attemptId: TaskAttemptID): TaskAttemptContext = {
-    val klass = firstAvailableClass(
-        "org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl",  // hadoop2, hadoop2-yarn
-        "org.apache.hadoop.mapreduce.TaskAttemptContext")           // hadoop1
+    val klass = Utils.classForName("org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl")
     val ctor = klass.getDeclaredConstructor(classOf[Configuration], classOf[TaskAttemptID])
     ctor.newInstance(conf, attemptId).asInstanceOf[TaskAttemptContext]
   }
@@ -69,13 +65,4 @@ trait SparkHadoopMapReduceUtil {
       }
     }
   }
-
-  private def firstAvailableClass(first: String, second: String): Class[_] = {
-    try {
-      Utils.classForName(first)
-    } catch {
-      case e: ClassNotFoundException =>
-        Utils.classForName(second)
-    }
-  }
 }
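A hedged usage sketch of what the simplified factory now resolves to: constructing a Hadoop 2 JobContextImpl directly through its (Configuration, JobID) constructor. The JobID arguments are illustrative, and Class.forName stands in for Utils.classForName:

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.mapreduce.{JobContext, JobID}

// Resolve the Hadoop 2 implementation class unconditionally, as newJobContext now does.
val klass = Class.forName("org.apache.hadoop.mapreduce.task.JobContextImpl")
val ctor = klass.getDeclaredConstructor(classOf[Configuration], classOf[JobID])
val jobContext = ctor
  .newInstance(new Configuration(), new JobID("example", 0))   // illustrative job id
  .asInstanceOf[JobContext]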