From 0a38637d05d2338503ecceacfb911a6da6d49538 Mon Sep 17 00:00:00 2001
From: Reynold Xin
Date: Mon, 21 Dec 2015 22:15:52 -0800
Subject: [SPARK-11807] Remove support for Hadoop < 2.2

i.e. Hadoop 1 and Hadoop 2.0

Author: Reynold Xin

Closes #10404 from rxin/SPARK-11807.
---
 .../apache/spark/deploy/history/FsHistoryProvider.scala | 10 +---------
 .../spark/mapreduce/SparkHadoopMapReduceUtil.scala      | 17 ++---------------
 2 files changed, 3 insertions(+), 24 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala b/core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala
index 718efc4f3b..6e91d73b6e 100644
--- a/core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala
@@ -663,16 +663,8 @@ private[history] class FsHistoryProvider(conf: SparkConf, clock: Clock)
 
   // For testing.
   private[history] def isFsInSafeMode(dfs: DistributedFileSystem): Boolean = {
-    val hadoop1Class = "org.apache.hadoop.hdfs.protocol.FSConstants$SafeModeAction"
     val hadoop2Class = "org.apache.hadoop.hdfs.protocol.HdfsConstants$SafeModeAction"
-    val actionClass: Class[_] =
-      try {
-        getClass().getClassLoader().loadClass(hadoop2Class)
-      } catch {
-        case _: ClassNotFoundException =>
-          getClass().getClassLoader().loadClass(hadoop1Class)
-      }
-
+    val actionClass: Class[_] = getClass().getClassLoader().loadClass(hadoop2Class)
     val action = actionClass.getField("SAFEMODE_GET").get(null)
     val method = dfs.getClass().getMethod("setSafeMode", action.getClass())
     method.invoke(dfs, action).asInstanceOf[Boolean]
diff --git a/core/src/main/scala/org/apache/spark/mapreduce/SparkHadoopMapReduceUtil.scala b/core/src/main/scala/org/apache/spark/mapreduce/SparkHadoopMapReduceUtil.scala
index 943ebcb7bd..82d807fad8 100644
--- a/core/src/main/scala/org/apache/spark/mapreduce/SparkHadoopMapReduceUtil.scala
+++ b/core/src/main/scala/org/apache/spark/mapreduce/SparkHadoopMapReduceUtil.scala
@@ -26,17 +26,13 @@ import org.apache.spark.util.Utils
 
 private[spark] trait SparkHadoopMapReduceUtil {
   def newJobContext(conf: Configuration, jobId: JobID): JobContext = {
-    val klass = firstAvailableClass(
-        "org.apache.hadoop.mapreduce.task.JobContextImpl",  // hadoop2, hadoop2-yarn
-        "org.apache.hadoop.mapreduce.JobContext")           // hadoop1
+    val klass = Utils.classForName("org.apache.hadoop.mapreduce.task.JobContextImpl")
     val ctor = klass.getDeclaredConstructor(classOf[Configuration], classOf[JobID])
     ctor.newInstance(conf, jobId).asInstanceOf[JobContext]
   }
 
   def newTaskAttemptContext(conf: Configuration, attemptId: TaskAttemptID): TaskAttemptContext = {
-    val klass = firstAvailableClass(
-        "org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl",  // hadoop2, hadoop2-yarn
-        "org.apache.hadoop.mapreduce.TaskAttemptContext")           // hadoop1
+    val klass = Utils.classForName("org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl")
     val ctor = klass.getDeclaredConstructor(classOf[Configuration], classOf[TaskAttemptID])
     ctor.newInstance(conf, attemptId).asInstanceOf[TaskAttemptContext]
   }
@@ -69,13 +65,4 @@ trait SparkHadoopMapReduceUtil {
       }
     }
   }
-
-  private def firstAvailableClass(first: String, second: String): Class[_] = {
-    try {
-      Utils.classForName(first)
-    } catch {
-      case e: ClassNotFoundException =>
-        Utils.classForName(second)
-    }
-  }
 }
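
For reference, the safe-mode probe that survives this patch can be exercised
on its own. The sketch below mirrors the post-patch isFsInSafeMode body; the
SafeModeProbe wrapper object is illustrative, while the class, field, and
method names are taken verbatim from the diff. It assumes Hadoop 2.2+, where
HdfsConstants$SafeModeAction always exists, which is exactly why the old
ClassNotFoundException fallback to the Hadoop 1 FSConstants$SafeModeAction
became dead code.

import org.apache.hadoop.hdfs.DistributedFileSystem

// Illustrative standalone wrapper around the reflection kept in
// FsHistoryProvider (assumes Hadoop 2.2+ on the classpath).
object SafeModeProbe {
  def isFsInSafeMode(dfs: DistributedFileSystem): Boolean = {
    // HdfsConstants$SafeModeAction is always present on Hadoop 2.2+.
    val actionClass: Class[_] = getClass().getClassLoader()
      .loadClass("org.apache.hadoop.hdfs.protocol.HdfsConstants$SafeModeAction")
    // SAFEMODE_GET is a static enum constant, hence the null receiver.
    val action = actionClass.getField("SAFEMODE_GET").get(null)
    // Resolve setSafeMode(SafeModeAction) on the concrete class and invoke it.
    val method = dfs.getClass().getMethod("setSafeMode", action.getClass())
    method.invoke(dfs, action).asInstanceOf[Boolean]
  }
}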
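
The context-factory simplification follows the same logic: JobContextImpl and
TaskAttemptContextImpl are guaranteed to exist on Hadoop 2.2+, so the
firstAvailableClass fallback can go. A standalone sketch of the new
newJobContext path, with Class.forName standing in for Spark's
Utils.classForName purely so it compiles without Spark on the classpath:

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.mapreduce.{JobContext, JobID}

// Sketch of the post-patch factory (assumes Hadoop 2.2+). The reflective
// constructor lookup matches the diff; only the class-loading helper differs.
object ContextFactorySketch {
  def newJobContext(conf: Configuration, jobId: JobID): JobContext = {
    val klass = Class.forName("org.apache.hadoop.mapreduce.task.JobContextImpl")
    val ctor = klass.getDeclaredConstructor(classOf[Configuration], classOf[JobID])
    ctor.newInstance(conf, jobId).asInstanceOf[JobContext]
  }
}

newTaskAttemptContext has the identical shape, substituting
TaskAttemptContextImpl and TaskAttemptID.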