aboutsummaryrefslogtreecommitdiff
path: root/core/src/test
diff options
context:
space:
mode:
authorYuming Wang <wgyumg@gmail.com>2017-02-28 10:13:42 +0000
committerSean Owen <sowen@cloudera.com>2017-02-28 10:13:42 +0000
commit9b8eca65dcf68129470ead39362ce870ffb0bb1d (patch)
tree282c7af7443b31416ff3f9821615f18635de916b /core/src/test
parenta350bc16d36c58b48ac01f0258678ffcdb77e793 (diff)
downloadspark-9b8eca65dcf68129470ead39362ce870ffb0bb1d.tar.gz
spark-9b8eca65dcf68129470ead39362ce870ffb0bb1d.tar.bz2
spark-9b8eca65dcf68129470ead39362ce870ffb0bb1d.zip
[SPARK-19660][CORE][SQL] Replace the configuration property names that are deprecated in the version of Hadoop 2.6
## What changes were proposed in this pull request? Replace all the deprecated Hadoop configuration property names according to [DeprecatedProperties](https://hadoop.apache.org/docs/r2.6.0/hadoop-project-dist/hadoop-common/DeprecatedProperties.html), except: https://github.com/apache/spark/blob/v2.1.0/python/pyspark/sql/tests.py#L1533 https://github.com/apache/spark/blob/v2.1.0/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala#L987 https://github.com/apache/spark/blob/v2.1.0/sql/core/src/main/scala/org/apache/spark/sql/execution/command/SetCommand.scala#L45 https://github.com/apache/spark/blob/v2.1.0/sql/core/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala#L614 ## How was this patch tested? Existing tests. Author: Yuming Wang <wgyumg@gmail.com> Closes #16990 from wangyum/HadoopDeprecatedProperties.
Diffstat (limited to 'core/src/test')
-rw-r--r--core/src/test/scala/org/apache/spark/FileSuite.scala5
1 file changed, 3 insertions(+), 2 deletions(-)
diff --git a/core/src/test/scala/org/apache/spark/FileSuite.scala b/core/src/test/scala/org/apache/spark/FileSuite.scala
index a2d3177c5c..5be0121db5 100644
--- a/core/src/test/scala/org/apache/spark/FileSuite.scala
+++ b/core/src/test/scala/org/apache/spark/FileSuite.scala
@@ -401,7 +401,7 @@ class FileSuite extends SparkFunSuite with LocalSparkContext {
job.setOutputKeyClass(classOf[String])
job.setOutputValueClass(classOf[String])
job.set("mapred.output.format.class", classOf[TextOutputFormat[String, String]].getName)
- job.set("mapred.output.dir", tempDir.getPath + "/outputDataset_old")
+ job.set("mapreduce.output.fileoutputformat.outputdir", tempDir.getPath + "/outputDataset_old")
randomRDD.saveAsHadoopDataset(job)
assert(new File(tempDir.getPath + "/outputDataset_old/part-00000").exists() === true)
}
@@ -415,7 +415,8 @@ class FileSuite extends SparkFunSuite with LocalSparkContext {
job.setOutputValueClass(classOf[String])
job.setOutputFormatClass(classOf[NewTextOutputFormat[String, String]])
val jobConfig = job.getConfiguration
- jobConfig.set("mapred.output.dir", tempDir.getPath + "/outputDataset_new")
+ jobConfig.set("mapreduce.output.fileoutputformat.outputdir",
+ tempDir.getPath + "/outputDataset_new")
randomRDD.saveAsNewAPIHadoopDataset(jobConfig)
assert(new File(tempDir.getPath + "/outputDataset_new/part-r-00000").exists() === true)
}