From 235f4ac6fc05802a00889a3a0b39377711cbc7e3 Mon Sep 17 00:00:00 2001
From: bomeng
Date: Thu, 10 Mar 2016 11:17:35 -0800
Subject: [SPARK-13727][CORE] SparkConf.contains does not consider deprecated keys

The contains() method is not consistent with get() when the key is deprecated. For example:

import org.apache.spark.SparkConf
val conf = new SparkConf()
conf.set("spark.io.compression.lz4.block.size", "12345")  // logs a deprecation warning
conf.get("spark.io.compression.lz4.block.size")           // returns "12345"
conf.get("spark.io.compression.lz4.blockSize")            // returns "12345"
conf.contains("spark.io.compression.lz4.block.size")      // returns true
conf.contains("spark.io.compression.lz4.blockSize")       // returns false

The fix makes contains() consistent with get(). A test case has been added for this; unit tests should be sufficient.

Author: bomeng

Closes #11568 from bomeng/SPARK-13727.
---
 core/src/main/scala/org/apache/spark/SparkConf.scala      |  5 ++++-
 core/src/test/scala/org/apache/spark/SparkConfSuite.scala | 14 ++++++++++++++
 2 files changed, 18 insertions(+), 1 deletion(-)

diff --git a/core/src/main/scala/org/apache/spark/SparkConf.scala b/core/src/main/scala/org/apache/spark/SparkConf.scala
index 16423e771a..f9c01f30f1 100644
--- a/core/src/main/scala/org/apache/spark/SparkConf.scala
+++ b/core/src/main/scala/org/apache/spark/SparkConf.scala
@@ -388,7 +388,10 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
   def getAppId: String = get("spark.app.id")
 
   /** Does the configuration contain a given parameter? */
-  def contains(key: String): Boolean = settings.containsKey(key)
+  def contains(key: String): Boolean = {
+    settings.containsKey(key) ||
+      configsWithAlternatives.get(key).toSeq.flatten.exists { alt => contains(alt.key) }
+  }
 
   /** Copy this object */
   override def clone: SparkConf = {
diff --git a/core/src/test/scala/org/apache/spark/SparkConfSuite.scala b/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
index 79881f30b2..a883d1b57e 100644
--- a/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
@@ -267,6 +267,20 @@ class SparkConfSuite extends SparkFunSuite with LocalSparkContext with ResetSyst
     conf.set("spark.akka.lookupTimeout", "4")
     assert(RpcUtils.lookupRpcTimeout(conf).duration === (4 seconds))
   }
+
+  test("SPARK-13727") {
+    val conf = new SparkConf()
+    // set the conf in the deprecated way
+    conf.set("spark.io.compression.lz4.block.size", "12345")
+    // get the conf in the recommended way
+    assert(conf.get("spark.io.compression.lz4.blockSize") === "12345")
+    // we can still get the conf in the deprecated way
+    assert(conf.get("spark.io.compression.lz4.block.size") === "12345")
+    // the contains() also works as expected
+    assert(conf.contains("spark.io.compression.lz4.block.size"))
+    assert(conf.contains("spark.io.compression.lz4.blockSize"))
+    assert(conf.contains("spark.io.unknown") === false)
+  }
 }
 
 class Class1 {}
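
Note: the following is a minimal, self-contained sketch of the alternate-key fallback pattern the fix relies on. It is not the actual SparkConf implementation; the object and member names below (DemoConf, alternatives, settings) are illustrative only, with alternatives standing in for the shape of SparkConf.configsWithAlternatives.

// A simplified model (illustrative only): values are stored under whatever key
// was set, and both get() and contains() fall back from a current key to its
// deprecated alternates, which is exactly the consistency SPARK-13727 restores.
object DemoConf {
  // current key -> deprecated alternate keys (one made-up entry for the demo)
  private val alternatives: Map[String, Seq[String]] = Map(
    "spark.io.compression.lz4.blockSize" -> Seq("spark.io.compression.lz4.block.size"))

  private val settings = scala.collection.mutable.Map.empty[String, String]

  def set(key: String, value: String): Unit = settings(key) = value

  // Look up the key directly, then fall back to any deprecated alternate.
  def get(key: String): Option[String] =
    settings.get(key).orElse(
      alternatives.getOrElse(key, Nil).flatMap(settings.get).headOption)

  // The same fallback applied to contains(), mirroring what the patch adds.
  def contains(key: String): Boolean =
    settings.contains(key) ||
      alternatives.getOrElse(key, Nil).exists(settings.contains)
}

// Example: setting the deprecated key makes both spellings visible.
// DemoConf.set("spark.io.compression.lz4.block.size", "12345")
// DemoConf.get("spark.io.compression.lz4.blockSize")       // Some("12345")
// DemoConf.contains("spark.io.compression.lz4.blockSize")  // true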