about | summary | refs | log | tree | commit | diff
path: root/core
diff options
context:
space:
mode:
authorbomeng <bmeng@us.ibm.com>2016-03-10 11:17:35 -0800
committerMarcelo Vanzin <vanzin@cloudera.com>2016-03-10 11:17:40 -0800
commit235f4ac6fc05802a00889a3a0b39377711cbc7e3 (patch)
treeb9430e63dfe0afad34b90cbf0c7fb72c6dc7a841 /core
parentd24801ad285ac3f2282fe20d1250a010673e2f96 (diff)
downloadspark-235f4ac6fc05802a00889a3a0b39377711cbc7e3.tar.gz
spark-235f4ac6fc05802a00889a3a0b39377711cbc7e3.tar.bz2
spark-235f4ac6fc05802a00889a3a0b39377711cbc7e3.zip
[SPARK-13727][CORE] SparkConf.contains does not consider deprecated keys
The contains() method does not return consistently with get() if the key is deprecated. For example: import org.apache.spark.SparkConf; val conf = new SparkConf(); conf.set("spark.io.compression.lz4.block.size", "12345") # displays a deprecation warning; conf.get("spark.io.compression.lz4.block.size") # returns 12345; conf.get("spark.io.compression.lz4.blockSize") # returns 12345; conf.contains("spark.io.compression.lz4.block.size") # returns true; conf.contains("spark.io.compression.lz4.blockSize") # returns false. The fix makes contains() and get() consistent. A test case has been added; unit tests should be sufficient. Author: bomeng <bmeng@us.ibm.com> Closes #11568 from bomeng/SPARK-13727.
Diffstat (limited to 'core')
-rw-r--r-- core/src/main/scala/org/apache/spark/SparkConf.scala | 5
-rw-r--r-- core/src/test/scala/org/apache/spark/SparkConfSuite.scala | 14
2 files changed, 18 insertions, 1 deletion
diff --git a/core/src/main/scala/org/apache/spark/SparkConf.scala b/core/src/main/scala/org/apache/spark/SparkConf.scala
index 16423e771a..f9c01f30f1 100644
--- a/core/src/main/scala/org/apache/spark/SparkConf.scala
+++ b/core/src/main/scala/org/apache/spark/SparkConf.scala
@@ -388,7 +388,10 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
def getAppId: String = get("spark.app.id")
/** Does the configuration contain a given parameter? */
- def contains(key: String): Boolean = settings.containsKey(key)
+ def contains(key: String): Boolean = {
+ settings.containsKey(key) ||
+ configsWithAlternatives.get(key).toSeq.flatten.exists { alt => contains(alt.key) }
+ }
/** Copy this object */
override def clone: SparkConf = {
diff --git a/core/src/test/scala/org/apache/spark/SparkConfSuite.scala b/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
index 79881f30b2..a883d1b57e 100644
--- a/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
@@ -267,6 +267,20 @@ class SparkConfSuite extends SparkFunSuite with LocalSparkContext with ResetSyst
conf.set("spark.akka.lookupTimeout", "4")
assert(RpcUtils.lookupRpcTimeout(conf).duration === (4 seconds))
}
+
+ test("SPARK-13727") {
+ val conf = new SparkConf()
+ // set the conf in the deprecated way
+ conf.set("spark.io.compression.lz4.block.size", "12345")
+ // get the conf in the recommended way
+ assert(conf.get("spark.io.compression.lz4.blockSize") === "12345")
+ // we can still get the conf in the deprecated way
+ assert(conf.get("spark.io.compression.lz4.block.size") === "12345")
+ // the contains() also works as expected
+ assert(conf.contains("spark.io.compression.lz4.block.size"))
+ assert(conf.contains("spark.io.compression.lz4.blockSize"))
+ assert(conf.contains("spark.io.unknown") === false)
+ }
}
class Class1 {}