 core/src/main/scala/org/apache/spark/SparkConf.scala      | 11 +++++++----
 core/src/test/scala/org/apache/spark/SparkConfSuite.scala | 22 +++++++++++++++++++++-
 2 files changed, 28 insertions(+), 5 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/SparkConf.scala b/core/src/main/scala/org/apache/spark/SparkConf.scala
index 31b41d9524..e85e5aa237 100644
--- a/core/src/main/scala/org/apache/spark/SparkConf.scala
+++ b/core/src/main/scala/org/apache/spark/SparkConf.scala
@@ -56,10 +56,13 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging with Seria
 
   private val settings = new ConcurrentHashMap[String, String]()
 
-  private val reader = new ConfigReader(new SparkConfigProvider(settings))
-  reader.bindEnv(new ConfigProvider {
-    override def get(key: String): Option[String] = Option(getenv(key))
-  })
+  @transient private lazy val reader: ConfigReader = {
+    val _reader = new ConfigReader(new SparkConfigProvider(settings))
+    _reader.bindEnv(new ConfigProvider {
+      override def get(key: String): Option[String] = Option(getenv(key))
+    })
+    _reader
+  }
 
   if (loadDefaults) {
     loadFromSystemProperties(false)
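
Why the SparkConf.scala change works: ConfigReader (and the anonymous ConfigProvider bound to the environment) is not serializable, so holding it in a plain val made SparkConf itself fail to serialize. Declaring it as a @transient lazy val excludes it from the serialized form, and it is rebuilt from the serializable settings map on first access after deserialization. A minimal standalone sketch of that pattern follows; Reader, Settings, and TransientLazyValDemo are illustrative stand-ins for ConfigReader/SparkConf, not Spark code.

    import java.io._

    // Stand-in for ConfigReader: deliberately not Serializable.
    class Reader(entries: Map[String, String]) {
      def get(key: String): Option[String] = entries.get(key)
    }

    // Stand-in for SparkConf: only the entries map is written out; the
    // @transient lazy reader is skipped and re-created on first use.
    class Settings(entries: Map[String, String]) extends Serializable {
      @transient private lazy val reader = new Reader(entries)
      def get(key: String): Option[String] = reader.get(key)
    }

    object TransientLazyValDemo extends App {
      val buf = new ByteArrayOutputStream()
      val out = new ObjectOutputStream(buf)
      out.writeObject(new Settings(Map("k" -> "v")))
      out.close()
      val in = new ObjectInputStream(new ByteArrayInputStream(buf.toByteArray))
      val copy = in.readObject().asInstanceOf[Settings]
      assert(copy.get("k") == Some("v"))  // lazy reader rebuilt transparently
    }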
diff --git a/core/src/test/scala/org/apache/spark/SparkConfSuite.scala b/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
index 1f0f655a15..83906cff12 100644
--- a/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
@@ -26,8 +26,9 @@ import scala.util.{Random, Try}
 
 import com.esotericsoftware.kryo.Kryo
 
+import org.apache.spark.internal.config._
 import org.apache.spark.network.util.ByteUnit
-import org.apache.spark.serializer.{KryoRegistrator, KryoSerializer}
+import org.apache.spark.serializer.{JavaSerializer, KryoRegistrator, KryoSerializer}
 import org.apache.spark.util.{ResetSystemProperties, RpcUtils}
 
 class SparkConfSuite extends SparkFunSuite with LocalSparkContext with ResetSystemProperties {
@@ -283,6 +284,25 @@ class SparkConfSuite extends SparkFunSuite with LocalSparkContext with ResetSyst
     assert(conf.contains("spark.io.compression.lz4.blockSize"))
     assert(conf.contains("spark.io.unknown") === false)
   }
+
+  val serializers = Map(
+    "java" -> new JavaSerializer(new SparkConf()),
+    "kryo" -> new KryoSerializer(new SparkConf()))
+
+  serializers.foreach { case (name, ser) =>
+    test(s"SPARK-17240: SparkConf should be serializable ($name)") {
+      val conf = new SparkConf()
+      conf.set(DRIVER_CLASS_PATH, "${" + DRIVER_JAVA_OPTIONS.key + "}")
+      conf.set(DRIVER_JAVA_OPTIONS, "test")
+
+      val serializer = ser.newInstance()
+      val bytes = serializer.serialize(conf)
+      val deser = serializer.deserialize[SparkConf](bytes)
+
+      assert(conf.get(DRIVER_CLASS_PATH) === deser.get(DRIVER_CLASS_PATH))
+    }
+  }
+
 }
 
 class Class1 {}
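
For context, the new tests round-trip a SparkConf through both built-in serializers, setting DRIVER_CLASS_PATH to the reference "${spark.driver.extraJavaOptions}" so they also check that the rebuilt reader still expands variables after deserialization. A standalone sketch of the same round trip, assuming spark-core on the classpath; the typed DRIVER_* entries are private[spark], so the sketch uses a plain string key, and ConfRoundTripDemo is a made-up name:

    import org.apache.spark.SparkConf
    import org.apache.spark.serializer.{JavaSerializer, KryoSerializer}

    object ConfRoundTripDemo extends App {
      val conf = new SparkConf(loadDefaults = false).set("spark.app.name", "demo")
      // Before this patch, serializing conf failed with a
      // NotSerializableException on the ConfigReader field.
      Seq(new JavaSerializer(conf), new KryoSerializer(conf)).foreach { ser =>
        val instance = ser.newInstance()
        val copy = instance.deserialize[SparkConf](instance.serialize(conf))
        assert(copy.get("spark.app.name") == conf.get("spark.app.name"))
      }
    }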