author    Marcelo Vanzin <vanzin@cloudera.com>  2016-08-25 16:11:42 -0700
committer Marcelo Vanzin <vanzin@cloudera.com>  2016-08-25 16:11:42 -0700
commit    9b5a1d1d53bc4412de3cbc86dc819b0c213229a8 (patch)
tree      44734abfc7b061da7a6fc1bce3ab3fa6ec528beb /core
parent    3e4c7db4d11c474457e7886a5501108ebab0cf6d (diff)
[SPARK-17240][CORE] Make SparkConf serializable again.
Make the config reader transient, and initialize it lazily so that
serialization works with both Java and Kryo (and hopefully any other
custom serializer).

Added a unit test to make sure SparkConf remains serializable and the
reader works with both built-in serializers.

Author: Marcelo Vanzin <vanzin@cloudera.com>

Closes #14813 from vanzin/SPARK-17240.
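For context, a minimal self-contained sketch of the pattern this change applies. The names below (Holder, Reader, RoundTrip) are illustrative stand-ins, not Spark classes: a non-serializable helper field is marked @transient so serializers skip it, and lazy so it is rebuilt from the serialized state on first access after deserialization.

import java.io._

// Hedged sketch: Holder stands in for SparkConf, Reader for the
// non-serializable ConfigReader it wraps.
class Reader(settings: Map[String, String]) {
  def get(key: String): Option[String] = settings.get(key)
}

class Holder(val settings: Map[String, String]) extends Serializable {
  // @transient: serializers skip this field; lazy: it is re-created from
  // `settings` on first access after deserialization.
  @transient private lazy val reader: Reader = new Reader(settings)
  def get(key: String): Option[String] = reader.get(key)
}

object RoundTrip extends App {
  val before = new Holder(Map("spark.app.name" -> "demo"))

  // Round-trip through plain Java serialization.
  val bos = new ByteArrayOutputStream()
  val oos = new ObjectOutputStream(bos)
  oos.writeObject(before)
  oos.close()
  val after = new ObjectInputStream(new ByteArrayInputStream(bos.toByteArray))
    .readObject().asInstanceOf[Holder]

  // The transient reader was never serialized; it is rebuilt here on first use.
  assert(after.get("spark.app.name") == Some("demo"))
}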
Diffstat (limited to 'core')
-rw-r--r--  core/src/main/scala/org/apache/spark/SparkConf.scala      | 11
-rw-r--r--  core/src/test/scala/org/apache/spark/SparkConfSuite.scala | 22
2 files changed, 28 insertions(+), 5 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/SparkConf.scala b/core/src/main/scala/org/apache/spark/SparkConf.scala
index 31b41d9524..e85e5aa237 100644
--- a/core/src/main/scala/org/apache/spark/SparkConf.scala
+++ b/core/src/main/scala/org/apache/spark/SparkConf.scala
@@ -56,10 +56,13 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging with Seria
 
   private val settings = new ConcurrentHashMap[String, String]()
 
-  private val reader = new ConfigReader(new SparkConfigProvider(settings))
-  reader.bindEnv(new ConfigProvider {
-    override def get(key: String): Option[String] = Option(getenv(key))
-  })
+  @transient private lazy val reader: ConfigReader = {
+    val _reader = new ConfigReader(new SparkConfigProvider(settings))
+    _reader.bindEnv(new ConfigProvider {
+      override def get(key: String): Option[String] = Option(getenv(key))
+    })
+    _reader
+  }
 
   if (loadDefaults) {
     loadFromSystemProperties(false)
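For contrast, a hedged counter-example (hypothetical names, not Spark code) of the failure mode the old eager field had: a non-transient reference to a non-Serializable object makes Java serialization throw NotSerializableException while walking the object graph.

import java.io._

class NonSerializableReader // like ConfigReader, carries no Serializable marker

class BrokenHolder extends Serializable {
  private val reader = new NonSerializableReader // eager and non-transient
}

object Broken extends App {
  val oos = new ObjectOutputStream(new ByteArrayOutputStream())
  try {
    oos.writeObject(new BrokenHolder) // fails on the non-serializable field
  } catch {
    case e: NotSerializableException => println(s"as expected: $e")
  }
}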
diff --git a/core/src/test/scala/org/apache/spark/SparkConfSuite.scala b/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
index 1f0f655a15..83906cff12 100644
--- a/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
@@ -26,8 +26,9 @@ import scala.util.{Random, Try}
 
 import com.esotericsoftware.kryo.Kryo
 
+import org.apache.spark.internal.config._
 import org.apache.spark.network.util.ByteUnit
-import org.apache.spark.serializer.{KryoRegistrator, KryoSerializer}
+import org.apache.spark.serializer.{JavaSerializer, KryoRegistrator, KryoSerializer}
 import org.apache.spark.util.{ResetSystemProperties, RpcUtils}
 
 class SparkConfSuite extends SparkFunSuite with LocalSparkContext with ResetSystemProperties {
@@ -283,6 +284,25 @@ class SparkConfSuite extends SparkFunSuite with LocalSparkContext with ResetSyst
     assert(conf.contains("spark.io.compression.lz4.blockSize"))
     assert(conf.contains("spark.io.unknown") === false)
   }
+
+  val serializers = Map(
+    "java" -> new JavaSerializer(new SparkConf()),
+    "kryo" -> new KryoSerializer(new SparkConf()))
+
+  serializers.foreach { case (name, ser) =>
+    test(s"SPARK-17240: SparkConf should be serializable ($name)") {
+      val conf = new SparkConf()
+      conf.set(DRIVER_CLASS_PATH, "${" + DRIVER_JAVA_OPTIONS.key + "}")
+      conf.set(DRIVER_JAVA_OPTIONS, "test")
+
+      val serializer = ser.newInstance()
+      val bytes = serializer.serialize(conf)
+      val deser = serializer.deserialize[SparkConf](bytes)
+
+      assert(conf.get(DRIVER_CLASS_PATH) === deser.get(DRIVER_CLASS_PATH))
+    }
+  }
+
 }
 
 class Class1 {}
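Note on the test above: DRIVER_CLASS_PATH is set to a "${...}" reference to DRIVER_JAVA_OPTIONS, so the final assert only passes if the deserialized copy can rebuild its ConfigReader and expand the variable, which is exactly what the @transient lazy val is meant to guarantee.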