author	Sean Owen <sowen@cloudera.com>	2016-02-10 13:34:53 -0800
committer	Reynold Xin <rxin@databricks.com>	2016-02-10 13:34:53 -0800
commit	29c547303f886b96b74b411ac70f0fa81113f086 (patch)
tree	a0696ca78c2fb21e9d480c0704674f1d517e42fd /core
parent	5947fa8fa1f95d8fc1537c1e37bc16bae8fe7988 (diff)
[SPARK-12414][CORE] Remove closure serializer
Remove the spark.closure.serializer option and always use JavaSerializer.

CC andrewor14 rxin: I see there's a discussion in the JIRA, but I just thought I'd offer this for a look at what the change would be.

Author: Sean Owen <sowen@cloudera.com>

Closes #11150 from srowen/SPARK-12414.
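Note (not part of the commit itself): a minimal sketch of what this change means for user code. The app name, master, and config value below are illustrative only. After this commit, setting spark.closure.serializer has no effect, since the code no longer reads it; task closures are always serialized with JavaSerializer.

import org.apache.spark.{SparkConf, SparkContext}

object ClosureSerializerDemo {
  def main(args: Array[String]): Unit = {
    // After this commit, the spark.closure.serializer setting below is
    // ignored; closures always go through JavaSerializer.
    val conf = new SparkConf()
      .setMaster("local[2]")
      .setAppName("closure-serializer-demo")
      .set("spark.closure.serializer", "org.apache.spark.serializer.KryoSerializer")

    val sc = new SparkContext(conf)
    // The closure (_ + 1) is shipped to executors via JavaSerializer regardless.
    println(sc.parallelize(1 to 4).map(_ + 1).collect().mkString(","))
    sc.stop()
  }
}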
Diffstat (limited to 'core')
-rw-r--r--	core/src/main/scala/org/apache/spark/SparkEnv.scala	| 5 ++---
-rw-r--r--	core/src/test/scala/org/apache/spark/serializer/KryoSerializerSuite.scala	| 3 +--
2 files changed, 3 insertions(+), 5 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/SparkEnv.scala b/core/src/main/scala/org/apache/spark/SparkEnv.scala
index 9461afdc54..204f7356f7 100644
--- a/core/src/main/scala/org/apache/spark/SparkEnv.scala
+++ b/core/src/main/scala/org/apache/spark/SparkEnv.scala
@@ -35,7 +35,7 @@ import org.apache.spark.network.netty.NettyBlockTransferService
 import org.apache.spark.rpc.{RpcEndpoint, RpcEndpointRef, RpcEnv}
 import org.apache.spark.scheduler.{LiveListenerBus, OutputCommitCoordinator}
 import org.apache.spark.scheduler.OutputCommitCoordinator.OutputCommitCoordinatorEndpoint
-import org.apache.spark.serializer.Serializer
+import org.apache.spark.serializer.{JavaSerializer, Serializer}
 import org.apache.spark.shuffle.ShuffleManager
 import org.apache.spark.storage._
 import org.apache.spark.util.{RpcUtils, Utils}
@@ -277,8 +277,7 @@ object SparkEnv extends Logging {
       "spark.serializer", "org.apache.spark.serializer.JavaSerializer")
     logDebug(s"Using serializer: ${serializer.getClass}")

-    val closureSerializer = instantiateClassFromConf[Serializer](
-      "spark.closure.serializer", "org.apache.spark.serializer.JavaSerializer")
+    val closureSerializer = new JavaSerializer(conf)

     def registerOrLookupEndpoint(
         name: String, endpointCreator: => RpcEndpoint):
diff --git a/core/src/test/scala/org/apache/spark/serializer/KryoSerializerSuite.scala b/core/src/test/scala/org/apache/spark/serializer/KryoSerializerSuite.scala
index f869bcd708..27d063630b 100644
--- a/core/src/test/scala/org/apache/spark/serializer/KryoSerializerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/serializer/KryoSerializerSuite.scala
@@ -282,8 +282,7 @@ class KryoSerializerSuite extends SparkFunSuite with SharedSparkContext {
   test("kryo with fold") {
     val control = 1 :: 2 :: Nil
     // zeroValue must not be a ClassWithoutNoArgConstructor instance because it will be
-    // serialized by spark.closure.serializer but spark.closure.serializer only supports
-    // the default Java serializer.
+    // serialized by the Java serializer.
     val result = sc.parallelize(control, 2).map(new ClassWithoutNoArgConstructor(_))
       .fold(null)((t1, t2) => {
         val t1x = if (t1 == null) 0 else t1.x
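For background on the test comment above: since task closures (and fold's zeroValue, which travels with the closure) are always Java-serialized, anything a closure captures must be java.io.Serializable. A standalone sketch of that constraint, assuming only the JDK; ClosureCaptureDemo and its classes are illustrative and not Spark code.

import java.io.{ByteArrayOutputStream, NotSerializableException, ObjectOutputStream}

object ClosureCaptureDemo {
  class Plain                       // not Serializable: capturing it breaks Java serialization
  class Marked extends Serializable // Serializable: capturing it is fine

  // Round-trip a function value through Java serialization, the same
  // mechanism JavaSerializer uses for task closures.
  def serializedSize(f: Int => Int): Int = {
    val bytes = new ByteArrayOutputStream()
    val out = new ObjectOutputStream(bytes)
    out.writeObject(f)
    out.close()
    bytes.size
  }

  def main(args: Array[String]): Unit = {
    val ok = new Marked
    println(s"serializable capture: ${serializedSize(x => x + ok.hashCode)} bytes")

    val bad = new Plain
    try serializedSize(x => x + bad.hashCode)
    catch { case e: NotSerializableException => println(s"fails as expected: $e") }
  }
}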