From bc1babd63da4ee56e6d371eb24805a5d714e8295 Mon Sep 17 00:00:00 2001
From: Shixiong Zhu
Date: Fri, 22 Jan 2016 21:20:04 -0800
Subject: [SPARK-7997][CORE] Remove Akka from Spark Core and Streaming

- Remove the Akka dependency from core. Note: the streaming-akka project still uses Akka.
- Remove HttpFileServer.
- Remove Akka configs from SparkConf and SSLOptions.
- Rename `spark.akka.frameSize` to `spark.rpc.message.maxSize`. I think it's still worth keeping this config because the choice between `DirectTaskResult` and `IndirectTaskResult` depends on it (a sketch of that decision follows the patch).
- Update comments and docs.

Author: Shixiong Zhu

Closes #10854 from zsxwing/remove-akka.
---
 .../scala/org/apache/spark/mllib/util/LocalClusterSparkContext.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

(limited to 'mllib/src/test')

diff --git a/mllib/src/test/scala/org/apache/spark/mllib/util/LocalClusterSparkContext.scala b/mllib/src/test/scala/org/apache/spark/mllib/util/LocalClusterSparkContext.scala
index 9b2d023bbf..95d874b843 100644
--- a/mllib/src/test/scala/org/apache/spark/mllib/util/LocalClusterSparkContext.scala
+++ b/mllib/src/test/scala/org/apache/spark/mllib/util/LocalClusterSparkContext.scala
@@ -29,7 +29,7 @@ trait LocalClusterSparkContext extends BeforeAndAfterAll { self: Suite =>
     val conf = new SparkConf()
       .setMaster("local-cluster[2, 1, 1024]")
       .setAppName("test-cluster")
-      .set("spark.akka.frameSize", "1") // set to 1MB to detect direct serialization of data
+      .set("spark.rpc.message.maxSize", "1") // set to 1MB to detect direct serialization of data
     sc = new SparkContext(conf)
   }
--
cgit v1.2.3
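
For context on the renamed config: the sketch below is not part of this patch and is only a simplified model, under the assumption that an executor compares a serialized result's size against the `spark.rpc.message.maxSize` limit to decide whether to return the bytes inline (direct) or only a reference to them (indirect). All class and method names here are hypothetical, not Spark's actual internals.

// Illustrative sketch of the direct-vs-indirect task result decision that a
// size limit like spark.rpc.message.maxSize governs. Hypothetical names only.
object TaskResultSizeSketch {

  sealed trait SerializedTaskResult
  // Small result: the serialized bytes travel inside the RPC status update itself.
  final case class Direct(bytes: Array[Byte]) extends SerializedTaskResult
  // Large result: only a block id and size are sent; the driver fetches the bytes later.
  final case class Indirect(blockId: String, sizeBytes: Long) extends SerializedTaskResult

  // maxRpcMessageBytes would be derived from spark.rpc.message.maxSize (configured in MB).
  def packageResult(taskId: Long, serialized: Array[Byte], maxRpcMessageBytes: Long): SerializedTaskResult =
    if (serialized.length <= maxRpcMessageBytes) {
      Direct(serialized)
    } else {
      // Too big for a single RPC message: the bytes would be handed to some
      // block store (omitted here) and only a reference reported back.
      Indirect(s"taskresult_$taskId", serialized.length.toLong)
    }

  def main(args: Array[String]): Unit = {
    val oneMb = 1L << 20 // the same 1 MB limit the test above configures via "1"
    println(packageResult(1L, Array.fill(100 * 1024)(1.toByte), oneMb))      // Direct
    println(packageResult(2L, Array.fill(4 * 1024 * 1024)(1.toByte), oneMb)) // Indirect
  }
}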