path: root/mllib
author     Shixiong Zhu <shixiong@databricks.com>    2016-01-22 21:20:04 -0800
committer  Reynold Xin <rxin@databricks.com>         2016-01-22 21:20:04 -0800
commit     bc1babd63da4ee56e6d371eb24805a5d714e8295 (patch)
tree       8aec6a20e3d23574f53d818752df61a28c64d635 /mllib
parent     d8fefab4d8149f0638282570c75271ef35c65cff (diff)
[SPARK-7997][CORE] Remove Akka from Spark Core and Streaming
- Remove the Akka dependency from core. Note: the streaming-akka project still uses Akka.
- Remove HttpFileServer.
- Remove Akka configs from SparkConf and SSLOptions.
- Rename `spark.akka.frameSize` to `spark.rpc.message.maxSize` (a usage sketch follows below). This config is still worth keeping because the choice between `DirectTaskResult` and `IndirectTaskResult` depends on it.
- Update comments and docs.

Author: Shixiong Zhu <shixiong@databricks.com>

Closes #10854 from zsxwing/remove-akka.
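For reference, the renamed setting is applied like any other SparkConf key. A minimal, hypothetical driver-side sketch (app name, master, and the value are illustrative, not part of this commit):

    import org.apache.spark.{SparkConf, SparkContext}

    // Hypothetical setup: the limit formerly configured as spark.akka.frameSize (in MB)
    // now lives under spark.rpc.message.maxSize. Task results larger than this limit
    // are returned indirectly through the block manager (IndirectTaskResult) rather
    // than inline in the RPC message (DirectTaskResult).
    val conf = new SparkConf()
      .setAppName("rpc-max-size-example")
      .setMaster("local[2]")
      .set("spark.rpc.message.maxSize", "128") // MB
    val sc = new SparkContext(conf)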
Diffstat (limited to 'mllib')
-rw-r--r--  mllib/src/main/scala/org/apache/spark/mllib/feature/VectorTransformer.scala      2
-rw-r--r--  mllib/src/test/scala/org/apache/spark/mllib/util/LocalClusterSparkContext.scala  2
2 files changed, 2 insertions, 2 deletions
diff --git a/mllib/src/main/scala/org/apache/spark/mllib/feature/VectorTransformer.scala b/mllib/src/main/scala/org/apache/spark/mllib/feature/VectorTransformer.scala
index 5778fd1d09..ca7385128d 100644
--- a/mllib/src/main/scala/org/apache/spark/mllib/feature/VectorTransformer.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/feature/VectorTransformer.scala
@@ -47,7 +47,7 @@ trait VectorTransformer extends Serializable {
*/
@Since("1.1.0")
def transform(data: RDD[Vector]): RDD[Vector] = {
- // Later in #1498 , all RDD objects are sent via broadcasting instead of akka.
+ // Later in #1498 , all RDD objects are sent via broadcasting instead of RPC.
// So it should be no longer necessary to explicitly broadcast `this` object.
data.map(x => this.transform(x))
}
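For context on the comment above, the explicit broadcast it says is no longer necessary would look roughly like the hypothetical variant below (a sketch only; `transformBroadcast` is not part of the real trait, and would sit next to `transform` inside `VectorTransformer`). Since #1498 the task closure itself is broadcast to executors, so the extra broadcast of `this` buys nothing:

    import org.apache.spark.SparkContext
    import org.apache.spark.broadcast.Broadcast
    import org.apache.spark.mllib.linalg.Vector
    import org.apache.spark.rdd.RDD

    // Hypothetical alternative: ship the transformer through an explicit broadcast
    // variable instead of letting the map closure capture `this`.
    def transformBroadcast(sc: SparkContext, data: RDD[Vector]): RDD[Vector] = {
      val bc: Broadcast[VectorTransformer] = sc.broadcast(this)
      data.map(x => bc.value.transform(x))
    }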
diff --git a/mllib/src/test/scala/org/apache/spark/mllib/util/LocalClusterSparkContext.scala b/mllib/src/test/scala/org/apache/spark/mllib/util/LocalClusterSparkContext.scala
index 9b2d023bbf..95d874b843 100644
--- a/mllib/src/test/scala/org/apache/spark/mllib/util/LocalClusterSparkContext.scala
+++ b/mllib/src/test/scala/org/apache/spark/mllib/util/LocalClusterSparkContext.scala
@@ -29,7 +29,7 @@ trait LocalClusterSparkContext extends BeforeAndAfterAll { self: Suite =>
val conf = new SparkConf()
.setMaster("local-cluster[2, 1, 1024]")
.setAppName("test-cluster")
- .set("spark.akka.frameSize", "1") // set to 1MB to detect direct serialization of data
+ .set("spark.rpc.message.maxSize", "1") // set to 1MB to detect direct serialization of data
sc = new SparkContext(conf)
}
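As a usage note, a hypothetical suite mixing in this trait runs against the 2-executor local cluster with the 1 MB RPC limit, so a large object that leaks into a task closure or a directly returned task result fails the job instead of passing silently (class and test names below are illustrative):

    import org.scalatest.FunSuite

    // Hypothetical suite: LocalClusterSparkContext provides `sc` configured with
    // spark.rpc.message.maxSize = 1 (MB), so over-sized closures or task results
    // surface as failures during the test run.
    class MyAlgorithmClusterSuite extends FunSuite with LocalClusterSparkContext {
      test("job completes without shipping large data through RPC") {
        val counts = sc.parallelize(1 to 100000, 4).map(_ % 10).countByValue()
        assert(counts.values.sum === 100000L)
      }
    }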