path: root/mllib
author      Henry Saputra <hsaputra@apache.org>   2014-01-12 10:34:13 -0800
committer   Henry Saputra <hsaputra@apache.org>   2014-01-12 10:34:13 -0800
commit      91a563608e301bb243fca3765d569bde65ad747c (patch)
tree        1dc3da48852d2677846d415b96b4ee0558a54cb5 /mllib
parent      93a65e5fde64ffed3dbd2a050c1007e077ecd004 (diff)
parent      288a878999848adb130041d1e40c14bfc879cec6 (diff)
download    spark-91a563608e301bb243fca3765d569bde65ad747c.tar.gz
            spark-91a563608e301bb243fca3765d569bde65ad747c.tar.bz2
            spark-91a563608e301bb243fca3765d569bde65ad747c.zip
Merge branch 'master' into remove_simpleredundantreturn_scala
Diffstat (limited to 'mllib')
-rw-r--r--   mllib/src/main/scala/org/apache/spark/mllib/recommendation/ALS.scala   15
1 file changed, 8 insertions(+), 7 deletions(-)
diff --git a/mllib/src/main/scala/org/apache/spark/mllib/recommendation/ALS.scala b/mllib/src/main/scala/org/apache/spark/mllib/recommendation/ALS.scala
index 8b27ecf82c..89ee07063d 100644
--- a/mllib/src/main/scala/org/apache/spark/mllib/recommendation/ALS.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/recommendation/ALS.scala
@@ -22,7 +22,7 @@ import scala.util.Random
 import scala.util.Sorting
 
 import org.apache.spark.broadcast.Broadcast
-import org.apache.spark.{Logging, HashPartitioner, Partitioner, SparkContext}
+import org.apache.spark.{Logging, HashPartitioner, Partitioner, SparkContext, SparkConf}
 import org.apache.spark.storage.StorageLevel
 import org.apache.spark.rdd.RDD
 import org.apache.spark.serializer.KryoRegistrator
@@ -578,12 +578,13 @@ object ALS {
     val implicitPrefs = if (args.length >= 7) args(6).toBoolean else false
     val alpha = if (args.length >= 8) args(7).toDouble else 1
     val blocks = if (args.length == 9) args(8).toInt else -1
-    val sc = new SparkContext(master, "ALS")
-    sc.conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
-    sc.conf.set("spark.kryo.registrator", classOf[ALSRegistrator].getName)
-    sc.conf.set("spark.kryo.referenceTracking", "false")
-    sc.conf.set("spark.kryoserializer.buffer.mb", "8")
-    sc.conf.set("spark.locality.wait", "10000")
+    val conf = new SparkConf()
+      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
+      .set("spark.kryo.registrator", classOf[ALSRegistrator].getName)
+      .set("spark.kryo.referenceTracking", "false")
+      .set("spark.kryoserializer.buffer.mb", "8")
+      .set("spark.locality.wait", "10000")
+    val sc = new SparkContext(master, "ALS", conf)
     val ratings = sc.textFile(ratingsFile).map { line =>
       val fields = line.split(',')
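
The change above replaces post-construction mutation of sc.conf with a SparkConf that is fully populated before the SparkContext is created, so the Kryo settings are guaranteed to be in effect when the context starts. Below is a minimal, self-contained sketch of that pattern against the Spark API of this era (the three-argument SparkContext constructor and the spark.kryoserializer.buffer.mb key used here have since been superseded). ExampleRegistrator, KryoConfExample, and the local[2] master are illustrative stand-ins; the registrator plays the role that ALSRegistrator plays in ALS.scala.

import com.esotericsoftware.kryo.Kryo
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.serializer.KryoRegistrator

// Illustrative registrator; in ALS.scala this role is played by ALSRegistrator,
// which registers the Rating class with Kryo.
class ExampleRegistrator extends KryoRegistrator {
  override def registerClasses(kryo: Kryo): Unit = {
    kryo.register(classOf[Array[Double]])
  }
}

object KryoConfExample {
  def main(args: Array[String]): Unit = {
    // All serializer-related settings are applied to the SparkConf up front,
    // rather than being set on sc.conf after the context already exists.
    val conf = new SparkConf()
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .set("spark.kryo.registrator", classOf[ExampleRegistrator].getName)
      .set("spark.kryo.referenceTracking", "false")
      .set("spark.kryoserializer.buffer.mb", "8")
    val sc = new SparkContext("local[2]", "KryoConfExample", conf)
    // ... build and run RDD jobs here ...
    sc.stop()
  }
}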