aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorJacek Laskowski <jacek@japila.pl>2015-08-27 11:07:37 +0100
committerSean Owen <sowen@cloudera.com>2015-08-27 11:07:37 +0100
commitb02e8187225d1765f67ce38864dfaca487be8a44 (patch)
treeff179d53153630faeecd65dc8823e806e3fba2cd
parent1a446f75b6cac46caea0217a66abeb226946ac71 (diff)
downloadspark-b02e8187225d1765f67ce38864dfaca487be8a44.tar.gz
spark-b02e8187225d1765f67ce38864dfaca487be8a44.tar.bz2
spark-b02e8187225d1765f67ce38864dfaca487be8a44.zip
[SPARK-9613] [HOTFIX] Fix usage of JavaConverters removed in Scala 2.11
Fix for [JavaConverters.asJavaListConverter](http://www.scala-lang.org/api/2.10.5/index.html#scala.collection.JavaConverters$) being removed in 2.11.7, which caused the build to fail with the 2.11 profile enabled. Tested with the default 2.10 and 2.11 profiles. BUILD SUCCESS in both cases. Build for 2.10: ./build/mvn -Pyarn -Phadoop-2.6 -Dhadoop.version=2.7.1 -DskipTests clean install and 2.11: ./dev/change-scala-version.sh 2.11 ./build/mvn -Pyarn -Phadoop-2.6 -Dhadoop.version=2.7.1 -Dscala-2.11 -DskipTests clean install Author: Jacek Laskowski <jacek@japila.pl> Closes #8479 from jaceklaskowski/SPARK-9613-hotfix.
-rw-r--r--mllib/src/test/java/org/apache/spark/ml/classification/JavaOneVsRestSuite.java2
1 file changed, 1 insertion, 1 deletion
diff --git a/mllib/src/test/java/org/apache/spark/ml/classification/JavaOneVsRestSuite.java b/mllib/src/test/java/org/apache/spark/ml/classification/JavaOneVsRestSuite.java
index 2744e020e9..253cabf013 100644
--- a/mllib/src/test/java/org/apache/spark/ml/classification/JavaOneVsRestSuite.java
+++ b/mllib/src/test/java/org/apache/spark/ml/classification/JavaOneVsRestSuite.java
@@ -55,7 +55,7 @@ public class JavaOneVsRestSuite implements Serializable {
double[] xMean = {5.843, 3.057, 3.758, 1.199};
double[] xVariance = {0.6856, 0.1899, 3.116, 0.581};
- List<LabeledPoint> points = JavaConverters.asJavaListConverter(
+ List<LabeledPoint> points = JavaConverters.seqAsJavaListConverter(
generateMultinomialLogisticInput(weights, xMean, xVariance, true, nPoints, 42)
).asJava();
datasetRDD = jsc.parallelize(points, 2);