-rw-r--r--  docs/mllib-collaborative-filtering.md | 10
-rw-r--r--  docs/mllib-decision-tree.md           | 20
-rw-r--r--  docs/mllib-ensembles.md               | 40
-rw-r--r--  docs/mllib-linear-methods.md          | 20
-rw-r--r--  docs/mllib-naive-bayes.md             | 10
5 files changed, 60 insertions(+), 40 deletions(-)
diff --git a/docs/mllib-collaborative-filtering.md b/docs/mllib-collaborative-filtering.md
index 935cd8dad3..27aa4d38b7 100644
--- a/docs/mllib-collaborative-filtering.md
+++ b/docs/mllib-collaborative-filtering.md
@@ -97,8 +97,9 @@ val MSE = ratesAndPreds.map { case ((user, product), (r1, r2)) =>
}.mean()
println("Mean Squared Error = " + MSE)
-model.save("myModelPath")
-val sameModel = MatrixFactorizationModel.load("myModelPath")
+// Save and load model
+model.save(sc, "myModelPath")
+val sameModel = MatrixFactorizationModel.load(sc, "myModelPath")
{% endhighlight %}
If the rating matrix is derived from another source of information (e.g., it is inferred from
@@ -186,8 +187,9 @@ public class CollaborativeFiltering {
).rdd()).mean();
System.out.println("Mean Squared Error = " + MSE);
- model.save("myModelPath");
- MatrixFactorizationModel sameModel = MatrixFactorizationModel.load("myModelPath");
+ // Save and load model
+ model.save(sc.sc(), "myModelPath");
+ MatrixFactorizationModel sameModel = MatrixFactorizationModel.load(sc.sc(), "myModelPath");
}
}
{% endhighlight %}
diff --git a/docs/mllib-decision-tree.md b/docs/mllib-decision-tree.md
index 4695d1cde4..8e478ab035 100644
--- a/docs/mllib-decision-tree.md
+++ b/docs/mllib-decision-tree.md
@@ -223,8 +223,9 @@ val testErr = labelAndPreds.filter(r => r._1 != r._2).count.toDouble / testData.
println("Test Error = " + testErr)
println("Learned classification tree model:\n" + model.toDebugString)
-model.save("myModelPath")
-val sameModel = DecisionTreeModel.load("myModelPath")
+// Save and load model
+model.save(sc, "myModelPath")
+val sameModel = DecisionTreeModel.load(sc, "myModelPath")
{% endhighlight %}
</div>
@@ -284,8 +285,9 @@ Double testErr =
System.out.println("Test Error: " + testErr);
System.out.println("Learned classification tree model:\n" + model.toDebugString());
-model.save("myModelPath");
-DecisionTreeModel sameModel = DecisionTreeModel.load("myModelPath");
+// Save and load model
+model.save(sc.sc(), "myModelPath");
+DecisionTreeModel sameModel = DecisionTreeModel.load(sc.sc(), "myModelPath");
{% endhighlight %}
</div>
@@ -362,8 +364,9 @@ val testMSE = labelsAndPredictions.map{ case(v, p) => math.pow((v - p), 2)}.mean
println("Test Mean Squared Error = " + testMSE)
println("Learned regression tree model:\n" + model.toDebugString)
-model.save("myModelPath")
-val sameModel = DecisionTreeModel.load("myModelPath")
+// Save and load model
+model.save(sc, "myModelPath")
+val sameModel = DecisionTreeModel.load(sc, "myModelPath")
{% endhighlight %}
</div>
@@ -429,8 +432,9 @@ Double testMSE =
System.out.println("Test Mean Squared Error: " + testMSE);
System.out.println("Learned regression tree model:\n" + model.toDebugString());
-model.save("myModelPath");
-DecisionTreeModel sameModel = DecisionTreeModel.load("myModelPath");
+// Save and load model
+model.save(sc.sc(), "myModelPath");
+DecisionTreeModel sameModel = DecisionTreeModel.load(sc.sc(), "myModelPath");
{% endhighlight %}
</div>
diff --git a/docs/mllib-ensembles.md b/docs/mllib-ensembles.md
index ddae84165f..ec1ef38b45 100644
--- a/docs/mllib-ensembles.md
+++ b/docs/mllib-ensembles.md
@@ -129,8 +129,9 @@ val testErr = labelAndPreds.filter(r => r._1 != r._2).count.toDouble / testData.
println("Test Error = " + testErr)
println("Learned classification forest model:\n" + model.toDebugString)
-model.save("myModelPath")
-val sameModel = RandomForestModel.load("myModelPath")
+// Save and load model
+model.save(sc, "myModelPath")
+val sameModel = RandomForestModel.load(sc, "myModelPath")
{% endhighlight %}
</div>
@@ -193,8 +194,9 @@ Double testErr =
System.out.println("Test Error: " + testErr);
System.out.println("Learned classification forest model:\n" + model.toDebugString());
-model.save("myModelPath");
-RandomForestModel sameModel = RandomForestModel.load("myModelPath");
+// Save and load model
+model.save(sc.sc(), "myModelPath");
+RandomForestModel sameModel = RandomForestModel.load(sc.sc(), "myModelPath");
{% endhighlight %}
</div>
@@ -276,8 +278,9 @@ val testMSE = labelsAndPredictions.map{ case(v, p) => math.pow((v - p), 2)}.mean
println("Test Mean Squared Error = " + testMSE)
println("Learned regression forest model:\n" + model.toDebugString)
-model.save("myModelPath")
-val sameModel = RandomForestModel.load("myModelPath")
+// Save and load model
+model.save(sc, "myModelPath")
+val sameModel = RandomForestModel.load(sc, "myModelPath")
{% endhighlight %}
</div>
@@ -343,8 +346,9 @@ Double testMSE =
System.out.println("Test Mean Squared Error: " + testMSE);
System.out.println("Learned regression forest model:\n" + model.toDebugString());
-model.save("myModelPath");
-RandomForestModel sameModel = RandomForestModel.load("myModelPath");
+// Save and load model
+model.save(sc.sc(), "myModelPath");
+RandomForestModel sameModel = RandomForestModel.load(sc.sc(), "myModelPath");
{% endhighlight %}
</div>
@@ -504,8 +508,9 @@ val testErr = labelAndPreds.filter(r => r._1 != r._2).count.toDouble / testData.
println("Test Error = " + testErr)
println("Learned classification GBT model:\n" + model.toDebugString)
-model.save("myModelPath")
-val sameModel = GradientBoostedTreesModel.load("myModelPath")
+// Save and load model
+model.save(sc, "myModelPath")
+val sameModel = GradientBoostedTreesModel.load(sc, "myModelPath")
{% endhighlight %}
</div>
@@ -568,8 +573,9 @@ Double testErr =
System.out.println("Test Error: " + testErr);
System.out.println("Learned classification GBT model:\n" + model.toDebugString());
-model.save("myModelPath");
-GradientBoostedTreesModel sameModel = GradientBoostedTreesModel.load("myModelPath");
+// Save and load model
+model.save(sc.sc(), "myModelPath");
+GradientBoostedTreesModel sameModel = GradientBoostedTreesModel.load(sc.sc(), "myModelPath");
{% endhighlight %}
</div>
@@ -647,8 +653,9 @@ val testMSE = labelsAndPredictions.map{ case(v, p) => math.pow((v - p), 2)}.mean
println("Test Mean Squared Error = " + testMSE)
println("Learned regression GBT model:\n" + model.toDebugString)
-model.save("myModelPath")
-val sameModel = GradientBoostedTreesModel.load("myModelPath")
+// Save and load model
+model.save(sc, "myModelPath")
+val sameModel = GradientBoostedTreesModel.load(sc, "myModelPath")
{% endhighlight %}
</div>
@@ -717,8 +724,9 @@ Double testMSE =
System.out.println("Test Mean Squared Error: " + testMSE);
System.out.println("Learned regression GBT model:\n" + model.toDebugString());
-model.save("myModelPath");
-GradientBoostedTreesModel sameModel = GradientBoostedTreesModel.load("myModelPath");
+// Save and load model
+model.save(sc.sc(), "myModelPath");
+GradientBoostedTreesModel sameModel = GradientBoostedTreesModel.load(sc.sc(), "myModelPath");
{% endhighlight %}
</div>
diff --git a/docs/mllib-linear-methods.md b/docs/mllib-linear-methods.md
index d9fc63b37d..ffbd7ef1bf 100644
--- a/docs/mllib-linear-methods.md
+++ b/docs/mllib-linear-methods.md
@@ -223,8 +223,9 @@ val auROC = metrics.areaUnderROC()
println("Area under ROC = " + auROC)
-model.save("myModelPath")
-val sameModel = SVMModel.load("myModelPath")
+// Save and load model
+model.save(sc, "myModelPath")
+val sameModel = SVMModel.load(sc, "myModelPath")
{% endhighlight %}
The `SVMWithSGD.train()` method by default performs L2 regularization with the
@@ -308,8 +309,9 @@ public class SVMClassifier {
System.out.println("Area under ROC = " + auROC);
- model.save("myModelPath");
- SVMModel sameModel = SVMModel.load("myModelPath");
+ // Save and load model
+ model.save(sc.sc(), "myModelPath");
+ SVMModel sameModel = SVMModel.load(sc.sc(), "myModelPath");
}
}
{% endhighlight %}
@@ -423,8 +425,9 @@ val valuesAndPreds = parsedData.map { point =>
val MSE = valuesAndPreds.map{case(v, p) => math.pow((v - p), 2)}.mean()
println("training Mean Squared Error = " + MSE)
-model.save("myModelPath")
-val sameModel = LinearRegressionModel.load("myModelPath")
+// Save and load model
+model.save(sc, "myModelPath")
+val sameModel = LinearRegressionModel.load(sc, "myModelPath")
{% endhighlight %}
[`RidgeRegressionWithSGD`](api/scala/index.html#org.apache.spark.mllib.regression.RidgeRegressionWithSGD)
@@ -496,8 +499,9 @@ public class LinearRegression {
).rdd()).mean();
System.out.println("training Mean Squared Error = " + MSE);
- model.save("myModelPath");
- LinearRegressionModel sameModel = LinearRegressionModel.load("myModelPath");
+ // Save and load model
+ model.save(sc.sc(), "myModelPath");
+ LinearRegressionModel sameModel = LinearRegressionModel.load(sc.sc(), "myModelPath");
}
}
{% endhighlight %}
diff --git a/docs/mllib-naive-bayes.md b/docs/mllib-naive-bayes.md
index 81173255b5..5224a0b49a 100644
--- a/docs/mllib-naive-bayes.md
+++ b/docs/mllib-naive-bayes.md
@@ -56,8 +56,9 @@ val model = NaiveBayes.train(training, lambda = 1.0)
val predictionAndLabel = test.map(p => (model.predict(p.features), p.label))
val accuracy = 1.0 * predictionAndLabel.filter(x => x._1 == x._2).count() / test.count()
-model.save("myModelPath")
-val sameModel = NaiveBayesModel.load("myModelPath")
+// Save and load model
+model.save(sc, "myModelPath")
+val sameModel = NaiveBayesModel.load(sc, "myModelPath")
{% endhighlight %}
</div>
@@ -97,8 +98,9 @@ double accuracy = predictionAndLabel.filter(new Function<Tuple2<Double, Double>,
}
}).count() / (double) test.count();
-model.save("myModelPath");
-NaiveBayesModel sameModel = NaiveBayesModel.load("myModelPath");
+// Save and load model
+model.save(sc.sc(), "myModelPath");
+NaiveBayesModel sameModel = NaiveBayesModel.load(sc.sc(), "myModelPath");
{% endhighlight %}
</div>
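
Every hunk above follows the same pattern: the model persistence calls now take the active SparkContext as their first argument. The following is a minimal sketch of the updated usage, not part of the patch; it assumes an existing SparkContext `sc`, a writable `myModelPath`, and the sample LIBSVM file shipped with Spark, and uses NaiveBayes purely as a representative model type.

{% highlight scala %}
import org.apache.spark.mllib.classification.{NaiveBayes, NaiveBayesModel}
import org.apache.spark.mllib.util.MLUtils

// Train a model on the bundled sample data (assumes `sc` is already available).
val data = MLUtils.loadLibSVMFile(sc, "data/mllib/sample_libsvm_data.txt")
val model = NaiveBayes.train(data, lambda = 1.0)

// Save and load model: the SparkContext is now the first argument of both calls.
model.save(sc, "myModelPath")
val sameModel = NaiveBayesModel.load(sc, "myModelPath")
{% endhighlight %}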