From f3f94677a2cede1fa6577fb60729d4271e6a1293 Mon Sep 17 00:00:00 2001
From: Patrick Wendell
Date: Mon, 3 Mar 2014 00:37:16 +0000
Subject: Various doc updates that haven't been pushed to the live 0.9 site.

---
 site/docs/0.9.0/README.md | 2 +-
 site/docs/0.9.0/api.html | 10 --------
 .../api/core/org/apache/spark/Accumulator.html | 4 +--
 .../api/core/org/apache/spark/Partitioner$.html | 2 +-
 .../core/org/apache/spark/scheduler/StageInfo.html | 4 ++-
 .../core/org/apache/spark/scheduler/package.html | 2 +-
 site/docs/0.9.0/api/mllib/index.html | 4 +--
 site/docs/0.9.0/api/mllib/index.js | 2 +-
 site/docs/0.9.0/api/mllib/index/index-m.html | 4 +--
 .../apache/spark/mllib/recommendation/package.html | 13 ----------
 .../mllib/org/apache/spark/mllib/util/package.html | 13 ++++++++++
 site/docs/0.9.0/api/pyspark/class-tree.html | 2 +-
 site/docs/0.9.0/api/pyspark/help.html | 2 +-
 site/docs/0.9.0/api/pyspark/identifier-index.html | 2 +-
 site/docs/0.9.0/api/pyspark/module-tree.html | 2 +-
 site/docs/0.9.0/api/pyspark/pyspark-module.html | 2 +-
 site/docs/0.9.0/api/pyspark/pyspark-pysrc.html | 2 +-
 .../api/pyspark/pyspark.accumulators-module.html | 2 +-
 .../api/pyspark/pyspark.accumulators-pysrc.html | 2 +-
 .../pyspark.accumulators.Accumulator-class.html | 2 +-
 ...yspark.accumulators.AccumulatorParam-class.html | 2 +-
 ....accumulators.AddingAccumulatorParam-class.html | 2 +-
 .../api/pyspark/pyspark.broadcast-module.html | 2 +-
 .../0.9.0/api/pyspark/pyspark.broadcast-pysrc.html | 2 +-
 .../pyspark/pyspark.broadcast.Broadcast-class.html | 2 +-
 .../0.9.0/api/pyspark/pyspark.conf-module.html | 2 +-
 .../docs/0.9.0/api/pyspark/pyspark.conf-pysrc.html | 2 +-
 .../api/pyspark/pyspark.conf.SparkConf-class.html | 2 +-
 .../0.9.0/api/pyspark/pyspark.context-module.html | 2 +-
 .../0.9.0/api/pyspark/pyspark.context-pysrc.html | 2 +-
 .../pyspark.context.SparkContext-class.html | 2 +-
 .../0.9.0/api/pyspark/pyspark.files-module.html | 2 +-
 .../0.9.0/api/pyspark/pyspark.files-pysrc.html | 2 +-
 .../pyspark/pyspark.files.SparkFiles-class.html | 2 +-
 .../0.9.0/api/pyspark/pyspark.mllib-module.html | 2 +-
 .../0.9.0/api/pyspark/pyspark.mllib-pysrc.html | 2 +-
 .../pyspark.mllib.classification-module.html | 2 +-
 .../pyspark.mllib.classification-pysrc.html | 2 +-
 ...assification.LogisticRegressionModel-class.html | 2 +-
 ...sification.LogisticRegressionWithSGD-class.html | 2 +-
 ...park.mllib.classification.NaiveBayes-class.html | 2 +-
 ...mllib.classification.NaiveBayesModel-class.html | 2 +-
 ...yspark.mllib.classification.SVMModel-class.html | 2 +-
 ...park.mllib.classification.SVMWithSGD-class.html | 2 +-
 .../pyspark/pyspark.mllib.clustering-module.html | 2 +-
 .../pyspark/pyspark.mllib.clustering-pysrc.html | 2 +-
 .../pyspark.mllib.clustering.KMeans-class.html | 2 +-
 ...pyspark.mllib.clustering.KMeansModel-class.html | 2 +-
 .../pyspark.mllib.recommendation-module.html | 2 +-
 .../pyspark.mllib.recommendation-pysrc.html | 2 +-
 .../pyspark.mllib.recommendation.ALS-class.html | 2 +-
 ...ommendation.MatrixFactorizationModel-class.html | 2 +-
 .../pyspark/pyspark.mllib.regression-module.html | 2 +-
 .../pyspark/pyspark.mllib.regression-pysrc.html | 2 +-
 .../pyspark.mllib.regression.LassoModel-class.html | 2 +-
 ...yspark.mllib.regression.LassoWithSGD-class.html | 2 +-
 ...pyspark.mllib.regression.LinearModel-class.html | 2 +-
 ...lib.regression.LinearRegressionModel-class.html | 2 +-
 ...regression.LinearRegressionModelBase-class.html | 2 +-
 ...b.regression.LinearRegressionWithSGD-class.html | 2 +-
 ...llib.regression.RidgeRegressionModel-class.html | 2 +-
 ...ib.regression.RidgeRegressionWithSGD-class.html | 2 +-
 .../docs/0.9.0/api/pyspark/pyspark.rdd-module.html | 2 +-
 site/docs/0.9.0/api/pyspark/pyspark.rdd-pysrc.html | 2 +-
 .../0.9.0/api/pyspark/pyspark.rdd.RDD-class.html | 2 +-
 .../api/pyspark/pyspark.serializers-module.html | 2 +-
 .../api/pyspark/pyspark.serializers-pysrc.html | 2 +-
 ...yspark.serializers.MarshalSerializer-class.html | 2 +-
 ...pyspark.serializers.PickleSerializer-class.html | 2 +-
 .../api/pyspark/pyspark.statcounter-module.html | 2 +-
 .../api/pyspark/pyspark.statcounter-pysrc.html | 2 +-
 .../pyspark.statcounter.StatCounter-class.html | 2 +-
 .../api/pyspark/pyspark.storagelevel-module.html | 2 +-
 .../api/pyspark/pyspark.storagelevel-pysrc.html | 2 +-
 .../pyspark.storagelevel.StorageLevel-class.html | 2 +-
 site/docs/0.9.0/bagel-programming-guide.html | 12 +-------
 site/docs/0.9.0/building-with-maven.html | 10 --------
 site/docs/0.9.0/cluster-overview.html | 10 --------
 site/docs/0.9.0/configuration.html | 16 ++----------
 site/docs/0.9.0/contributing-to-spark.html | 10 --------
 site/docs/0.9.0/ec2-scripts.html | 10 --------
 site/docs/0.9.0/graphx-programming-guide.html | 10 --------
 .../0.9.0/hadoop-third-party-distributions.html | 10 --------
 site/docs/0.9.0/hardware-provisioning.html | 10 --------
 site/docs/0.9.0/index.html | 24 ++++++------------
 site/docs/0.9.0/java-programming-guide.html | 12 +-------
 site/docs/0.9.0/job-scheduling.html | 10 --------
 site/docs/0.9.0/js/main.js | 29 +---------------------
 site/docs/0.9.0/mllib-guide.html | 10 --------
 site/docs/0.9.0/monitoring.html | 10 --------
 site/docs/0.9.0/python-programming-guide.html | 10 --------
 site/docs/0.9.0/quick-start.html | 10 --------
 site/docs/0.9.0/running-on-mesos.html | 10 --------
 site/docs/0.9.0/running-on-yarn.html | 10 --------
 site/docs/0.9.0/scala-programming-guide.html | 16 +++--------
 site/docs/0.9.0/spark-debugger.html | 14 ++--------
 site/docs/0.9.0/spark-standalone.html | 10 --------
 site/docs/0.9.0/streaming-custom-receivers.html | 10 --------
 site/docs/0.9.0/streaming-programming-guide.html | 24 ++++++------------
 site/docs/0.9.0/tuning.html | 10 --------
 100 files changed, 114 insertions(+), 391 deletions(-)

(limited to 'site/docs')

diff --git a/site/docs/0.9.0/README.md b/site/docs/0.9.0/README.md
index dfcf75355..0b7c32409 100644
--- a/site/docs/0.9.0/README.md
+++ b/site/docs/0.9.0/README.md
@@ -1,6 +1,6 @@
Welcome to the Spark documentation!

-This readme will walk you through navigating and building the Spark documentation, which is included here with the Spark source code. You can also find documentation specific to release versions of Spark at http://spark.incubator.apache.org/documentation.html.
+This readme will walk you through navigating and building the Spark documentation, which is included here with the Spark source code. You can also find documentation specific to release versions of Spark at http://spark.apache.org/documentation.html.

Read on to learn more about viewing documentation in plain text (i.e., markdown) or building the documentation yourself. Why build it yourself? So that you have the docs that corresponds to whichever version of Spark you currently have checked out of revision control.

diff --git a/site/docs/0.9.0/api.html b/site/docs/0.9.0/api.html
index 263188cf2..80163501a 100644
--- a/site/docs/0.9.0/api.html
+++ b/site/docs/0.9.0/api.html
@@ -169,16 +169,6 @@
-->
-
-
diff --git a/site/docs/0.9.0/api/core/org/apache/spark/Accumulator.html b/site/docs/0.9.0/api/core/org/apache/spark/Accumulator.html
index c883ab40b..0ac285f42 100644
--- a/site/docs/0.9.0/api/core/org/apache/spark/Accumulator.html
+++ b/site/docs/0.9.0/api/core/org/apache/spark/Accumulator.html
@@ -42,8 +42,8 @@

A simpler value of Accumulable where the result type being accumulated is the same as the
types of elements being merged, i.e. variables that are only "added" to through an
associative operation and can therefore be efficiently supported in parallel. They can be used
-to implement counters (as in MapReduce) or sums. Spark natively supports accumulators of type
-Int and Double, and programmers can add support for new types.
+to implement counters (as in MapReduce) or sums. Spark natively supports accumulators of numeric
+value types, and programmers can add support for new types.

An accumulator is created from an initial value v by calling SparkContext#accumulator.
Tasks running on the cluster can then add to it using the Accumulable#+= operator. However,
they cannot read its value. Only the driver program can read the accumulator's value, using
its value method.

The interpreter session below shows an accumulator being used to add up the elements of an array:

scala> val accum = sc.accumulator(0)
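For reference, a minimal self-contained sketch of the pattern this session demonstrates,
assuming only a live SparkContext named sc (as in spark-shell):

    // Sum the elements of an array with an accumulator: tasks may only add,
    // and only the driver may read the result.
    val accum = sc.accumulator(0)
    sc.parallelize(Array(1, 2, 3, 4)).foreach(x => accum += x)
    println(accum.value)  // 10

Adding support for a new value type means supplying an implicit AccumulatorParam. The zero and
addInPlace signatures below follow the 0.9-era trait, but this Array[Double] instance is an
illustrative assumption, not part of the shipped API:

    import org.apache.spark.AccumulatorParam

    implicit object ArraySumParam extends AccumulatorParam[Array[Double]] {
      // zero(v) returns an identity value of the same shape as v
      def zero(initial: Array[Double]): Array[Double] = new Array[Double](initial.length)
      // addInPlace merges two partial sums element-wise
      def addInPlace(a: Array[Double], b: Array[Double]): Array[Double] = {
        for (i <- a.indices) a(i) += b(i)
        a
      }
    }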
diff --git a/site/docs/0.9.0/api/core/org/apache/spark/Partitioner$.html b/site/docs/0.9.0/api/core/org/apache/spark/Partitioner$.html
index 952dce86e..5fcd7611c 100644
--- a/site/docs/0.9.0/api/core/org/apache/spark/Partitioner$.html
+++ b/site/docs/0.9.0/api/core/org/apache/spark/Partitioner$.html
@@ -196,7 +196,7 @@
       
       

Choose a partitioner to use for a cogroup-like operation between a number of RDDs.

If any of the RDDs already has a partitioner, choose that one.

Otherwise, we use a default HashPartitioner. For the number of partitions, if
spark.default.parallelism is set, then we'll use the value from SparkContext
-defaultParallelism, otherwise we'll use the max number of upstream partitions. Unless spark.default.parallelism is set, He number of partitions will be the
+defaultParallelism, otherwise we'll use the max number of upstream partitions. Unless spark.default.parallelism is set, the number of partitions will be the
same as the number of partitions in the largest upstream RDD, as this should be least likely
to cause out-of-memory errors.

We use two method parameters (rdd, others) to enforce callers passing at least 1 RDD.
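The selection rule above is straightforward to express directly. A sketch under the 0.9-era
API follows; the method name and the conf lookup are illustrative, not the verbatim library
source:

    import org.apache.spark.{HashPartitioner, Partitioner}
    import org.apache.spark.rdd.RDD

    // Prefer an existing partitioner, scanning RDDs from largest to smallest;
    // otherwise hash-partition by spark.default.parallelism when set, or by
    // the largest upstream partition count when not.
    def choosePartitioner(rdd: RDD[_], others: RDD[_]*): Partitioner = {
      val bySize = (Seq(rdd) ++ others).sortBy(_.partitions.size).reverse
      bySize.flatMap(_.partitioner).headOption.getOrElse {
        if (rdd.context.getConf.contains("spark.default.parallelism"))
          new HashPartitioner(rdd.context.defaultParallelism)
        else
          new HashPartitioner(bySize.head.partitions.size)
      }
    }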

diff --git a/site/docs/0.9.0/api/core/org/apache/spark/scheduler/StageInfo.html b/site/docs/0.9.0/api/core/org/apache/spark/scheduler/StageInfo.html
index 080a3a58b..b8db679db 100644
--- a/site/docs/0.9.0/api/core/org/apache/spark/scheduler/StageInfo.html
+++ b/site/docs/0.9.0/api/core/org/apache/spark/scheduler/StageInfo.html
@@ -39,7 +39,9 @@
-
+
Stores information about a stage to pass from the scheduler to SparkListeners.

+taskInfos stores the metrics for all tasks that have completed, including redundant, speculated
+tasks.
+

Linear Supertypes
AnyRef, Any
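Since taskInfos pairs each completed task's TaskInfo with its TaskMetrics, a listener can fold
over it directly. A hypothetical consumer is sketched below; the helper name is ours, and the
field shapes assume the 0.9-era scheduler classes:

    import org.apache.spark.scheduler.StageInfo

    // Total wall-clock time spent in a stage's completed tasks, counting
    // redundant speculated attempts, as the description above notes.
    def totalTaskTime(stage: StageInfo): Long =
      stage.taskInfos.map { case (taskInfo, _) => taskInfo.duration }.sum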
diff --git a/site/docs/0.9.0/api/core/org/apache/spark/scheduler/package.html b/site/docs/0.9.0/api/core/org/apache/spark/scheduler/package.html
index 31de15b16..14541b536 100644
--- a/site/docs/0.9.0/api/core/org/apache/spark/scheduler/package.html
+++ b/site/docs/0.9.0/api/core/org/apache/spark/scheduler/package.html
@@ -226,7 +226,7 @@
StageInfo extends AnyRef
-
+

Stores information about a stage to pass from the scheduler to SparkListeners.

  •
diff --git a/site/docs/0.9.0/api/mllib/index.html b/site/docs/0.9.0/api/mllib/index.html
index 9fbbc76db..ab3ad7cae 100644
--- a/site/docs/0.9.0/api/mllib/index.html
+++ b/site/docs/0.9.0/api/mllib/index.html
@@ -60,7 +60,7 @@
  • org.apache.spark.mllib.recommendation
-    1. (object)(class) ALS
-    2. (class) MatrixFactorizationModel
-    3. (object) MFDataGenerator
-    4. (case class) Rating
+    1. (object)(class) ALS
+    2. (class) MatrixFactorizationModel
+    3. (case class) Rating
  • org.apache.spark.mllib.regression

@@ -68,7 +68,7 @@
  • org.apache.spark.mllib.util
-    1. (object) DataValidators
-    2. (object) KMeansDataGenerator
-    3. (object) LinearDataGenerator
-    4. (object) LogisticRegressionDataGenerator
-    5. (object) MLUtils
-    6. (object) SVMDataGenerator
+    1. (object) DataValidators
+    2. (object) KMeansDataGenerator
+    3. (object) LinearDataGenerator
+    4. (object) LogisticRegressionDataGenerator
+    5. (object) MFDataGenerator
+    6. (object) MLUtils
+    7. (object) SVMDataGenerator
  • diff --git a/site/docs/0.9.0/api/mllib/index.js b/site/docs/0.9.0/api/mllib/index.js index 2553a6479..4e48b0980 100644 --- a/site/docs/0.9.0/api/mllib/index.js +++ b/site/docs/0.9.0/api/mllib/index.js @@ -1 +1 @@ -Index.PACKAGES = {"org.apache.spark.mllib.api" : [], "org.apache.spark.mllib.classification" : [{"trait" : "org\/apache\/spark\/mllib\/classification\/ClassificationModel.html", "name" : "org.apache.spark.mllib.classification.ClassificationModel"}, {"class" : "org\/apache\/spark\/mllib\/classification\/LogisticRegressionModel.html", "name" : "org.apache.spark.mllib.classification.LogisticRegressionModel"}, {"object" : "org\/apache\/spark\/mllib\/classification\/LogisticRegressionWithSGD$.html", "class" : "org\/apache\/spark\/mllib\/classification\/LogisticRegressionWithSGD.html", "name" : "org.apache.spark.mllib.classification.LogisticRegressionWithSGD"}, {"object" : "org\/apache\/spark\/mllib\/classification\/NaiveBayes$.html", "class" : "org\/apache\/spark\/mllib\/classification\/NaiveBayes.html", "name" : "org.apache.spark.mllib.classification.NaiveBayes"}, {"class" : "org\/apache\/spark\/mllib\/classification\/NaiveBayesModel.html", "name" : "org.apache.spark.mllib.classification.NaiveBayesModel"}, {"class" : "org\/apache\/spark\/mllib\/classification\/SVMModel.html", "name" : "org.apache.spark.mllib.classification.SVMModel"}, {"object" : "org\/apache\/spark\/mllib\/classification\/SVMWithSGD$.html", "class" : "org\/apache\/spark\/mllib\/classification\/SVMWithSGD.html", "name" : "org.apache.spark.mllib.classification.SVMWithSGD"}], "org.apache.spark" : [], "org.apache" : [], "org.apache.spark.mllib.clustering" : [{"object" : "org\/apache\/spark\/mllib\/clustering\/KMeans$.html", "class" : "org\/apache\/spark\/mllib\/clustering\/KMeans.html", "name" : "org.apache.spark.mllib.clustering.KMeans"}, {"class" : "org\/apache\/spark\/mllib\/clustering\/KMeansModel.html", "name" : "org.apache.spark.mllib.clustering.KMeansModel"}], "org.apache.spark.mllib.util" : [{"object" : "org\/apache\/spark\/mllib\/util\/DataValidators$.html", "name" : "org.apache.spark.mllib.util.DataValidators"}, {"object" : "org\/apache\/spark\/mllib\/util\/KMeansDataGenerator$.html", "name" : "org.apache.spark.mllib.util.KMeansDataGenerator"}, {"object" : "org\/apache\/spark\/mllib\/util\/LinearDataGenerator$.html", "name" : "org.apache.spark.mllib.util.LinearDataGenerator"}, {"object" : "org\/apache\/spark\/mllib\/util\/LogisticRegressionDataGenerator$.html", "name" : "org.apache.spark.mllib.util.LogisticRegressionDataGenerator"}, {"object" : "org\/apache\/spark\/mllib\/util\/MLUtils$.html", "name" : "org.apache.spark.mllib.util.MLUtils"}, {"object" : "org\/apache\/spark\/mllib\/util\/SVMDataGenerator$.html", "name" : "org.apache.spark.mllib.util.SVMDataGenerator"}], "org.apache.spark.mllib.recommendation" : [{"object" : "org\/apache\/spark\/mllib\/recommendation\/ALS$.html", "class" : "org\/apache\/spark\/mllib\/recommendation\/ALS.html", "name" : "org.apache.spark.mllib.recommendation.ALS"}, {"class" : "org\/apache\/spark\/mllib\/recommendation\/MatrixFactorizationModel.html", "name" : "org.apache.spark.mllib.recommendation.MatrixFactorizationModel"}, {"object" : "org\/apache\/spark\/mllib\/recommendation\/MFDataGenerator$.html", "name" : "org.apache.spark.mllib.recommendation.MFDataGenerator"}, {"case class" : "org\/apache\/spark\/mllib\/recommendation\/Rating.html", "name" : "org.apache.spark.mllib.recommendation.Rating"}], "org.apache.spark.mllib.optimization" : [{"class" : 
"org\/apache\/spark\/mllib\/optimization\/Gradient.html", "name" : "org.apache.spark.mllib.optimization.Gradient"}, {"object" : "org\/apache\/spark\/mllib\/optimization\/GradientDescent$.html", "class" : "org\/apache\/spark\/mllib\/optimization\/GradientDescent.html", "name" : "org.apache.spark.mllib.optimization.GradientDescent"}, {"class" : "org\/apache\/spark\/mllib\/optimization\/HingeGradient.html", "name" : "org.apache.spark.mllib.optimization.HingeGradient"}, {"class" : "org\/apache\/spark\/mllib\/optimization\/L1Updater.html", "name" : "org.apache.spark.mllib.optimization.L1Updater"}, {"class" : "org\/apache\/spark\/mllib\/optimization\/LogisticGradient.html", "name" : "org.apache.spark.mllib.optimization.LogisticGradient"}, {"trait" : "org\/apache\/spark\/mllib\/optimization\/Optimizer.html", "name" : "org.apache.spark.mllib.optimization.Optimizer"}, {"class" : "org\/apache\/spark\/mllib\/optimization\/SimpleUpdater.html", "name" : "org.apache.spark.mllib.optimization.SimpleUpdater"}, {"class" : "org\/apache\/spark\/mllib\/optimization\/SquaredGradient.html", "name" : "org.apache.spark.mllib.optimization.SquaredGradient"}, {"class" : "org\/apache\/spark\/mllib\/optimization\/SquaredL2Updater.html", "name" : "org.apache.spark.mllib.optimization.SquaredL2Updater"}, {"class" : "org\/apache\/spark\/mllib\/optimization\/Updater.html", "name" : "org.apache.spark.mllib.optimization.Updater"}], "org.apache.spark.mllib.api.python" : [{"class" : "org\/apache\/spark\/mllib\/api\/python\/PythonMLLibAPI.html", "name" : "org.apache.spark.mllib.api.python.PythonMLLibAPI"}], "org" : [], "org.apache.spark.mllib.regression" : [{"class" : "org\/apache\/spark\/mllib\/regression\/GeneralizedLinearAlgorithm.html", "name" : "org.apache.spark.mllib.regression.GeneralizedLinearAlgorithm"}, {"class" : "org\/apache\/spark\/mllib\/regression\/GeneralizedLinearModel.html", "name" : "org.apache.spark.mllib.regression.GeneralizedLinearModel"}, {"case class" : "org\/apache\/spark\/mllib\/regression\/LabeledPoint.html", "name" : "org.apache.spark.mllib.regression.LabeledPoint"}, {"class" : "org\/apache\/spark\/mllib\/regression\/LassoModel.html", "name" : "org.apache.spark.mllib.regression.LassoModel"}, {"object" : "org\/apache\/spark\/mllib\/regression\/LassoWithSGD$.html", "class" : "org\/apache\/spark\/mllib\/regression\/LassoWithSGD.html", "name" : "org.apache.spark.mllib.regression.LassoWithSGD"}, {"class" : "org\/apache\/spark\/mllib\/regression\/LinearRegressionModel.html", "name" : "org.apache.spark.mllib.regression.LinearRegressionModel"}, {"object" : "org\/apache\/spark\/mllib\/regression\/LinearRegressionWithSGD$.html", "class" : "org\/apache\/spark\/mllib\/regression\/LinearRegressionWithSGD.html", "name" : "org.apache.spark.mllib.regression.LinearRegressionWithSGD"}, {"trait" : "org\/apache\/spark\/mllib\/regression\/RegressionModel.html", "name" : "org.apache.spark.mllib.regression.RegressionModel"}, {"class" : "org\/apache\/spark\/mllib\/regression\/RidgeRegressionModel.html", "name" : "org.apache.spark.mllib.regression.RidgeRegressionModel"}, {"object" : "org\/apache\/spark\/mllib\/regression\/RidgeRegressionWithSGD$.html", "class" : "org\/apache\/spark\/mllib\/regression\/RidgeRegressionWithSGD.html", "name" : "org.apache.spark.mllib.regression.RidgeRegressionWithSGD"}], "org.apache.spark.mllib" : []}; \ No newline at end of file +Index.PACKAGES = {"org.apache.spark.mllib.api" : [], "org.apache.spark.mllib.classification" : [{"trait" : 
"org\/apache\/spark\/mllib\/classification\/ClassificationModel.html", "name" : "org.apache.spark.mllib.classification.ClassificationModel"}, {"class" : "org\/apache\/spark\/mllib\/classification\/LogisticRegressionModel.html", "name" : "org.apache.spark.mllib.classification.LogisticRegressionModel"}, {"object" : "org\/apache\/spark\/mllib\/classification\/LogisticRegressionWithSGD$.html", "class" : "org\/apache\/spark\/mllib\/classification\/LogisticRegressionWithSGD.html", "name" : "org.apache.spark.mllib.classification.LogisticRegressionWithSGD"}, {"object" : "org\/apache\/spark\/mllib\/classification\/NaiveBayes$.html", "class" : "org\/apache\/spark\/mllib\/classification\/NaiveBayes.html", "name" : "org.apache.spark.mllib.classification.NaiveBayes"}, {"class" : "org\/apache\/spark\/mllib\/classification\/NaiveBayesModel.html", "name" : "org.apache.spark.mllib.classification.NaiveBayesModel"}, {"class" : "org\/apache\/spark\/mllib\/classification\/SVMModel.html", "name" : "org.apache.spark.mllib.classification.SVMModel"}, {"object" : "org\/apache\/spark\/mllib\/classification\/SVMWithSGD$.html", "class" : "org\/apache\/spark\/mllib\/classification\/SVMWithSGD.html", "name" : "org.apache.spark.mllib.classification.SVMWithSGD"}], "org.apache.spark" : [], "org.apache" : [], "org.apache.spark.mllib.clustering" : [{"object" : "org\/apache\/spark\/mllib\/clustering\/KMeans$.html", "class" : "org\/apache\/spark\/mllib\/clustering\/KMeans.html", "name" : "org.apache.spark.mllib.clustering.KMeans"}, {"class" : "org\/apache\/spark\/mllib\/clustering\/KMeansModel.html", "name" : "org.apache.spark.mllib.clustering.KMeansModel"}], "org.apache.spark.mllib.util" : [{"object" : "org\/apache\/spark\/mllib\/util\/DataValidators$.html", "name" : "org.apache.spark.mllib.util.DataValidators"}, {"object" : "org\/apache\/spark\/mllib\/util\/KMeansDataGenerator$.html", "name" : "org.apache.spark.mllib.util.KMeansDataGenerator"}, {"object" : "org\/apache\/spark\/mllib\/util\/LinearDataGenerator$.html", "name" : "org.apache.spark.mllib.util.LinearDataGenerator"}, {"object" : "org\/apache\/spark\/mllib\/util\/LogisticRegressionDataGenerator$.html", "name" : "org.apache.spark.mllib.util.LogisticRegressionDataGenerator"}, {"object" : "org\/apache\/spark\/mllib\/util\/MFDataGenerator$.html", "name" : "org.apache.spark.mllib.util.MFDataGenerator"}, {"object" : "org\/apache\/spark\/mllib\/util\/MLUtils$.html", "name" : "org.apache.spark.mllib.util.MLUtils"}, {"object" : "org\/apache\/spark\/mllib\/util\/SVMDataGenerator$.html", "name" : "org.apache.spark.mllib.util.SVMDataGenerator"}], "org.apache.spark.mllib.recommendation" : [{"object" : "org\/apache\/spark\/mllib\/recommendation\/ALS$.html", "class" : "org\/apache\/spark\/mllib\/recommendation\/ALS.html", "name" : "org.apache.spark.mllib.recommendation.ALS"}, {"class" : "org\/apache\/spark\/mllib\/recommendation\/MatrixFactorizationModel.html", "name" : "org.apache.spark.mllib.recommendation.MatrixFactorizationModel"}, {"case class" : "org\/apache\/spark\/mllib\/recommendation\/Rating.html", "name" : "org.apache.spark.mllib.recommendation.Rating"}], "org.apache.spark.mllib.optimization" : [{"class" : "org\/apache\/spark\/mllib\/optimization\/Gradient.html", "name" : "org.apache.spark.mllib.optimization.Gradient"}, {"object" : "org\/apache\/spark\/mllib\/optimization\/GradientDescent$.html", "class" : "org\/apache\/spark\/mllib\/optimization\/GradientDescent.html", "name" : "org.apache.spark.mllib.optimization.GradientDescent"}, {"class" : 
"org\/apache\/spark\/mllib\/optimization\/HingeGradient.html", "name" : "org.apache.spark.mllib.optimization.HingeGradient"}, {"class" : "org\/apache\/spark\/mllib\/optimization\/L1Updater.html", "name" : "org.apache.spark.mllib.optimization.L1Updater"}, {"class" : "org\/apache\/spark\/mllib\/optimization\/LogisticGradient.html", "name" : "org.apache.spark.mllib.optimization.LogisticGradient"}, {"trait" : "org\/apache\/spark\/mllib\/optimization\/Optimizer.html", "name" : "org.apache.spark.mllib.optimization.Optimizer"}, {"class" : "org\/apache\/spark\/mllib\/optimization\/SimpleUpdater.html", "name" : "org.apache.spark.mllib.optimization.SimpleUpdater"}, {"class" : "org\/apache\/spark\/mllib\/optimization\/SquaredGradient.html", "name" : "org.apache.spark.mllib.optimization.SquaredGradient"}, {"class" : "org\/apache\/spark\/mllib\/optimization\/SquaredL2Updater.html", "name" : "org.apache.spark.mllib.optimization.SquaredL2Updater"}, {"class" : "org\/apache\/spark\/mllib\/optimization\/Updater.html", "name" : "org.apache.spark.mllib.optimization.Updater"}], "org.apache.spark.mllib.api.python" : [{"class" : "org\/apache\/spark\/mllib\/api\/python\/PythonMLLibAPI.html", "name" : "org.apache.spark.mllib.api.python.PythonMLLibAPI"}], "org" : [], "org.apache.spark.mllib.regression" : [{"class" : "org\/apache\/spark\/mllib\/regression\/GeneralizedLinearAlgorithm.html", "name" : "org.apache.spark.mllib.regression.GeneralizedLinearAlgorithm"}, {"class" : "org\/apache\/spark\/mllib\/regression\/GeneralizedLinearModel.html", "name" : "org.apache.spark.mllib.regression.GeneralizedLinearModel"}, {"case class" : "org\/apache\/spark\/mllib\/regression\/LabeledPoint.html", "name" : "org.apache.spark.mllib.regression.LabeledPoint"}, {"class" : "org\/apache\/spark\/mllib\/regression\/LassoModel.html", "name" : "org.apache.spark.mllib.regression.LassoModel"}, {"object" : "org\/apache\/spark\/mllib\/regression\/LassoWithSGD$.html", "class" : "org\/apache\/spark\/mllib\/regression\/LassoWithSGD.html", "name" : "org.apache.spark.mllib.regression.LassoWithSGD"}, {"class" : "org\/apache\/spark\/mllib\/regression\/LinearRegressionModel.html", "name" : "org.apache.spark.mllib.regression.LinearRegressionModel"}, {"object" : "org\/apache\/spark\/mllib\/regression\/LinearRegressionWithSGD$.html", "class" : "org\/apache\/spark\/mllib\/regression\/LinearRegressionWithSGD.html", "name" : "org.apache.spark.mllib.regression.LinearRegressionWithSGD"}, {"trait" : "org\/apache\/spark\/mllib\/regression\/RegressionModel.html", "name" : "org.apache.spark.mllib.regression.RegressionModel"}, {"class" : "org\/apache\/spark\/mllib\/regression\/RidgeRegressionModel.html", "name" : "org.apache.spark.mllib.regression.RidgeRegressionModel"}, {"object" : "org\/apache\/spark\/mllib\/regression\/RidgeRegressionWithSGD$.html", "class" : "org\/apache\/spark\/mllib\/regression\/RidgeRegressionWithSGD.html", "name" : "org.apache.spark.mllib.regression.RidgeRegressionWithSGD"}], "org.apache.spark.mllib" : []}; \ No newline at end of file diff --git a/site/docs/0.9.0/api/mllib/index/index-m.html b/site/docs/0.9.0/api/mllib/index/index-m.html index 0e394aafa..3a914942c 100644 --- a/site/docs/0.9.0/api/mllib/index/index-m.html +++ b/site/docs/0.9.0/api/mllib/index/index-m.html @@ -13,7 +13,7 @@
    MFDataGenerator
-
+
    MLUtils

@@ -22,7 +22,7 @@
    main
-
+
    maxIterations
diff --git a/site/docs/0.9.0/api/mllib/org/apache/spark/mllib/recommendation/package.html b/site/docs/0.9.0/api/mllib/org/apache/spark/mllib/recommendation/package.html
index cc667f7b1..709042d36 100644
--- a/site/docs/0.9.0/api/mllib/org/apache/spark/mllib/recommendation/package.html
+++ b/site/docs/0.9.0/api/mllib/org/apache/spark/mllib/recommendation/package.html
@@ -117,19 +117,6 @@

    Top-level methods for calling Alternating Least Squares (ALS) matrix factorization.

-  •
-    object MFDataGenerator
-
-    Generate RDD(s) containing data for Matrix Factorization.
-
  •
diff --git a/site/docs/0.9.0/api/mllib/org/apache/spark/mllib/util/package.html b/site/docs/0.9.0/api/mllib/org/apache/spark/mllib/util/package.html
index babdae2c1..d0d20f4f5 100644
--- a/site/docs/0.9.0/api/mllib/org/apache/spark/mllib/util/package.html
+++ b/site/docs/0.9.0/api/mllib/org/apache/spark/mllib/util/package.html
@@ -114,6 +114,19 @@

    Generate test data for LogisticRegression.

+  •
+    object MFDataGenerator
+
+    Generate RDD(s) containing data for Matrix Factorization.
+
  •
diff --git a/site/docs/0.9.0/api/pyspark/class-tree.html b/site/docs/0.9.0/api/pyspark/class-tree.html
index e55d9b7cb..591f3950f 100644
--- a/site/docs/0.9.0/api/pyspark/class-tree.html
+++ b/site/docs/0.9.0/api/pyspark/class-tree.html
@@ -163,7 +163,7 @@