docs/ml-guide.md | 51 +++++++++++++++++++++++++++++++++++++++++++--------
1 file changed, 43 insertions(+), 8 deletions(-)
diff --git a/docs/ml-guide.md b/docs/ml-guide.md
index da6aef7f14..c08c76d226 100644
--- a/docs/ml-guide.md
+++ b/docs/ml-guide.md
@@ -408,31 +408,31 @@ import org.apache.spark.sql.SQLContext;
// Labeled and unlabeled instance types.
// Spark SQL can infer schema from Java Beans.
public class Document implements Serializable {
- private Long id;
+ private long id;
private String text;

- public Document(Long id, String text) {
+ public Document(long id, String text) {
this.id = id;
this.text = text;
}

- public Long getId() { return this.id; }
- public void setId(Long id) { this.id = id; }
+ public long getId() { return this.id; }
+ public void setId(long id) { this.id = id; }

public String getText() { return this.text; }
public void setText(String text) { this.text = text; }
}

public class LabeledDocument extends Document implements Serializable {
- private Double label;
+ private double label;

- public LabeledDocument(Long id, String text, Double label) {
+ public LabeledDocument(long id, String text, double label) {
super(id, text);
this.label = label;
}

- public Double getLabel() { return this.label; }
- public void setLabel(Double label) { this.label = label; }
+ public double getLabel() { return this.label; }
+ public void setLabel(double label) { this.label = label; }
}

// Set up contexts.
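
As the comment in this hunk notes, Spark SQL infers the DataFrame schema from a bean's getter signatures, so the accessor types directly determine the column types. Below is a minimal, hypothetical sketch of that inference; the app name, variable names, and sample rows are illustrative only and not part of this patch.

import java.util.Arrays;
import java.util.List;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.SQLContext;

// Hypothetical, self-contained sketch: build a DataFrame from the Document bean above.
SparkConf sketchConf = new SparkConf().setAppName("BeanSchemaSketch");
JavaSparkContext sketchSc = new JavaSparkContext(sketchConf);
SQLContext sketchSql = new SQLContext(sketchSc);

List<Document> localDocs = Arrays.asList(
  new Document(0L, "spark pipelines"),
  new Document(1L, "hello world"));

// Spark SQL inspects getId()/getText() on Document to derive the schema.
DataFrame docs = sketchSql.createDataFrame(sketchSc.parallelize(localDocs), Document.class);
docs.printSchema();  // expect an id column and a text column inferred from the bean
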
@@ -565,6 +565,11 @@ import org.apache.spark.ml.tuning.{ParamGridBuilder, CrossValidator}
import org.apache.spark.mllib.linalg.Vector
import org.apache.spark.sql.{Row, SQLContext}
+// Labeled and unlabeled instance types.
+// Spark SQL can infer schema from case classes.
+case class LabeledDocument(id: Long, text: String, label: Double)
+case class Document(id: Long, text: String)
+
val conf = new SparkConf().setAppName("CrossValidatorExample")
val sc = new SparkContext(conf)
val sqlContext = new SQLContext(sc)
@@ -655,6 +660,36 @@ import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SQLContext;
+// Labeled and unlabeled instance types.
+// Spark SQL can infer schema from Java Beans.
+public class Document implements Serializable {
+ private long id;
+ private String text;
+
+ public Document(long id, String text) {
+ this.id = id;
+ this.text = text;
+ }
+
+ public long getId() { return this.id; }
+ public void setId(long id) { this.id = id; }
+
+ public String getText() { return this.text; }
+ public void setText(String text) { this.text = text; }
+}
+
+public class LabeledDocument extends Document implements Serializable {
+ private double label;
+
+ public LabeledDocument(long id, String text, double label) {
+ super(id, text);
+ this.label = label;
+ }
+
+ public double getLabel() { return this.label; }
+ public void setLabel(double label) { this.label = label; }
+}
+
SparkConf conf = new SparkConf().setAppName("JavaCrossValidatorExample");
JavaSparkContext jsc = new JavaSparkContext(conf);
SQLContext jsql = new SQLContext(jsc);
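
To show where the beans added in this hunk lead, here is a hedged sketch of building the CrossValidator example's training DataFrame from LabeledDocument instances via the same bean-based schema inference. The sample rows and variable names are illustrative, not part of the patch; the snippet continues from the jsc and jsql contexts set up just above.

import java.util.Arrays;
import java.util.List;
import org.apache.spark.api.java.JavaRDD;

// Illustrative sample documents; the guide defines its own training set.
List<LabeledDocument> localTraining = Arrays.asList(
  new LabeledDocument(0L, "a b c d e spark", 1.0),
  new LabeledDocument(1L, "b d", 0.0),
  new LabeledDocument(2L, "spark f g h", 1.0),
  new LabeledDocument(3L, "hadoop mapreduce", 0.0));

// The id, text, and label columns come from the bean's getters, so this
// DataFrame is ready for the Pipeline/CrossValidator stages that follow.
JavaRDD<LabeledDocument> trainingRDD = jsc.parallelize(localTraining);
DataFrame training = jsql.createDataFrame(trainingRDD, LabeledDocument.class);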