about summary refs log tree commit diff
path: root/examples/src/main/java
diff options
context:
space:
mode:
author: Xin Ren <iamshrek@126.com> 2016-03-24 14:25:10 -0700
committer: Xiangrui Meng <meng@databricks.com> 2016-03-24 14:25:10 -0700
commit d283223a5a75c53970e72a1016e0b237856b5ea1 (patch)
tree a9e657985580ab63c900abfbe757602de7a59584 /examples/src/main/java
parent 342079dc45425309798d6082cccef86858f08a77 (diff)
download spark-d283223a5a75c53970e72a1016e0b237856b5ea1.tar.gz
spark-d283223a5a75c53970e72a1016e0b237856b5ea1.tar.bz2
spark-d283223a5a75c53970e72a1016e0b237856b5ea1.zip
[SPARK-13017][DOCS] Replace example code in mllib-feature-extraction.md using include_example
Replace example code in mllib-feature-extraction.md using include_example https://issues.apache.org/jira/browse/SPARK-13017 The example code in the user guide is embedded in the markdown and hence it is not easy to test. It would be nice to automatically test them. This JIRA is to discuss options to automate example code testing and see what we can do in Spark 1.6. Goal is to move actual example code to spark/examples and test compilation in Jenkins builds. Then in the markdown, we can reference part of the code to show in the user guide. This requires adding a Jekyll tag that is similar to https://github.com/jekyll/jekyll/blob/master/lib/jekyll/tags/include.rb, e.g., called include_example. `{% include_example scala/org/apache/spark/examples/mllib/TFIDFExample.scala %}` Jekyll will find `examples/src/main/scala/org/apache/spark/examples/mllib/TFIDFExample.scala` and pick code blocks marked "example" and replace code block in `{% highlight %}` in the markdown. See more sub-tasks in parent ticket: https://issues.apache.org/jira/browse/SPARK-11337 Author: Xin Ren <iamshrek@126.com> Closes #11142 from keypointt/SPARK-13017.
Diffstat (limited to 'examples/src/main/java')
-rw-r--r-- examples/src/main/java/org/apache/spark/examples/mllib/JavaChiSqSelectorExample.java 83
-rw-r--r-- examples/src/main/java/org/apache/spark/examples/mllib/JavaElementwiseProductExample.java 78
2 files changed, 161 insertions, 0 deletions
diff --git a/examples/src/main/java/org/apache/spark/examples/mllib/JavaChiSqSelectorExample.java b/examples/src/main/java/org/apache/spark/examples/mllib/JavaChiSqSelectorExample.java
new file mode 100644
index 0000000000..ad44acb4cd
--- /dev/null
+++ b/examples/src/main/java/org/apache/spark/examples/mllib/JavaChiSqSelectorExample.java
@@ -0,0 +1,83 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.examples.mllib;
+
+import org.apache.spark.SparkConf;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.api.java.function.VoidFunction;
+// $example on$
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.function.Function;
+import org.apache.spark.mllib.feature.ChiSqSelector;
+import org.apache.spark.mllib.feature.ChiSqSelectorModel;
+import org.apache.spark.mllib.linalg.Vectors;
+import org.apache.spark.mllib.regression.LabeledPoint;
+import org.apache.spark.mllib.util.MLUtils;
+// $example off$
+
+/**
+ * MLlib example: chi-squared feature selection on an RDD of LabeledPoint.
+ * Loads a LIBSVM data set, discretizes the features (ChiSqSelector expects
+ * categorical features), fits a ChiSqSelectorModel keeping the top 50
+ * features, and prints the filtered data.
+ *
+ * The "$example on$" / "$example off$" comment markers bound the snippet
+ * that the docs build (include_example Jekyll tag, SPARK-13017) extracts
+ * into mllib-feature-extraction.md — do not remove them.
+ */
+public class JavaChiSqSelectorExample {
+  public static void main(String[] args) {
+
+    SparkConf conf = new SparkConf().setAppName("JavaChiSqSelectorExample");
+    JavaSparkContext jsc = new JavaSparkContext(conf);
+
+    // $example on$
+    // Cached because the RDD is traversed more than once: once by fit()
+    // below (via discretizedData) and again by the final map/foreach.
+    JavaRDD<LabeledPoint> points = MLUtils.loadLibSVMFile(jsc.sc(),
+      "data/mllib/sample_libsvm_data.txt").toJavaRDD().cache();
+
+    // Discretize data in 16 equal bins since ChiSqSelector requires categorical features
+    // Although features are doubles, the ChiSqSelector treats each unique value as a category
+    JavaRDD<LabeledPoint> discretizedData = points.map(
+      new Function<LabeledPoint, LabeledPoint>() {
+        @Override
+        public LabeledPoint call(LabeledPoint lp) {
+          final double[] discretizedFeatures = new double[lp.features().size()];
+          for (int i = 0; i < lp.features().size(); ++i) {
+            // floor(value / 16) collapses raw feature values into coarse bins.
+            discretizedFeatures[i] = Math.floor(lp.features().apply(i) / 16);
+          }
+          // Label is kept unchanged; only the feature vector is rebinned.
+          return new LabeledPoint(lp.label(), Vectors.dense(discretizedFeatures));
+        }
+      }
+    );
+
+    // Create ChiSqSelector that will select top 50 of 692 features
+    ChiSqSelector selector = new ChiSqSelector(50);
+    // Create ChiSqSelector model (selecting features)
+    // Declared final so the anonymous Function below can capture it (pre-Java-8 requirement).
+    final ChiSqSelectorModel transformer = selector.fit(discretizedData.rdd());
+    // Filter the top 50 features from each feature vector
+    JavaRDD<LabeledPoint> filteredData = discretizedData.map(
+      new Function<LabeledPoint, LabeledPoint>() {
+        @Override
+        public LabeledPoint call(LabeledPoint lp) {
+          return new LabeledPoint(lp.label(), transformer.transform(lp.features()));
+        }
+      }
+    );
+    // $example off$
+
+    // Print outside the example markers so the user guide shows only the transform.
+    System.out.println("filtered data: ");
+    filteredData.foreach(new VoidFunction<LabeledPoint>() {
+      @Override
+      public void call(LabeledPoint labeledPoint) throws Exception {
+        System.out.println(labeledPoint.toString());
+      }
+    });
+
+    jsc.stop();
+  }
+}
diff --git a/examples/src/main/java/org/apache/spark/examples/mllib/JavaElementwiseProductExample.java b/examples/src/main/java/org/apache/spark/examples/mllib/JavaElementwiseProductExample.java
new file mode 100644
index 0000000000..c8ce6ab284
--- /dev/null
+++ b/examples/src/main/java/org/apache/spark/examples/mllib/JavaElementwiseProductExample.java
@@ -0,0 +1,78 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.examples.mllib;
+
+// $example on$
+import java.util.Arrays;
+// $example off$
+
+import org.apache.spark.SparkConf;
+import org.apache.spark.api.java.JavaSparkContext;
+// $example on$
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.function.Function;
+import org.apache.spark.mllib.feature.ElementwiseProduct;
+import org.apache.spark.mllib.linalg.Vector;
+import org.apache.spark.mllib.linalg.Vectors;
+// $example off$
+import org.apache.spark.api.java.function.VoidFunction;
+
+/**
+ * MLlib example: ElementwiseProduct, which multiplies each input vector
+ * component-wise by a fixed "transforming" vector (a Hadamard product).
+ * Demonstrates that the batch RDD transform and the per-row transform
+ * produce the same results.
+ *
+ * The "$example on$" / "$example off$" comment markers bound the snippet
+ * that the docs build (include_example Jekyll tag, SPARK-13017) extracts
+ * into mllib-feature-extraction.md — do not remove them.
+ */
+public class JavaElementwiseProductExample {
+  public static void main(String[] args) {
+
+    SparkConf conf = new SparkConf().setAppName("JavaElementwiseProductExample");
+    JavaSparkContext jsc = new JavaSparkContext(conf);
+
+    // $example on$
+    // Create some vector data; also works for sparse vectors
+    JavaRDD<Vector> data = jsc.parallelize(Arrays.asList(
+      Vectors.dense(1.0, 2.0, 3.0), Vectors.dense(4.0, 5.0, 6.0)));
+    // Each input vector is multiplied component-wise by (0.0, 1.0, 2.0).
+    Vector transformingVector = Vectors.dense(0.0, 1.0, 2.0);
+    // Declared final so the anonymous Function below can capture it (pre-Java-8 requirement).
+    final ElementwiseProduct transformer = new ElementwiseProduct(transformingVector);
+
+    // Batch transform and per-row transform give the same results:
+    JavaRDD<Vector> transformedData = transformer.transform(data);
+    JavaRDD<Vector> transformedData2 = data.map(
+      new Function<Vector, Vector>() {
+        @Override
+        public Vector call(Vector v) {
+          return transformer.transform(v);
+        }
+      }
+    );
+    // $example off$
+
+    // Print both results outside the example markers so the user guide
+    // shows only the transform itself.
+    System.out.println("transformedData: ");
+    transformedData.foreach(new VoidFunction<Vector>() {
+      @Override
+      public void call(Vector vector) throws Exception {
+        System.out.println(vector.toString());
+      }
+    });
+
+    System.out.println("transformedData2: ");
+    transformedData2.foreach(new VoidFunction<Vector>() {
+      @Override
+      public void call(Vector vector) throws Exception {
+        System.out.println(vector.toString());
+      }
+    });
+
+    jsc.stop();
+  }
+}