author:    Weiqing Yang <yangweiqing001@gmail.com>  2016-08-08 09:24:37 +0100
committer: Sean Owen <sowen@cloudera.com>           2016-08-08 09:24:37 +0100
commit:    e10ca8de49206087b336c6db0c40868fa271b989
tree:      13fbeadf475c07280f55e7d4b24a328ceafa1781 /examples
parent:    1db1c6567bae0c80fdc522f2cbb65557cd62263f
[SPARK-16945] Fix Java Lint errors
## What changes were proposed in this pull request?

This PR fixes the following minor Java linter errors:

[ERROR] src/main/java/org/apache/spark/sql/catalyst/expressions/VariableLengthRowBasedKeyValueBatch.java:[42,10] (modifier) RedundantModifier: Redundant 'final' modifier.
[ERROR] src/main/java/org/apache/spark/sql/catalyst/expressions/VariableLengthRowBasedKeyValueBatch.java:[97,10] (modifier) RedundantModifier: Redundant 'final' modifier.

## How was this patch tested?

Manual test:

dev/lint-java
Using `mvn` from path: /usr/local/bin/mvn
Checkstyle checks passed.

Author: Weiqing Yang <yangweiqing001@gmail.com>

Closes #14532 from Sherry302/master.
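For context on the Checkstyle rule involved: the RedundantModifier check flags modifiers that are already implied by the surrounding declaration, such as `final` on a method of a `final` class. The snippet below is a minimal, hypothetical illustration of that pattern, not the actual VariableLengthRowBasedKeyValueBatch source:

```java
// Hypothetical illustration of the RedundantModifier check; not the actual Spark code.
public final class RedundantFinalExample {

  // Flagged by Checkstyle: a method of a final class cannot be overridden,
  // so the 'final' modifier here is redundant.
  public final int before() {
    return 1;
  }

  // Fixed: the redundant 'final' modifier is dropped.
  public int after() {
    return 1;
  }
}
```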
Diffstat (limited to 'examples')
-rw-r--r--  examples/src/main/java/org/apache/spark/examples/sql/JavaSQLDataSourceExample.java | 3
1 file changed, 2 insertions(+), 1 deletion(-)
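The diff below only re-wraps one long line to satisfy the 100-character line-length limit enforced by dev/lint-java. For readers unfamiliar with the snippet being touched, a self-contained sketch of the same pattern (creating a Dataset from an in-memory list of JSON strings via JavaSparkContext.parallelize) is shown here; the class name, app name, and local master are illustrative additions, not part of the commit:

```java
import java.util.Arrays;
import java.util.List;

import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

// Self-contained sketch of the pattern re-wrapped in this diff.
public class JsonFromRddSketch {
  public static void main(String[] args) {
    SparkSession spark = SparkSession.builder()
      .appName("JsonFromRddSketch")
      .master("local[*]")
      .getOrCreate();

    // One JSON object per string, as in the example being reformatted.
    List<String> jsonData = Arrays.asList(
      "{\"name\":\"Yin\",\"address\":{\"city\":\"Columbus\",\"state\":\"Ohio\"}}");

    // Wrapped across two lines to stay under the 100-character Checkstyle limit.
    JavaRDD<String> anotherPeopleRDD =
      new JavaSparkContext(spark.sparkContext()).parallelize(jsonData);

    Dataset<Row> anotherPeople = spark.read().json(anotherPeopleRDD);
    anotherPeople.show();

    spark.stop();
  }
}
```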
diff --git a/examples/src/main/java/org/apache/spark/examples/sql/JavaSQLDataSourceExample.java b/examples/src/main/java/org/apache/spark/examples/sql/JavaSQLDataSourceExample.java
index fc92446783..f9087e0593 100644
--- a/examples/src/main/java/org/apache/spark/examples/sql/JavaSQLDataSourceExample.java
+++ b/examples/src/main/java/org/apache/spark/examples/sql/JavaSQLDataSourceExample.java
@@ -221,7 +221,8 @@ public class JavaSQLDataSourceExample {
// an RDD[String] storing one JSON object per string.
List<String> jsonData = Arrays.asList(
"{\"name\":\"Yin\",\"address\":{\"city\":\"Columbus\",\"state\":\"Ohio\"}}");
- JavaRDD<String> anotherPeopleRDD = new JavaSparkContext(spark.sparkContext()).parallelize(jsonData);
+ JavaRDD<String> anotherPeopleRDD =
+ new JavaSparkContext(spark.sparkContext()).parallelize(jsonData);
Dataset anotherPeople = spark.read().json(anotherPeopleRDD);
anotherPeople.show();
// +---------------+----+