author     Sean Owen <sowen@cloudera.com>  2015-09-12 10:40:10 +0100
committer  Sean Owen <sowen@cloudera.com>  2015-09-12 10:40:10 +0100
commit   22730ad54d681ad30e63fe910e8d89360853177d (patch)
tree     81194034499a6d391a0949e865fc0aa6dd5fc4ec /sql/hive
parent   8285e3b0d3dc0eff669eba993742dfe0401116f9 (diff)
[SPARK-10547] [TEST] Streamline / improve style of Java API tests
Fix a few Java API test style issues: unused generic types, exceptions, wrong assert argument order.

Author: Sean Owen <sowen@cloudera.com>

Closes #8706 from srowen/SPARK-10547.
Diffstat (limited to 'sql/hive')
-rw-r--r--  sql/hive/src/test/java/org/apache/spark/sql/hive/JavaDataFrameSuite.java            |  8
-rw-r--r--  sql/hive/src/test/java/org/apache/spark/sql/hive/JavaMetastoreDataSourcesSuite.java | 12
2 files changed, 10 insertions(+), 10 deletions(-)
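
To make the commit message's patterns concrete before the diff itself, here is a minimal, hypothetical standalone JUnit 4 test (the class name, helper, and values are illustrative only, not part of this patch) showing the diamond operator replacing repeated type arguments, a helper that touches no instance state being made static, and the expected-before-actual assertEquals argument order. The JavaDataFrameSuite hunk's switch from functions.avg("key") to a statically imported avg("key") is the same kind of cleanup, dropping redundant qualification.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.junit.Assert;
import org.junit.Test;

// Illustrative only: a small JUnit 4 test exercising the style patterns this patch applies.
public class JavaStyleExampleSuite {

  // A helper that uses no instance state can be static, like checkAnswer in the hunks below.
  private static String describe(Map<String, String> options) {
    return "options:" + options.size();
  }

  @Test
  public void stylePatterns() {
    // Diamond operator: the element type is inferred from the declaration,
    // so new ArrayList<String>(10) becomes new ArrayList<>(10).
    List<String> jsonObjects = new ArrayList<>(10);
    for (int i = 0; i < 3; i++) {
      jsonObjects.add("{\"key\":" + i + "}");
    }

    Map<String, String> options = new HashMap<>();
    options.put("path", "/tmp/example");

    // JUnit's assertEquals takes the expected value first, then the actual value.
    Assert.assertEquals(3, jsonObjects.size());
    Assert.assertEquals("options:1", describe(options));
  }
}
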
diff --git a/sql/hive/src/test/java/org/apache/spark/sql/hive/JavaDataFrameSuite.java b/sql/hive/src/test/java/org/apache/spark/sql/hive/JavaDataFrameSuite.java
index 019d8a3026..b4bf9eef8f 100644
--- a/sql/hive/src/test/java/org/apache/spark/sql/hive/JavaDataFrameSuite.java
+++ b/sql/hive/src/test/java/org/apache/spark/sql/hive/JavaDataFrameSuite.java
@@ -40,7 +40,7 @@ public class JavaDataFrameSuite {
DataFrame df;
- private void checkAnswer(DataFrame actual, List<Row> expected) {
+ private static void checkAnswer(DataFrame actual, List<Row> expected) {
String errorMessage = QueryTest$.MODULE$.checkAnswer(actual, expected);
if (errorMessage != null) {
Assert.fail(errorMessage);
@@ -52,7 +52,7 @@ public class JavaDataFrameSuite {
hc = TestHive$.MODULE$;
sc = new JavaSparkContext(hc.sparkContext());
- List<String> jsonObjects = new ArrayList<String>(10);
+ List<String> jsonObjects = new ArrayList<>(10);
for (int i = 0; i < 10; i++) {
jsonObjects.add("{\"key\":" + i + ", \"value\":\"str" + i + "\"}");
}
@@ -71,7 +71,7 @@ public class JavaDataFrameSuite {
@Test
public void saveTableAndQueryIt() {
checkAnswer(
- df.select(functions.avg("key").over(
+ df.select(avg("key").over(
Window.partitionBy("value").orderBy("key").rowsBetween(-1, 1))),
hc.sql("SELECT avg(key) " +
"OVER (PARTITION BY value " +
@@ -95,7 +95,7 @@ public class JavaDataFrameSuite {
registeredUDAF.apply(col("value")),
callUDF("mydoublesum", col("value")));
- List<Row> expectedResult = new ArrayList<Row>();
+ List<Row> expectedResult = new ArrayList<>();
expectedResult.add(RowFactory.create(4950.0, 9900.0, 9900.0, 9900.0));
checkAnswer(
aggregatedDF,
diff --git a/sql/hive/src/test/java/org/apache/spark/sql/hive/JavaMetastoreDataSourcesSuite.java b/sql/hive/src/test/java/org/apache/spark/sql/hive/JavaMetastoreDataSourcesSuite.java
index 4192155975..c8d272794d 100644
--- a/sql/hive/src/test/java/org/apache/spark/sql/hive/JavaMetastoreDataSourcesSuite.java
+++ b/sql/hive/src/test/java/org/apache/spark/sql/hive/JavaMetastoreDataSourcesSuite.java
@@ -53,7 +53,7 @@ public class JavaMetastoreDataSourcesSuite {
FileSystem fs;
DataFrame df;
- private void checkAnswer(DataFrame actual, List<Row> expected) {
+ private static void checkAnswer(DataFrame actual, List<Row> expected) {
String errorMessage = QueryTest$.MODULE$.checkAnswer(actual, expected);
if (errorMessage != null) {
Assert.fail(errorMessage);
@@ -77,7 +77,7 @@ public class JavaMetastoreDataSourcesSuite {
fs.delete(hiveManagedPath, true);
}
- List<String> jsonObjects = new ArrayList<String>(10);
+ List<String> jsonObjects = new ArrayList<>(10);
for (int i = 0; i < 10; i++) {
jsonObjects.add("{\"a\":" + i + ", \"b\":\"str" + i + "\"}");
}
@@ -97,7 +97,7 @@ public class JavaMetastoreDataSourcesSuite {
@Test
public void saveExternalTableAndQueryIt() {
- Map<String, String> options = new HashMap<String, String>();
+ Map<String, String> options = new HashMap<>();
options.put("path", path.toString());
df.write()
.format("org.apache.spark.sql.json")
@@ -120,7 +120,7 @@ public class JavaMetastoreDataSourcesSuite {
@Test
public void saveExternalTableWithSchemaAndQueryIt() {
- Map<String, String> options = new HashMap<String, String>();
+ Map<String, String> options = new HashMap<>();
options.put("path", path.toString());
df.write()
.format("org.apache.spark.sql.json")
@@ -132,7 +132,7 @@ public class JavaMetastoreDataSourcesSuite {
sqlContext.sql("SELECT * FROM javaSavedTable"),
df.collectAsList());
- List<StructField> fields = new ArrayList<StructField>();
+ List<StructField> fields = new ArrayList<>();
fields.add(DataTypes.createStructField("b", DataTypes.StringType, true));
StructType schema = DataTypes.createStructType(fields);
DataFrame loadedDF =
@@ -148,7 +148,7 @@ public class JavaMetastoreDataSourcesSuite {
@Test
public void saveTableAndQueryIt() {
- Map<String, String> options = new HashMap<String, String>();
+ Map<String, String> options = new HashMap<>();
df.write()
.format("org.apache.spark.sql.json")
.mode(SaveMode.Append)