author    Dongjoon Hyun <dongjoon@apache.org>  2016-03-09 10:31:26 +0000
committer Sean Owen <sowen@cloudera.com>       2016-03-09 10:31:26 +0000
commit    c3689bc24e03a9471cd6e8169da61963c4528252 (patch)
tree      5d1ee90afa2087ede8e4dbc4dd666d699578c230 /sql/hive
parent    cbff2803ef117d7cffe6f05fc1bbd395a1e9c587 (diff)
[SPARK-13702][CORE][SQL][MLLIB] Use diamond operator for generic instance creation in Java code.
## What changes were proposed in this pull request?

To make `docs/examples` (and other related code) simpler and more readable, this PR replaces code like the following with the diamond operator:

```
- final ArrayList<Product2<Object, Object>> dataToWrite =
-     new ArrayList<Product2<Object, Object>>();
+ final ArrayList<Product2<Object, Object>> dataToWrite = new ArrayList<>();
```

Java 7 and higher support the **diamond** operator, which replaces the type arguments required to invoke the constructor of a generic class with an empty set of type parameters (`<>`). Currently, Spark's Java code uses it inconsistently.

## How was this patch tested?

Manual. Pass the existing tests.

Author: Dongjoon Hyun <dongjoon@apache.org>

Closes #11541 from dongjoon-hyun/SPARK-13702.
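For context, a minimal standalone sketch (not taken from this patch; class and variable names are illustrative only) of what the diamond operator does:

```java
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class DiamondOperatorExample {
  public static void main(String[] args) {
    // Pre-Java 7 style: the type arguments are repeated on the right-hand side.
    List<String> beforeList = new ArrayList<String>();
    Map<String, List<Integer>> beforeMap = new HashMap<String, List<Integer>>();

    // Java 7+ diamond operator: the compiler infers the type arguments from the
    // declared type on the left, so the constructor needs only empty brackets.
    List<String> afterList = new ArrayList<>();
    Map<String, List<Integer>> afterMap = new HashMap<>();

    afterList.add("spark");
    afterMap.put("counts", new ArrayList<>());
    System.out.println(afterList + " " + afterMap);
  }
}
```

Both styles compile to identical bytecode; the diamond form simply removes the redundant type arguments, which is the whole of this patch.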
Diffstat (limited to 'sql/hive')
-rw-r--r--  sql/hive/src/test/java/org/apache/spark/sql/hive/aggregate/MyDoubleAvg.java |  4
-rw-r--r--  sql/hive/src/test/java/org/apache/spark/sql/hive/aggregate/MyDoubleSum.java |  4
-rw-r--r--  sql/hive/src/test/java/org/apache/spark/sql/hive/test/Complex.java          | 32
3 files changed, 20 insertions, 20 deletions
diff --git a/sql/hive/src/test/java/org/apache/spark/sql/hive/aggregate/MyDoubleAvg.java b/sql/hive/src/test/java/org/apache/spark/sql/hive/aggregate/MyDoubleAvg.java
index 5a167edd89..ae0c097c36 100644
--- a/sql/hive/src/test/java/org/apache/spark/sql/hive/aggregate/MyDoubleAvg.java
+++ b/sql/hive/src/test/java/org/apache/spark/sql/hive/aggregate/MyDoubleAvg.java
@@ -42,14 +42,14 @@ public class MyDoubleAvg extends UserDefinedAggregateFunction {
private DataType _returnDataType;
public MyDoubleAvg() {
- List<StructField> inputFields = new ArrayList<StructField>();
+ List<StructField> inputFields = new ArrayList<>();
inputFields.add(DataTypes.createStructField("inputDouble", DataTypes.DoubleType, true));
_inputDataType = DataTypes.createStructType(inputFields);
// The buffer has two values, bufferSum for storing the current sum and
// bufferCount for storing the number of non-null input values that have been contributed
// to the current sum.
- List<StructField> bufferFields = new ArrayList<StructField>();
+ List<StructField> bufferFields = new ArrayList<>();
bufferFields.add(DataTypes.createStructField("bufferSum", DataTypes.DoubleType, true));
bufferFields.add(DataTypes.createStructField("bufferCount", DataTypes.LongType, true));
_bufferSchema = DataTypes.createStructType(bufferFields);
diff --git a/sql/hive/src/test/java/org/apache/spark/sql/hive/aggregate/MyDoubleSum.java b/sql/hive/src/test/java/org/apache/spark/sql/hive/aggregate/MyDoubleSum.java
index c3b7768e71..d17fb3e519 100644
--- a/sql/hive/src/test/java/org/apache/spark/sql/hive/aggregate/MyDoubleSum.java
+++ b/sql/hive/src/test/java/org/apache/spark/sql/hive/aggregate/MyDoubleSum.java
@@ -41,11 +41,11 @@ public class MyDoubleSum extends UserDefinedAggregateFunction {
private DataType _returnDataType;
public MyDoubleSum() {
- List<StructField> inputFields = new ArrayList<StructField>();
+ List<StructField> inputFields = new ArrayList<>();
inputFields.add(DataTypes.createStructField("inputDouble", DataTypes.DoubleType, true));
_inputDataType = DataTypes.createStructType(inputFields);
- List<StructField> bufferFields = new ArrayList<StructField>();
+ List<StructField> bufferFields = new ArrayList<>();
bufferFields.add(DataTypes.createStructField("bufferDouble", DataTypes.DoubleType, true));
_bufferSchema = DataTypes.createStructType(bufferFields);
diff --git a/sql/hive/src/test/java/org/apache/spark/sql/hive/test/Complex.java b/sql/hive/src/test/java/org/apache/spark/sql/hive/test/Complex.java
index 4ef1f276d1..fc24600a1e 100644
--- a/sql/hive/src/test/java/org/apache/spark/sql/hive/test/Complex.java
+++ b/sql/hive/src/test/java/org/apache/spark/sql/hive/test/Complex.java
@@ -50,7 +50,7 @@ public class Complex implements org.apache.thrift.TBase<Complex, Complex._Fields
private static final org.apache.thrift.protocol.TField LINT_STRING_FIELD_DESC = new org.apache.thrift.protocol.TField("lintString", org.apache.thrift.protocol.TType.LIST, (short)5);
private static final org.apache.thrift.protocol.TField M_STRING_STRING_FIELD_DESC = new org.apache.thrift.protocol.TField("mStringString", org.apache.thrift.protocol.TType.MAP, (short)6);
- private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+ private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<>();
static {
schemes.put(StandardScheme.class, new ComplexStandardSchemeFactory());
schemes.put(TupleScheme.class, new ComplexTupleSchemeFactory());
@@ -72,7 +72,7 @@ public class Complex implements org.apache.thrift.TBase<Complex, Complex._Fields
LINT_STRING((short)5, "lintString"),
M_STRING_STRING((short)6, "mStringString");
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+ private static final Map<String, _Fields> byName = new HashMap<>();
static {
for (_Fields field : EnumSet.allOf(_Fields.class)) {
@@ -141,7 +141,7 @@ public class Complex implements org.apache.thrift.TBase<Complex, Complex._Fields
private byte __isset_bitfield = 0;
public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
- Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+ Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<>(_Fields.class);
tmpMap.put(_Fields.AINT, new org.apache.thrift.meta_data.FieldMetaData("aint", org.apache.thrift.TFieldRequirementType.DEFAULT,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
tmpMap.put(_Fields.A_STRING, new org.apache.thrift.meta_data.FieldMetaData("aString", org.apache.thrift.TFieldRequirementType.DEFAULT,
@@ -194,28 +194,28 @@ public class Complex implements org.apache.thrift.TBase<Complex, Complex._Fields
this.aString = other.aString;
}
if (other.isSetLint()) {
- List<Integer> __this__lint = new ArrayList<Integer>();
+ List<Integer> __this__lint = new ArrayList<>();
for (Integer other_element : other.lint) {
__this__lint.add(other_element);
}
this.lint = __this__lint;
}
if (other.isSetLString()) {
- List<String> __this__lString = new ArrayList<String>();
+ List<String> __this__lString = new ArrayList<>();
for (String other_element : other.lString) {
__this__lString.add(other_element);
}
this.lString = __this__lString;
}
if (other.isSetLintString()) {
- List<IntString> __this__lintString = new ArrayList<IntString>();
+ List<IntString> __this__lintString = new ArrayList<>();
for (IntString other_element : other.lintString) {
__this__lintString.add(new IntString(other_element));
}
this.lintString = __this__lintString;
}
if (other.isSetMStringString()) {
- Map<String,String> __this__mStringString = new HashMap<String,String>();
+ Map<String,String> __this__mStringString = new HashMap<>();
for (Map.Entry<String, String> other_element : other.mStringString.entrySet()) {
String other_element_key = other_element.getKey();
@@ -339,7 +339,7 @@ public class Complex implements org.apache.thrift.TBase<Complex, Complex._Fields
public void addToLString(String elem) {
if (this.lString == null) {
- this.lString = new ArrayList<String>();
+ this.lString = new ArrayList<>();
}
this.lString.add(elem);
}
@@ -411,7 +411,7 @@ public class Complex implements org.apache.thrift.TBase<Complex, Complex._Fields
public void putToMStringString(String key, String val) {
if (this.mStringString == null) {
- this.mStringString = new HashMap<String,String>();
+ this.mStringString = new HashMap<>();
}
this.mStringString.put(key, val);
}
@@ -876,7 +876,7 @@ public class Complex implements org.apache.thrift.TBase<Complex, Complex._Fields
if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
{
org.apache.thrift.protocol.TList _list0 = iprot.readListBegin();
- struct.lint = new ArrayList<Integer>(_list0.size);
+ struct.lint = new ArrayList<>(_list0.size);
for (int _i1 = 0; _i1 < _list0.size; ++_i1)
{
int _elem2; // required
@@ -894,7 +894,7 @@ public class Complex implements org.apache.thrift.TBase<Complex, Complex._Fields
if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
{
org.apache.thrift.protocol.TList _list3 = iprot.readListBegin();
- struct.lString = new ArrayList<String>(_list3.size);
+ struct.lString = new ArrayList<>(_list3.size);
for (int _i4 = 0; _i4 < _list3.size; ++_i4)
{
String _elem5; // required
@@ -912,7 +912,7 @@ public class Complex implements org.apache.thrift.TBase<Complex, Complex._Fields
if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
{
org.apache.thrift.protocol.TList _list6 = iprot.readListBegin();
- struct.lintString = new ArrayList<IntString>(_list6.size);
+ struct.lintString = new ArrayList<>(_list6.size);
for (int _i7 = 0; _i7 < _list6.size; ++_i7)
{
IntString _elem8; // required
@@ -1114,7 +1114,7 @@ public class Complex implements org.apache.thrift.TBase<Complex, Complex._Fields
if (incoming.get(2)) {
{
org.apache.thrift.protocol.TList _list21 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.I32, iprot.readI32());
- struct.lint = new ArrayList<Integer>(_list21.size);
+ struct.lint = new ArrayList<>(_list21.size);
for (int _i22 = 0; _i22 < _list21.size; ++_i22)
{
int _elem23; // required
@@ -1127,7 +1127,7 @@ public class Complex implements org.apache.thrift.TBase<Complex, Complex._Fields
if (incoming.get(3)) {
{
org.apache.thrift.protocol.TList _list24 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, iprot.readI32());
- struct.lString = new ArrayList<String>(_list24.size);
+ struct.lString = new ArrayList<>(_list24.size);
for (int _i25 = 0; _i25 < _list24.size; ++_i25)
{
String _elem26; // required
@@ -1140,7 +1140,7 @@ public class Complex implements org.apache.thrift.TBase<Complex, Complex._Fields
if (incoming.get(4)) {
{
org.apache.thrift.protocol.TList _list27 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
- struct.lintString = new ArrayList<IntString>(_list27.size);
+ struct.lintString = new ArrayList<>(_list27.size);
for (int _i28 = 0; _i28 < _list27.size; ++_i28)
{
IntString _elem29; // required
@@ -1154,7 +1154,7 @@ public class Complex implements org.apache.thrift.TBase<Complex, Complex._Fields
if (incoming.get(5)) {
{
org.apache.thrift.protocol.TMap _map30 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRING, iprot.readI32());
- struct.mStringString = new HashMap<String,String>(2*_map30.size);
+ struct.mStringString = new HashMap<>(2*_map30.size);
for (int _i31 = 0; _i31 < _map30.size; ++_i31)
{
String _key32; // required