about summary refs log tree commit diff
path: root/sql/hive
diff options
context:
space:
mode:
author: Reynold Xin <rxin@databricks.com> 2016-01-12 12:56:52 -0800
committer: Reynold Xin <rxin@databricks.com> 2016-01-12 12:56:52 -0800
commit: 0d543b98f3e3da5053f0476f4647a765460861f3 (patch)
tree: 41ebb97aa34bc1df5815562b3b9fef581223d143 /sql/hive
parent: 0ed430e315b9a409490a3604a619321b476cb520 (diff)
downloadspark-0d543b98f3e3da5053f0476f4647a765460861f3.tar.gz
spark-0d543b98f3e3da5053f0476f4647a765460861f3.tar.bz2
spark-0d543b98f3e3da5053f0476f4647a765460861f3.zip
Revert "[SPARK-12692][BUILD][SQL] Scala style: Fix the style violation (Space before "," or ":")"
This reverts commit 8cfa218f4f1b05f4d076ec15dd0a033ad3e4500d.
Diffstat (limited to 'sql/hive')
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala         | 8
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala               | 2
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveInspectorSuite.scala     | 4
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/InsertIntoHiveTableSuite.scala | 4
4 files changed, 9 insertions, 9 deletions
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala
index c9df3c4a82..7a260e72eb 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala
@@ -657,8 +657,8 @@ private[hive] trait HiveInspectors {
case DecimalType() => PrimitiveObjectInspectorFactory.javaHiveDecimalObjectInspector
case StructType(fields) =>
ObjectInspectorFactory.getStandardStructObjectInspector(
- java.util.Arrays.asList(fields.map(f => f.name): _*),
- java.util.Arrays.asList(fields.map(f => toInspector(f.dataType)): _*))
+ java.util.Arrays.asList(fields.map(f => f.name) : _*),
+ java.util.Arrays.asList(fields.map(f => toInspector(f.dataType)) : _*))
}
/**
@@ -905,8 +905,8 @@ private[hive] trait HiveInspectors {
getListTypeInfo(elemType.toTypeInfo)
case StructType(fields) =>
getStructTypeInfo(
- java.util.Arrays.asList(fields.map(_.name): _*),
- java.util.Arrays.asList(fields.map(_.dataType.toTypeInfo): _*))
+ java.util.Arrays.asList(fields.map(_.name) : _*),
+ java.util.Arrays.asList(fields.map(_.dataType.toTypeInfo) : _*))
case MapType(keyType, valueType, _) =>
getMapTypeInfo(keyType.toTypeInfo, valueType.toTypeInfo)
case BinaryType => binaryTypeInfo
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala
index 912cd41173..56cab1aee8 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala
@@ -181,7 +181,7 @@ private[hive] case class HiveSimpleUDF(
val ret = FunctionRegistry.invoke(
method,
function,
- conversionHelper.convertIfNecessary(inputs: _*): _*)
+ conversionHelper.convertIfNecessary(inputs : _*): _*)
unwrap(ret, returnInspector)
}
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveInspectorSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveInspectorSuite.scala
index ad28345a66..3b867bbfa1 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveInspectorSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveInspectorSuite.scala
@@ -118,8 +118,8 @@ class HiveInspectorSuite extends SparkFunSuite with HiveInspectors {
case DecimalType() => PrimitiveObjectInspectorFactory.writableHiveDecimalObjectInspector
case StructType(fields) =>
ObjectInspectorFactory.getStandardStructObjectInspector(
- java.util.Arrays.asList(fields.map(f => f.name): _*),
- java.util.Arrays.asList(fields.map(f => toWritableInspector(f.dataType)): _*))
+ java.util.Arrays.asList(fields.map(f => f.name) : _*),
+ java.util.Arrays.asList(fields.map(f => toWritableInspector(f.dataType)) : _*))
}
def checkDataType(dt1: Seq[DataType], dt2: Seq[DataType]): Unit = {
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/InsertIntoHiveTableSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/InsertIntoHiveTableSuite.scala
index 40e9c9362c..da7303c791 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/InsertIntoHiveTableSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/InsertIntoHiveTableSuite.scala
@@ -154,8 +154,8 @@ class InsertIntoHiveTableSuite extends QueryTest with TestHiveSingleton with Bef
}
val expected = List(
"p1=a"::"p2=b"::"p3=c"::"p4=c"::"p5=2"::Nil,
- "p1=a"::"p2=b"::"p3=c"::"p4=c"::"p5=3"::Nil,
- "p1=a"::"p2=b"::"p3=c"::"p4=c"::"p5=1"::Nil,
+ "p1=a"::"p2=b"::"p3=c"::"p4=c"::"p5=3"::Nil ,
+ "p1=a"::"p2=b"::"p3=c"::"p4=c"::"p5=1"::Nil ,
"p1=a"::"p2=b"::"p3=c"::"p4=c"::"p5=4"::Nil
)
assert(listFolders(tmpDir, List()).sortBy(_.toString()) === expected.sortBy(_.toString))