path: root/sql/hive
author     Kousuke Saruta <sarutak@oss.nttdata.co.jp>    2016-01-12 00:51:00 -0800
committer  Reynold Xin <rxin@databricks.com>             2016-01-12 00:51:00 -0800
commit     8cfa218f4f1b05f4d076ec15dd0a033ad3e4500d (patch)
tree       31e99044d63b89311821df7a61e6f4882114677a /sql/hive
parent     112abf9100f05be436e449817468c50174712c78 (diff)
[SPARK-12692][BUILD][SQL] Scala style: Fix the style violation (Space before "," or ":")
Fix the style violations (space before "," or ":"). This PR is a follow-up to #10643.

Author: Kousuke Saruta <sarutak@oss.nttdata.co.jp>

Closes #10718 from sarutak/SPARK-12692-followup-sql.
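For context, here is a minimal sketch of the spacing rule this patch enforces (illustrative Scala, not code from the patch): scalastyle flags any space appearing immediately before "," or ":".

  // Violations the checker rejects:
  //   def f(x : Int, y : Int) = x + y      (space before ":")
  //   val ys = List(1 , 2 , 3)             (space before ",")
  //   java.util.Arrays.asList(xs : _*)     (space before ": _*")

  // Conforming equivalents:
  def f(x: Int, y: Int): Int = x + y
  val xs = List(1, 2, 3)
  val jl = java.util.Arrays.asList(xs: _*)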
Diffstat (limited to 'sql/hive')
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala           8
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala                 2
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveInspectorSuite.scala       4
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/InsertIntoHiveTableSuite.scala 4
4 files changed, 9 insertions, 9 deletions
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala
index 7a260e72eb..c9df3c4a82 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala
@@ -657,8 +657,8 @@ private[hive] trait HiveInspectors {
     case DecimalType() => PrimitiveObjectInspectorFactory.javaHiveDecimalObjectInspector
     case StructType(fields) =>
       ObjectInspectorFactory.getStandardStructObjectInspector(
-        java.util.Arrays.asList(fields.map(f => f.name) : _*),
-        java.util.Arrays.asList(fields.map(f => toInspector(f.dataType)) : _*))
+        java.util.Arrays.asList(fields.map(f => f.name): _*),
+        java.util.Arrays.asList(fields.map(f => toInspector(f.dataType)): _*))
   }
 
   /**
@@ -905,8 +905,8 @@ private[hive] trait HiveInspectors {
         getListTypeInfo(elemType.toTypeInfo)
       case StructType(fields) =>
         getStructTypeInfo(
-          java.util.Arrays.asList(fields.map(_.name) : _*),
-          java.util.Arrays.asList(fields.map(_.dataType.toTypeInfo) : _*))
+          java.util.Arrays.asList(fields.map(_.name): _*),
+          java.util.Arrays.asList(fields.map(_.dataType.toTypeInfo): _*))
       case MapType(keyType, valueType, _) =>
         getMapTypeInfo(keyType.toTypeInfo, valueType.toTypeInfo)
       case BinaryType => binaryTypeInfo
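The ": _*" ascription touched in the hunks above is Scala's sequence-to-varargs expansion; it is what lets a mapped Scala collection be passed to the Java varargs method java.util.Arrays.asList. A minimal, self-contained sketch (Field here is a stand-in for Spark's StructField, not the real class):

  case class Field(name: String, dataType: String)

  val fields = Seq(Field("a", "int"), Field("b", "string"))
  // Expand the mapped Seq[String] into asList's varargs parameter.
  val names: java.util.List[String] = java.util.Arrays.asList(fields.map(_.name): _*)
  // names == [a, b]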
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala
index 56cab1aee8..912cd41173 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala
@@ -181,7 +181,7 @@ private[hive] case class HiveSimpleUDF(
     val ret = FunctionRegistry.invoke(
       method,
       function,
-      conversionHelper.convertIfNecessary(inputs : _*): _*)
+      conversionHelper.convertIfNecessary(inputs: _*): _*)
     unwrap(ret, returnInspector)
   }
 
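The line fixed above uses two varargs expansions at once: the inner "inputs: _*" spreads the UDF arguments into convertIfNecessary, and the outer ": _*" spreads its result into the reflective invoke. A hedged sketch of that shape (the method names and signatures below are illustrative stand-ins, not Hive's actual API):

  // Stand-in for conversionHelper.convertIfNecessary: accepts varargs, returns an array.
  def convertIfNecessary(args: Any*): Array[AnyRef] =
    args.map(_.asInstanceOf[AnyRef]).toArray

  // Stand-in for FunctionRegistry.invoke: also a varargs method.
  def invoke(args: AnyRef*): String = args.mkString(", ")

  val inputs: Seq[Any] = Seq(1, "x")
  val ret = invoke(convertIfNecessary(inputs: _*): _*)   // "1, x"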
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveInspectorSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveInspectorSuite.scala
index 3b867bbfa1..ad28345a66 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveInspectorSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveInspectorSuite.scala
@@ -118,8 +118,8 @@ class HiveInspectorSuite extends SparkFunSuite with HiveInspectors {
     case DecimalType() => PrimitiveObjectInspectorFactory.writableHiveDecimalObjectInspector
     case StructType(fields) =>
       ObjectInspectorFactory.getStandardStructObjectInspector(
-        java.util.Arrays.asList(fields.map(f => f.name) : _*),
-        java.util.Arrays.asList(fields.map(f => toWritableInspector(f.dataType)) : _*))
+        java.util.Arrays.asList(fields.map(f => f.name): _*),
+        java.util.Arrays.asList(fields.map(f => toWritableInspector(f.dataType)): _*))
   }
 
   def checkDataType(dt1: Seq[DataType], dt2: Seq[DataType]): Unit = {
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/InsertIntoHiveTableSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/InsertIntoHiveTableSuite.scala
index da7303c791..40e9c9362c 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/InsertIntoHiveTableSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/InsertIntoHiveTableSuite.scala
@@ -154,8 +154,8 @@ class InsertIntoHiveTableSuite extends QueryTest with TestHiveSingleton with Bef
     }
     val expected = List(
       "p1=a"::"p2=b"::"p3=c"::"p4=c"::"p5=2"::Nil,
-      "p1=a"::"p2=b"::"p3=c"::"p4=c"::"p5=3"::Nil ,
-      "p1=a"::"p2=b"::"p3=c"::"p4=c"::"p5=1"::Nil ,
+      "p1=a"::"p2=b"::"p3=c"::"p4=c"::"p5=3"::Nil,
+      "p1=a"::"p2=b"::"p3=c"::"p4=c"::"p5=1"::Nil,
       "p1=a"::"p2=b"::"p3=c"::"p4=c"::"p5=4"::Nil
     )
     assert(listFolders(tmpDir, List()).sortBy(_.toString()) === expected.sortBy(_.toString))