diff options
author | hyukjinkwon <gurwls223@gmail.com> | 2016-04-14 09:43:41 +0100 |
---|---|---|
committer | Sean Owen <sowen@cloudera.com> | 2016-04-14 09:43:41 +0100 |
commit | 6fc3dc8839eaed673c64ec87af6dfe24f8cebe0c (patch) | |
tree | db47cd619d84a7890ff1cacc78a44046ace85633 /sql/core/src | |
parent | 478af2f45595913c9b8f560d13e8d88447486f99 (diff) | |
download | spark-6fc3dc8839eaed673c64ec87af6dfe24f8cebe0c.tar.gz spark-6fc3dc8839eaed673c64ec87af6dfe24f8cebe0c.tar.bz2 spark-6fc3dc8839eaed673c64ec87af6dfe24f8cebe0c.zip |
[MINOR][SQL] Remove extra anonymous closure within functional transformations
## What changes were proposed in this pull request?
This PR removes extra anonymous closure within functional transformations.
For example,
```scala
.map(item => {
...
})
```
which can be written more simply as below:
```scala
.map { item =>
...
}
```
## How was this patch tested?
Related unit tests and `sbt scalastyle`.
Author: hyukjinkwon <gurwls223@gmail.com>
Closes #12382 from HyukjinKwon/minor-extra-closers.
Diffstat (limited to 'sql/core/src')
-rw-r--r-- | sql/core/src/main/scala/org/apache/spark/sql/execution/basicOperators.scala | 4 | ||||
-rw-r--r-- | sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala | 4 |
2 files changed, 4 insertions, 4 deletions
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/basicOperators.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/basicOperators.scala index aba500ad8d..344aaff348 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/basicOperators.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/basicOperators.scala @@ -400,7 +400,7 @@ case class Range( sqlContext .sparkContext .parallelize(0 until numSlices, numSlices) - .mapPartitionsWithIndex((i, _) => { + .mapPartitionsWithIndex { (i, _) => val partitionStart = (i * numElements) / numSlices * step + start val partitionEnd = (((i + 1) * numElements) / numSlices) * step + start def getSafeMargin(bi: BigInt): Long = @@ -444,7 +444,7 @@ case class Range( unsafeRow } } - }) + } } } diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala index b7ff5f7242..065c8572b0 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala @@ -251,12 +251,12 @@ object JdbcUtils extends Logging { def schemaString(df: DataFrame, url: String): String = { val sb = new StringBuilder() val dialect = JdbcDialects.get(url) - df.schema.fields foreach { field => { + df.schema.fields foreach { field => val name = field.name val typ: String = getJdbcType(field.dataType, dialect).databaseTypeDefinition val nullable = if (field.nullable) "" else "NOT NULL" sb.append(s", $name $typ $nullable") - }} + } if (sb.length < 2) "" else sb.substring(2) } |