diff options
author | Sean Owen <sowen@cloudera.com> | 2016-09-01 12:13:07 -0700 |
---|---|---|
committer | Josh Rosen <joshrosen@databricks.com> | 2016-09-01 12:13:07 -0700 |
commit | 3893e8c576cf1a6decc18701267ce7cd8caaf521 (patch) | |
tree | e7a7b61f13a348f52ae0a25162157b28203b58ca /sql | |
parent | 2be5f8d7e0819de03971d0af6fa310793d2d0e65 (diff) | |
download | spark-3893e8c576cf1a6decc18701267ce7cd8caaf521.tar.gz spark-3893e8c576cf1a6decc18701267ce7cd8caaf521.tar.bz2 spark-3893e8c576cf1a6decc18701267ce7cd8caaf521.zip |
[SPARK-17331][CORE][MLLIB] Avoid allocating 0-length arrays
## What changes were proposed in this pull request?
Avoid allocating some 0-length arrays, especially in UTF8String, by using Array.empty in Scala over Array[T]()
## How was this patch tested?
Jenkins
Author: Sean Owen <sowen@cloudera.com>
Closes #14895 from srowen/SPARK-17331.
Diffstat (limited to 'sql')
-rw-r--r-- | sql/catalyst/src/main/java/org/apache/spark/sql/types/DataTypes.java | 2 | ||||
-rw-r--r-- | sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala | 2 |
2 files changed, 2 insertions, 2 deletions
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/types/DataTypes.java b/sql/catalyst/src/main/java/org/apache/spark/sql/types/DataTypes.java index 24adeadf95..747ab1809f 100644 --- a/sql/catalyst/src/main/java/org/apache/spark/sql/types/DataTypes.java +++ b/sql/catalyst/src/main/java/org/apache/spark/sql/types/DataTypes.java @@ -191,7 +191,7 @@ public class DataTypes { * Creates a StructType with the given list of StructFields ({@code fields}). */ public static StructType createStructType(List<StructField> fields) { - return createStructType(fields.toArray(new StructField[0])); + return createStructType(fields.toArray(new StructField[fields.size()])); } /** diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala index cbd504603b..37153e545a 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala @@ -322,6 +322,7 @@ object JdbcUtils extends Logging { conn.commit() } committed = true + Iterator.empty } catch { case e: SQLException => val cause = e.getNextException @@ -351,7 +352,6 @@ object JdbcUtils extends Logging { } } } - Array[Byte]().iterator } /** |