author    Jacek Lewandowski <lewandowski.jacek@gmail.com>  2016-07-14 10:18:31 -0700
committer Reynold Xin <rxin@databricks.com>                2016-07-14 10:18:31 -0700
commit    31ca741aef9dd138529e064785c8e58b86140ff5
tree      27a9ccf9495dad28d7cdb4a5273a418d1afb2d43 /sql
parent    c576f9fb90853cce2e8e5dcc32a536a0f49cbbd8
[SPARK-16528][SQL] Fix NPE problem in HiveClientImpl
## What changes were proposed in this pull request?

Some calls to methods or fields (`getParameters`, `properties`) have their results passed directly to Java/Scala collection converters. Those fields can be null in some cases, and the conversion then throws an NPE. The fix wraps each of these calls in `Option` and performs the conversion only on a non-null value.

## How was this patch tested?

Manually tested with a custom Hive metastore.

Author: Jacek Lewandowski <lewandowski.jacek@gmail.com>

Closes #14200 from jacek-lewandowski/SPARK-16528.
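The pattern applied throughout the patch is the same in every hunk: wrap a possibly-null Java getter in `Option` before converting its result to a Scala collection. A minimal, self-contained sketch of the idea follows; the `JavaStyleBean` class and its `getParameters` getter are hypothetical stand-ins for the Hive objects touched by the patch:

```scala
import scala.collection.JavaConverters._

object NullSafeConversion {
  // Hypothetical stand-in for a Hive object whose getter can return null.
  class JavaStyleBean {
    def getParameters: java.util.Map[String, String] = null
  }

  def main(args: Array[String]): Unit = {
    val bean = new JavaStyleBean

    // Unsafe: bean.getParameters.asScala.toMap throws a NullPointerException
    // when getParameters returns null.

    // Safe: convert only when the value is non-null, as the patch does.
    val properties: Map[String, String] =
      Option(bean.getParameters).map(_.asScala.toMap).orNull

    println(properties) // prints "null" instead of throwing
  }
}
```

Note the design choice visible in the diff: `.orNull` propagates the null rather than substituting an empty map, so callers see the same value the underlying Hive API returned, just without the crash during conversion.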
Diffstat (limited to 'sql')
-rw-r--r-- sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala | 14 ++++++++------
1 file changed, 8 insertions(+), 6 deletions(-)
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
index 7e0cef3e35..2f102a88cc 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
@@ -293,7 +293,7 @@ private[hive] class HiveClientImpl(
database.name,
database.description,
database.locationUri,
- database.properties.asJava),
+ Option(database.properties).map(_.asJava).orNull),
ignoreIfExists)
}
@@ -311,7 +311,7 @@ private[hive] class HiveClientImpl(
database.name,
database.description,
database.locationUri,
- database.properties.asJava))
+ Option(database.properties).map(_.asJava).orNull))
}
override def getDatabaseOption(name: String): Option[CatalogDatabase] = withHiveState {
@@ -320,7 +320,7 @@ private[hive] class HiveClientImpl(
name = d.getName,
description = d.getDescription,
locationUri = d.getLocationUri,
- properties = d.getParameters.asScala.toMap)
+ properties = Option(d.getParameters).map(_.asScala.toMap).orNull)
}
}
@@ -353,7 +353,7 @@ private[hive] class HiveClientImpl(
unsupportedFeatures += "bucketing"
}
- val properties = h.getParameters.asScala.toMap
+ val properties = Option(h.getParameters).map(_.asScala.toMap).orNull
CatalogTable(
identifier = TableIdentifier(h.getTableName, Option(h.getDbName)),
@@ -390,7 +390,8 @@ private[hive] class HiveClientImpl(
outputFormat = Option(h.getOutputFormatClass).map(_.getName),
serde = Option(h.getSerializationLib),
compressed = h.getTTable.getSd.isCompressed,
- serdeProperties = h.getTTable.getSd.getSerdeInfo.getParameters.asScala.toMap
+ serdeProperties = Option(h.getTTable.getSd.getSerdeInfo.getParameters)
+ .map(_.asScala.toMap).orNull
),
properties = properties,
viewOriginalText = Option(h.getViewOriginalText),
@@ -817,6 +818,7 @@ private[hive] class HiveClientImpl(
outputFormat = Option(apiPartition.getSd.getOutputFormat),
serde = Option(apiPartition.getSd.getSerdeInfo.getSerializationLib),
compressed = apiPartition.getSd.isCompressed,
- serdeProperties = apiPartition.getSd.getSerdeInfo.getParameters.asScala.toMap))
+ serdeProperties = Option(apiPartition.getSd.getSerdeInfo.getParameters)
+ .map(_.asScala.toMap).orNull))
}
}