From 31ca741aef9dd138529e064785c8e58b86140ff5 Mon Sep 17 00:00:00 2001
From: Jacek Lewandowski
Date: Thu, 14 Jul 2016 10:18:31 -0700
Subject: [SPARK-16528][SQL] Fix NPE problem in HiveClientImpl

## What changes were proposed in this pull request?

Some calls to methods or fields (getParameters, properties) pass their results directly to the Java/Scala collection converters. Those fields can be null in some cases, and the conversion then throws a NullPointerException. We fix this by wrapping the calls to those fields and methods in Option before doing the conversion.

## How was this patch tested?

Manually tested with a custom Hive metastore.

Author: Jacek Lewandowski

Closes #14200 from jacek-lewandowski/SPARK-16528.
---
 .../org/apache/spark/sql/hive/client/HiveClientImpl.scala | 14 ++++++++------
 1 file changed, 8 insertions(+), 6 deletions(-)

diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
index 7e0cef3e35..2f102a88cc 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
@@ -293,7 +293,7 @@ private[hive] class HiveClientImpl(
         database.name,
         database.description,
         database.locationUri,
-        database.properties.asJava),
+        Option(database.properties).map(_.asJava).orNull),
       ignoreIfExists)
   }
 
@@ -311,7 +311,7 @@ private[hive] class HiveClientImpl(
         database.name,
         database.description,
         database.locationUri,
-        database.properties.asJava))
+        Option(database.properties).map(_.asJava).orNull))
   }
 
   override def getDatabaseOption(name: String): Option[CatalogDatabase] = withHiveState {
@@ -320,7 +320,7 @@ private[hive] class HiveClientImpl(
         name = d.getName,
         description = d.getDescription,
         locationUri = d.getLocationUri,
-        properties = d.getParameters.asScala.toMap)
+        properties = Option(d.getParameters).map(_.asScala.toMap).orNull)
     }
   }
 
@@ -353,7 +353,7 @@ private[hive] class HiveClientImpl(
         unsupportedFeatures += "bucketing"
       }
 
-      val properties = h.getParameters.asScala.toMap
+      val properties = Option(h.getParameters).map(_.asScala.toMap).orNull
 
       CatalogTable(
         identifier = TableIdentifier(h.getTableName, Option(h.getDbName)),
@@ -390,7 +390,8 @@ private[hive] class HiveClientImpl(
           outputFormat = Option(h.getOutputFormatClass).map(_.getName),
           serde = Option(h.getSerializationLib),
           compressed = h.getTTable.getSd.isCompressed,
-          serdeProperties = h.getTTable.getSd.getSerdeInfo.getParameters.asScala.toMap
+          serdeProperties = Option(h.getTTable.getSd.getSerdeInfo.getParameters)
+            .map(_.asScala.toMap).orNull
         ),
         properties = properties,
         viewOriginalText = Option(h.getViewOriginalText),
@@ -817,6 +818,7 @@ private[hive] class HiveClientImpl(
         outputFormat = Option(apiPartition.getSd.getOutputFormat),
         serde = Option(apiPartition.getSd.getSerdeInfo.getSerializationLib),
         compressed = apiPartition.getSd.isCompressed,
-        serdeProperties = apiPartition.getSd.getSerdeInfo.getParameters.asScala.toMap))
+        serdeProperties = Option(apiPartition.getSd.getSerdeInfo.getParameters)
+          .map(_.asScala.toMap).orNull))
   }
 }
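For reference, here is a minimal standalone sketch of the null-safe conversion pattern applied throughout this patch. The object name, the toScalaMap helper, and the sample map are illustrative only and are not part of HiveClientImpl.

    import scala.collection.JavaConverters._

    // Illustrative sketch only: NullSafeConversionSketch and toScalaMap are
    // hypothetical names, not part of the Spark code.
    object NullSafeConversionSketch {

      // Convert a possibly-null java.util.Map to an immutable Scala Map,
      // returning null when the input itself is null (mirrors the .orNull
      // pattern used in the patch).
      def toScalaMap(params: java.util.Map[String, String]): Map[String, String] =
        Option(params).map(_.asScala.toMap).orNull

      def main(args: Array[String]): Unit = {
        val present = new java.util.HashMap[String, String]()
        present.put("owner", "hive")

        // Calling params.asScala directly would throw a NullPointerException
        // on the null input; wrapping in Option first makes both cases safe.
        println(toScalaMap(present)) // Map(owner -> hive)
        println(toScalaMap(null))    // null
      }
    }

Returning null via orNull, rather than defaulting to an empty map, preserves the distinction between a missing parameters object and an empty parameter map, which matches the behavior the patch chooses.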