Diffstat (limited to 'sql')
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/hiveOperators.scala  14  ++++++++++----
1 file changed, 10 insertions(+), 4 deletions(-)
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/hiveOperators.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/hiveOperators.scala
index 240aa0df49..b19579331f 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/hiveOperators.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/hiveOperators.scala
@@ -371,12 +371,18 @@ case class InsertIntoHiveTable(
           ObjectInspectorCopyOption.JAVA)
         .asInstanceOf[StructObjectInspector]
+
+      val fieldOIs = standardOI.getAllStructFieldRefs.map(_.getFieldObjectInspector).toArray
+      val outputData = new Array[Any](fieldOIs.length)
       iter.map { row =>
-        // Casts Strings to HiveVarchars when necessary.
-        val fieldOIs = standardOI.getAllStructFieldRefs.map(_.getFieldObjectInspector)
-        val mappedRow = row.zip(fieldOIs).map(wrap)
+        var i = 0
+        while (i < row.length) {
+          // Casts Strings to HiveVarchars when necessary.
+          outputData(i) = wrap(row(i), fieldOIs(i))
+          i += 1
+        }
 
-        serializer.serialize(mappedRow.toArray, standardOI)
+        serializer.serialize(outputData, standardOI)
       }
     }
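
The change above is a per-row allocation optimization: the fieldOIs lookup moves out of the iter.map closure so it runs once per partition rather than once per row, and the per-row row.zip(fieldOIs).map(wrap) collection is replaced by a single preallocated Array[Any] that is refilled in a while loop before each serializer.serialize call. Below is a minimal, self-contained sketch of that buffer-reuse pattern, not Spark code; ReuseBufferSketch and wrapField are hypothetical names standing in for the patch's wrap helper and surrounding operator.

// Not from the Spark source: a standalone illustration of hoisting per-field
// setup out of the per-row closure and reusing one preallocated buffer.
object ReuseBufferSketch {
  type Row = Array[Any]

  // Stand-in for the per-field conversion that `wrap` performs in the patch.
  def wrapField(value: Any, fieldIndex: Int): Any = value

  def convertAll(rows: Iterator[Row], numFields: Int): Iterator[Array[Any]] = {
    // Allocated once, outside the per-row loop.
    val outputData = new Array[Any](numFields)
    rows.map { row =>
      var i = 0
      while (i < row.length) {
        outputData(i) = wrapField(row(i), i)
        i += 1
      }
      // The real code hands this buffer straight to serializer.serialize;
      // the sketch clones it only so the returned iterator stays correct.
      outputData.clone()
    }
  }

  def main(args: Array[String]): Unit = {
    val rows = Iterator(Array[Any](1, "a"), Array[Any](2, "b"))
    convertAll(rows, numFields = 2).foreach(r => println(r.mkString(", ")))
  }
}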