path: root/sql/hive
Diffstat (limited to 'sql/hive')
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala     6
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/TableReader.scala        3
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala           3
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/orc/OrcFileFormat.scala  6
4 files changed, 12 insertions(+), 6 deletions(-)
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala
index 4e74452f6c..e4b963efea 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala
@@ -703,7 +703,8 @@ private[hive] trait HiveInspectors {
// 1. create the pojo (most likely) object
val result = x.create()
var i = 0
- while (i < fieldRefs.size) {
+ val size = fieldRefs.size
+ while (i < size) {
// 2. set the property for the pojo
val tpe = structType(i).dataType
x.setStructFieldData(
@@ -720,7 +721,8 @@ private[hive] trait HiveInspectors {
val row = a.asInstanceOf[InternalRow]
val result = new java.util.ArrayList[AnyRef](fieldRefs.size)
var i = 0
- while (i < fieldRefs.size) {
+ val size = fieldRefs.size
+ while (i < size) {
val tpe = structType(i).dataType
result.add(wrap(row.get(i, tpe), fieldRefs.get(i).getFieldObjectInspector, tpe))
i += 1
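
All four files in this diff apply the same micro-optimization: the loop bound (`fieldRefs.size`, `children.length`) is hoisted out of the `while` condition into a local `val`, so it is evaluated once instead of on every iteration. For a `java.util.List` such as `getAllStructFieldRefs` returns, `size` is an interface call that the JIT may not prove loop-invariant on its own. A minimal sketch of the before/after shape, with hypothetical names rather than the Spark code itself:

// Before: the condition re-evaluates size on every pass, an interface
// call on java.util.List that the JIT may not hoist on its own.
def wrapAllBefore(fieldRefs: java.util.List[String]): Unit = {
  var i = 0
  while (i < fieldRefs.size) {
    println(fieldRefs.get(i))
    i += 1
  }
}

// After: the bound is read once into a local, so each iteration only
// compares two locals.
def wrapAllAfter(fieldRefs: java.util.List[String]): Unit = {
  var i = 0
  val size = fieldRefs.size  // hoisted invariant bound
  while (i < size) {
    println(fieldRefs.get(i))
    i += 1
  }
}
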
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/TableReader.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/TableReader.scala
index b4808fdbed..ec7e53efc8 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/TableReader.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/TableReader.scala
@@ -427,7 +427,8 @@ private[hive] object HadoopTableReader extends HiveInspectors with Logging {
iterator.map { value =>
val raw = converter.convert(rawDeser.deserialize(value))
var i = 0
- while (i < fieldRefs.length) {
+ val length = fieldRefs.length
+ while (i < length) {
val fieldValue = soi.getStructFieldData(raw, fieldRefs(i))
if (fieldValue == null) {
mutableRow.setNullAt(fieldOrdinals(i))
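
The TableReader loop runs once per field for every row deserialized from Hive, so the saved call is multiplied by rows × fields. A rough way to observe the effect, assuming nothing from the patch itself (names are hypothetical, and a real measurement would use JMH rather than wall-clock timing):

object LoopBoundTiming {
  def main(args: Array[String]): Unit = {
    val list = new java.util.ArrayList[Integer]()
    (0 until 1000000).foreach(n => list.add(n))

    def time(label: String)(body: => Long): Unit = {
      val t0 = System.nanoTime()
      val sum = body
      println(f"$label: sum=$sum in ${(System.nanoTime() - t0) / 1e6}%.1f ms")
    }

    // Condition calls list.size on every iteration.
    time("unhoisted") {
      var s = 0L; var i = 0
      while (i < list.size) { s += list.get(i); i += 1 }
      s
    }
    // Bound evaluated once, up front.
    time("hoisted") {
      var s = 0L; var i = 0
      val size = list.size
      while (i < size) { s += list.get(i); i += 1 }
      s
    }
  }
}
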
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala
index 9347aeb8e0..962dd5a52e 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala
@@ -153,7 +153,8 @@ private[hive] case class HiveGenericUDF(
returnInspector // Make sure initialized.

var i = 0
- while (i < children.length) {
+ val length = children.length
+ while (i < length) {
val idx = i
deferredObjects(i).asInstanceOf[DeferredObjectAdapter]
.set(() => children(idx).eval(input))
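
Besides the hoisted bound, this hunk preserves the existing `val idx = i` line, which matters for a different reason: the thunk handed to `set` must capture a per-iteration value. Scala closures capture variables, not snapshots, so closing over the mutable `var i` directly would make every deferred object read the loop's final value. A standalone sketch of that distinction (hypothetical, not Spark code):

val thunks = scala.collection.mutable.ArrayBuffer.empty[() => Int]
var i = 0
while (i < 3) {
  val idx = i              // stable copy, one per iteration
  thunks += (() => idx)    // each closure sees its own idx
  // thunks += (() => i)   // wrong: all three would yield 3 after the loop
  i += 1
}
thunks.foreach(t => println(t()))  // prints 0, 1, 2
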
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/orc/OrcFileFormat.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/orc/OrcFileFormat.scala
index 286197b50e..03b508e11a 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/orc/OrcFileFormat.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/orc/OrcFileFormat.scala
@@ -190,7 +190,8 @@ private[orc] class OrcSerializer(dataSchema: StructType, conf: Configuration)
row: InternalRow): Unit = {
val fieldRefs = oi.getAllStructFieldRefs
var i = 0
- while (i < fieldRefs.size) {
+ val size = fieldRefs.size
+ while (i < size) {
oi.setStructFieldData(
struct,
fieldRefs.get(i),
@@ -289,7 +290,8 @@ private[orc] object OrcRelation extends HiveInspectors {
iterator.map { value =>
val raw = deserializer.deserialize(value)
var i = 0
- while (i < fieldRefs.length) {
+ val length = fieldRefs.length
+ while (i < length) {
val fieldValue = oi.getStructFieldData(raw, fieldRefs(i))
if (fieldValue == null) {
mutableRow.setNullAt(fieldOrdinals(i))
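
In this ORC read path the refs are indexed with `fieldRefs(i)` and measured with `length`, so the bound may already be a plain array length, which the JIT hoists far more readily than `java.util.List.size`; hoisting it manually still costs nothing and keeps the loop shape uniform across all four files. A sketch of the difference, again with hypothetical names:

// Array.length compiles to the single JVM arraylength instruction and is
// easy for the JIT to treat as loop-invariant.
def sumArray(xs: Array[Int]): Long = {
  var s = 0L; var i = 0
  val length = xs.length  // hoisted mainly for a uniform loop shape
  while (i < length) { s += xs(i); i += 1 }
  s
}

// List.size is an interface call; hoisting it removes a call per iteration.
def sumList(xs: java.util.List[Integer]): Long = {
  var s = 0L; var i = 0
  val size = xs.size
  while (i < size) { s += xs.get(i); i += 1 }
  s
}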