diff options
author | Sameer Agarwal <sameerag@cs.berkeley.edu> | 2016-09-26 13:21:08 -0700 |
---|---|---|
committer | Yin Huai <yhuai@databricks.com> | 2016-09-26 13:21:08 -0700 |
commit | 7c7586aef9243081d02ea5065435234b5950ab66 (patch) | |
tree | c775ea91b369228d9966d279f7a8809626badabd /sql/core/src | |
parent | 8135e0e5ebdb9c7f5ac41c675dc8979a5127a31a (diff) | |
download | spark-7c7586aef9243081d02ea5065435234b5950ab66.tar.gz spark-7c7586aef9243081d02ea5065435234b5950ab66.tar.bz2 spark-7c7586aef9243081d02ea5065435234b5950ab66.zip |
[SPARK-17652] Fix confusing exception message while reserving capacity
## What changes were proposed in this pull request?
This minor patch fixes a confusing exception message while reserving additional capacity in the vectorized parquet reader.
## How was this patch tested?
Existing Unit Tests
Author: Sameer Agarwal <sameerag@cs.berkeley.edu>
Closes #15225 from sameeragarwal/error-msg.
Diffstat (limited to 'sql/core/src')
-rw-r--r-- | sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/ColumnVector.java | 14 | ||||
-rw-r--r-- | sql/core/src/test/scala/org/apache/spark/sql/execution/vectorized/ColumnarBatchSuite.scala | 4 |
2 files changed, 9 insertions, 9 deletions
diff --git a/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/ColumnVector.java b/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/ColumnVector.java index a7cb3b11f6..ff07940422 100644 --- a/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/ColumnVector.java +++ b/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/ColumnVector.java @@ -285,19 +285,19 @@ public abstract class ColumnVector implements AutoCloseable { try { reserveInternal(newCapacity); } catch (OutOfMemoryError outOfMemoryError) { - throwUnsupportedException(newCapacity, requiredCapacity, outOfMemoryError); + throwUnsupportedException(requiredCapacity, outOfMemoryError); } } else { - throwUnsupportedException(newCapacity, requiredCapacity, null); + throwUnsupportedException(requiredCapacity, null); } } } - private void throwUnsupportedException(int newCapacity, int requiredCapacity, Throwable cause) { - String message = "Cannot reserve more than " + newCapacity + - " bytes in the vectorized reader (requested = " + requiredCapacity + " bytes). As a" + - " workaround, you can disable the vectorized reader by setting " - + SQLConf.PARQUET_VECTORIZED_READER_ENABLED().key() + " to false."; + private void throwUnsupportedException(int requiredCapacity, Throwable cause) { + String message = "Cannot reserve additional contiguous bytes in the vectorized reader " + + "(requested = " + requiredCapacity + " bytes). As a workaround, you can disable the " + + "vectorized reader by setting " + SQLConf.PARQUET_VECTORIZED_READER_ENABLED().key() + + " to false."; if (cause != null) { throw new RuntimeException(message, cause); diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/vectorized/ColumnarBatchSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/vectorized/ColumnarBatchSuite.scala index 100cc4daca..e3943f31a4 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/vectorized/ColumnarBatchSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/vectorized/ColumnarBatchSuite.scala @@ -802,8 +802,8 @@ class ColumnarBatchSuite extends SparkFunSuite { // Over-allocating beyond MAX_CAPACITY throws an exception column.appendBytes(10, 0.toByte) } - assert(ex.getMessage.contains(s"Cannot reserve more than ${column.MAX_CAPACITY} bytes in " + - s"the vectorized reader")) + assert(ex.getMessage.contains(s"Cannot reserve additional contiguous bytes in the " + + s"vectorized reader")) } } } |