author      Davies Liu <davies@databricks.com>    2015-08-18 20:39:59 -0700
committer   Davies Liu <davies.liu@gmail.com>     2015-08-18 20:39:59 -0700
commit      270ee677750a1f2adaf24b5816857194e61782ff (patch)
tree        f2558ee6b72355c1d3e457bf08558037a8df6e6c /sql
parent      bf32c1f7f47dd907d787469f979c5859e02ce5e6 (diff)
[SPARK-10095] [SQL] use public API of BigInteger
In UnsafeRow, we used the private fields of BigInteger for better performance, but they didn't actually contribute much (about 3% in one benchmark) to end-to-end runtime, and they made the code non-portable (it may fail on other JVM implementations). So we should use the public API instead.

cc rxin

Author: Davies Liu <davies@databricks.com>

Closes #8286 from davies/portable_decimal.
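For context, a minimal, self-contained sketch of the round trip this patch switches to, using only the public BigInteger/BigDecimal API; the class name and sample value below are illustrative and not taken from the patch:

import java.math.BigDecimal;
import java.math.BigInteger;

public class DecimalBytesRoundTrip {
  public static void main(String[] args) {
    BigDecimal original = new BigDecimal("12345678901234567890123456789012345.678");
    int scale = original.scale();

    // Write path: serialize only the unscaled value via the public API.
    byte[] bytes = original.unscaledValue().toByteArray();
    // A precision-38 unscaled value always fits in 16 bytes (matching the assert in the patch).
    assert bytes.length <= 16;

    // Read path: rebuild the decimal from the bytes plus the known scale.
    BigInteger unscaled = new BigInteger(bytes);
    BigDecimal restored = new BigDecimal(unscaled, scale);

    System.out.println(original.equals(restored));  // prints: true
  }
}

This avoids reading BigInteger's private signum and mag fields through Platform/Unsafe, which is what tied the old code to a particular JDK's internal layout.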
Diffstat (limited to 'sql')
-rw-r--r--  sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeRow.java        | 29
-rw-r--r--  sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeRowWriters.java |  9
2 files changed, 11 insertions(+), 27 deletions(-)
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeRow.java b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeRow.java
index 7fd9477209..6c020045c3 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeRow.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeRow.java
@@ -273,14 +273,13 @@ public final class UnsafeRow extends MutableRow {
} else {
final BigInteger integer = value.toJavaBigDecimal().unscaledValue();
- final int[] mag = (int[]) Platform.getObjectVolatile(integer,
- Platform.BIG_INTEGER_MAG_OFFSET);
- assert(mag.length <= 4);
+ byte[] bytes = integer.toByteArray();
+ assert(bytes.length <= 16);
// Write the bytes to the variable length portion.
Platform.copyMemory(
- mag, Platform.INT_ARRAY_OFFSET, baseObject, baseOffset + cursor, mag.length * 4);
- setLong(ordinal, (cursor << 32) | ((long) (((integer.signum() + 1) << 8) + mag.length)));
+ bytes, Platform.BYTE_ARRAY_OFFSET, baseObject, baseOffset + cursor, bytes.length);
+ setLong(ordinal, (cursor << 32) | ((long) bytes.length));
}
}
}
@@ -375,8 +374,6 @@ public final class UnsafeRow extends MutableRow {
return Platform.getDouble(baseObject, getFieldOffset(ordinal));
}
- private static byte[] EMPTY = new byte[0];
-
@Override
public Decimal getDecimal(int ordinal, int precision, int scale) {
if (isNullAt(ordinal)) {
@@ -385,20 +382,10 @@ public final class UnsafeRow extends MutableRow {
if (precision <= Decimal.MAX_LONG_DIGITS()) {
return Decimal.apply(getLong(ordinal), precision, scale);
} else {
- long offsetAndSize = getLong(ordinal);
- long offset = offsetAndSize >>> 32;
- int signum = ((int) (offsetAndSize & 0xfff) >> 8);
- assert signum >=0 && signum <= 2 : "invalid signum " + signum;
- int size = (int) (offsetAndSize & 0xff);
- int[] mag = new int[size];
- Platform.copyMemory(
- baseObject, baseOffset + offset, mag, Platform.INT_ARRAY_OFFSET, size * 4);
-
- // create a BigInteger using signum and mag
- BigInteger v = new BigInteger(0, EMPTY); // create the initial object
- Platform.putInt(v, Platform.BIG_INTEGER_SIGNUM_OFFSET, signum - 1);
- Platform.putObjectVolatile(v, Platform.BIG_INTEGER_MAG_OFFSET, mag);
- return Decimal.apply(new BigDecimal(v, scale), precision, scale);
+ byte[] bytes = getBinary(ordinal);
+ BigInteger bigInteger = new BigInteger(bytes);
+ BigDecimal javaDecimal = new BigDecimal(bigInteger, scale);
+ return Decimal.apply(javaDecimal, precision, scale);
}
}
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeRowWriters.java b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeRowWriters.java
index 005351f088..2f43db68a7 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeRowWriters.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeRowWriters.java
@@ -71,16 +71,13 @@ public class UnsafeRowWriters {
}
final BigInteger integer = input.toJavaBigDecimal().unscaledValue();
- int signum = integer.signum() + 1;
- final int[] mag = (int[]) Platform.getObjectVolatile(
- integer, Platform.BIG_INTEGER_MAG_OFFSET);
- assert(mag.length <= 4);
+ byte[] bytes = integer.toByteArray();
// Write the bytes to the variable length portion.
Platform.copyMemory(
- mag, Platform.INT_ARRAY_OFFSET, base, target.getBaseOffset() + cursor, mag.length * 4);
+ bytes, Platform.BYTE_ARRAY_OFFSET, base, target.getBaseOffset() + cursor, bytes.length);
// Set the fixed length portion.
- target.setLong(ordinal, (((long) cursor) << 32) | ((long) ((signum << 8) + mag.length)));
+ target.setLong(ordinal, (((long) cursor) << 32) | (long) bytes.length);
return SIZE;
}