author    wangfei <wangfei1@huawei.com>  2014-12-02 14:30:44 -0800
committer Michael Armbrust <michael@databricks.com>  2014-12-02 14:31:13 -0800
commit    658fe8f1a911e080c9a63e67c9185492152c966e (patch)
tree      92b9ca21df989f7c4039eaa26a9af4ef545bd8bd /sql
parent    adc5d6f09edfc366f2ae151c2c3c13e07821d386 (diff)
[SPARK-4695][SQL] Get result using executeCollect
Using ```executeCollect``` to collect the result, because executeCollect is a custom implementation of collect in Spark SQL that performs better than the RDD's collect.

Author: wangfei <wangfei1@huawei.com>

Closes #3547 from scwf/executeCollect and squashes the following commits:

a5ab68e [wangfei] Revert "adding debug info"
a60d680 [wangfei] fix test failure
0db7ce8 [wangfei] adding debug info
184c594 [wangfei] using executeCollect instead collect

(cherry picked from commit 3ae0cda83c5106136e90d59c20e61db345a5085f)
Signed-off-by: Michael Armbrust <michael@databricks.com>
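For background, here is a minimal sketch of the two collection paths the patch swaps, written against the Spark 1.2-era SparkPlan API that the diff below touches; the helper names collectViaRdd and collectDirect are illustrative only, not part of the patch:

```scala
import org.apache.spark.sql.execution.SparkPlan

// Old path: run the plan as an RDD, copy each (mutable) row, then collect.
// This generic route works for any plan but always pays for the per-row
// copy and the plain RDD collect.
def collectViaRdd(plan: SparkPlan): Seq[Seq[Any]] =
  plan.execute().map(_.copy()).collect().toSeq

// New path: ask the physical plan to collect its own output.
// executeCollect() is overridable, so plans such as commands can return
// their results directly instead of going through the generic RDD round trip.
def collectDirect(plan: SparkPlan): Seq[Seq[Any]] =
  plan.executeCollect().toSeq
```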
Diffstat (limited to 'sql')
-rw-r--r-- sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala | 4
1 file changed, 3 insertions(+), 1 deletion(-)
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
index 304b9a73ee..34fc21e61f 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
@@ -377,7 +377,7 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) {
command.executeCollect().map(_.head.toString)
case other =>
- val result: Seq[Seq[Any]] = toRdd.map(_.copy()).collect().toSeq
+ val result: Seq[Seq[Any]] = other.executeCollect().toSeq
// We need the types so we can output struct field names
val types = analyzed.output.map(_.dataType)
// Reformat to match hive tab delimited output.
@@ -416,6 +416,8 @@ object HiveContext {
case (bin: Array[Byte], BinaryType) => new String(bin, "UTF-8")
case (decimal: Decimal, DecimalType()) => // Hive strips trailing zeros so use its toString
HiveShim.createDecimal(decimal.toBigDecimal.underlying()).toString
+ case (decimal: BigDecimal, DecimalType()) =>
+ HiveShim.createDecimal(decimal.underlying()).toString
case (other, tpe) if primitiveTypes contains tpe => other.toString
}
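The second hunk complements the change above: once results come back through ```executeCollect```, decimal values can arrive as plain scala.math.BigDecimal rather than Catalyst Decimal, so the Hive-style formatter needs to handle both representations. Below is a hedged, standalone sketch of the unwrapping the new case performs; decimalToHiveString is a hypothetical helper, and java.math.BigDecimal stands in for the value HiveShim.createDecimal would wrap:

```scala
// Hypothetical helper, not HiveContext code: render a decimal the way the
// added case does, by unwrapping scala.math.BigDecimal to its underlying
// java.math.BigDecimal before turning it into a string.
def decimalToHiveString(value: Any): String = value match {
  case d: scala.math.BigDecimal => d.underlying().toString // mirrors the newly added case
  case d: java.math.BigDecimal  => d.toString              // already the underlying Java type
  case other                    => other.toString
}

// e.g. decimalToHiveString(scala.math.BigDecimal("3.14")) == "3.14"
```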