author    wangzhenhua <wangzhenhua@huawei.com>      2017-02-24 10:24:59 -0800
committer Wenchen Fan <wenchen@databricks.com>      2017-02-24 10:24:59 -0800
commit    69d0da6373979ce5b2bcd52933b5a7660d893e88 (patch)
tree      0604364d8facb22837c95564d6df4d05736477c7 /core/src
parent    05954f32e9bde56dc1f9a72028900d705185f6d7 (diff)
[SPARK-17078][SQL] Show stats when explain
## What changes were proposed in this pull request?

Currently the estimated stats in logical plans can only be checked by debugging. We should provide an easier and more efficient way for developers and users.

In this PR, we add an EXPLAIN COST command to show stats in the optimized logical plan. For example:

```
spark-sql> EXPLAIN COST select count(1) from store_returns;
...
== Optimized Logical Plan ==
Aggregate [count(1) AS count(1)#24L], Statistics(sizeInBytes=16.0 B, rowCount=1, isBroadcastable=false)
+- Project, Statistics(sizeInBytes=4.3 GB, rowCount=5.76E+8, isBroadcastable=false)
   +- Relation[sr_returned_date_sk#3,sr_return_time_sk#4,sr_item_sk#5,sr_customer_sk#6,sr_cdemo_sk#7,sr_hdemo_sk#8,sr_addr_sk#9,sr_store_sk#10,sr_reason_sk#11,sr_ticket_number#12,sr_return_quantity#13,sr_return_amt#14,sr_return_tax#15,sr_return_amt_inc_tax#16,sr_fee#17,sr_return_ship_cost#18,sr_refunded_cash#19,sr_reversed_charge#20,sr_store_credit#21,sr_net_loss#22] parquet, Statistics(sizeInBytes=28.6 GB, rowCount=5.76E+8, isBroadcastable=false)
...
```

## How was this patch tested?

Added test cases.

Author: wangzhenhua <wangzhenhua@huawei.com>
Author: Zhenhua Wang <wzh_zju@163.com>

Closes #16594 from wzhfy/showStats.
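The command can also be issued programmatically. A minimal usage sketch, assuming a running SparkSession and an existing store_returns table; the object and app names below are illustrative, not part of this patch:

```scala
import org.apache.spark.sql.SparkSession

object ExplainCostDemo {
  def main(args: Array[String]): Unit = {
    // Illustrative session; any deployment whose catalog contains the
    // example table behaves the same way.
    val spark = SparkSession.builder().appName("explain-cost-demo").getOrCreate()

    // EXPLAIN COST returns the plan text as a single-column result; after
    // this patch the optimized logical plan carries Statistics(...) entries.
    spark.sql("EXPLAIN COST SELECT count(1) FROM store_returns").show(truncate = false)

    spark.stop()
  }
}
```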
Diffstat (limited to 'core/src')
-rw-r--r--  core/src/main/scala/org/apache/spark/util/Utils.scala       | 40
-rw-r--r--  core/src/test/scala/org/apache/spark/util/UtilsSuite.scala  |  5
2 files changed, 31 insertions(+), 14 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index 480240a93d..10e5233679 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -19,6 +19,7 @@ package org.apache.spark.util
 
 import java.io._
 import java.lang.management.{LockInfo, ManagementFactory, MonitorInfo, ThreadInfo}
+import java.math.{MathContext, RoundingMode}
 import java.net._
 import java.nio.ByteBuffer
 import java.nio.channels.Channels
@@ -1109,26 +1110,39 @@ private[spark] object Utils extends Logging {
   /**
    * Convert a quantity in bytes to a human-readable string such as "4.0 MB".
    */
-  def bytesToString(size: Long): String = {
+  def bytesToString(size: Long): String = bytesToString(BigInt(size))
+
+  def bytesToString(size: BigInt): String = {
+    val EB = 1L << 60
+    val PB = 1L << 50
     val TB = 1L << 40
     val GB = 1L << 30
     val MB = 1L << 20
     val KB = 1L << 10
-    val (value, unit) = {
-      if (size >= 2*TB) {
-        (size.asInstanceOf[Double] / TB, "TB")
-      } else if (size >= 2*GB) {
-        (size.asInstanceOf[Double] / GB, "GB")
-      } else if (size >= 2*MB) {
-        (size.asInstanceOf[Double] / MB, "MB")
-      } else if (size >= 2*KB) {
-        (size.asInstanceOf[Double] / KB, "KB")
-      } else {
-        (size.asInstanceOf[Double], "B")
+    if (size >= BigInt(1L << 11) * EB) {
+      // The number is too large, show it in scientific notation.
+      BigDecimal(size, new MathContext(3, RoundingMode.HALF_UP)).toString() + " B"
+    } else {
+      val (value, unit) = {
+        if (size >= 2 * EB) {
+          (BigDecimal(size) / EB, "EB")
+        } else if (size >= 2 * PB) {
+          (BigDecimal(size) / PB, "PB")
+        } else if (size >= 2 * TB) {
+          (BigDecimal(size) / TB, "TB")
+        } else if (size >= 2 * GB) {
+          (BigDecimal(size) / GB, "GB")
+        } else if (size >= 2 * MB) {
+          (BigDecimal(size) / MB, "MB")
+        } else if (size >= 2 * KB) {
+          (BigDecimal(size) / KB, "KB")
+        } else {
+          (BigDecimal(size), "B")
+        }
       }
+      "%.1f %s".formatLocal(Locale.US, value, unit)
     }
-    "%.1f %s".formatLocal(Locale.US, value, unit)
   }
 
   /**
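A note on the cutoff above: BigInt(1L << 11) * EB is 2^11 x 2^60 = 2^71 bytes (about 2.36E+21), so only sizes of at least 2048 EB fall back to scientific notation. Since Long.MaxValue is 2^63 - 1 (about 8 EB), the original Long overload can never reach that branch; it matters only for BigInt callers such as the plan statistics printed by EXPLAIN COST. Below is a self-contained sketch mirroring the patched method so it can be compiled and run outside Spark; the object name BytesToStringSketch is ours, not part of the patch:

```scala
// Standalone mirror of the patched Utils.bytesToString(BigInt) above;
// logic and constants are copied from the diff for illustration.
import java.math.{MathContext, RoundingMode}
import java.util.Locale

object BytesToStringSketch {
  def bytesToString(size: BigInt): String = {
    val EB = 1L << 60
    val PB = 1L << 50
    val TB = 1L << 40
    val GB = 1L << 30
    val MB = 1L << 20
    val KB = 1L << 10
    if (size >= BigInt(1L << 11) * EB) {
      // At or beyond 2048 EB (2^71 bytes): scientific notation, 3 significant digits.
      BigDecimal(size, new MathContext(3, RoundingMode.HALF_UP)).toString() + " B"
    } else {
      val (value, unit) =
        if (size >= 2 * EB) (BigDecimal(size) / EB, "EB")
        else if (size >= 2 * PB) (BigDecimal(size) / PB, "PB")
        else if (size >= 2 * TB) (BigDecimal(size) / TB, "TB")
        else if (size >= 2 * GB) (BigDecimal(size) / GB, "GB")
        else if (size >= 2 * MB) (BigDecimal(size) / MB, "MB")
        else if (size >= 2 * KB) (BigDecimal(size) / KB, "KB")
        else (BigDecimal(size), "B")
      // formatLocal unwraps scala.math.BigDecimal to java.math.BigDecimal,
      // which the %f conversion accepts.
      "%.1f %s".formatLocal(Locale.US, value, unit)
    }
  }

  def main(args: Array[String]): Unit = {
    println(bytesToString(BigInt(Long.MaxValue)))         // 8.0 EB
    println(bytesToString(BigInt(1L << 11) * (1L << 60))) // 2.36E+21 B
  }
}
```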
diff --git a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
index c9cf651ecf..8ed09749ff 100644
--- a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
@@ -200,7 +200,10 @@ class UtilsSuite extends SparkFunSuite with ResetSystemProperties with Logging {
     assert(Utils.bytesToString(2097152) === "2.0 MB")
     assert(Utils.bytesToString(2306867) === "2.2 MB")
     assert(Utils.bytesToString(5368709120L) === "5.0 GB")
-    assert(Utils.bytesToString(5L * 1024L * 1024L * 1024L * 1024L) === "5.0 TB")
+    assert(Utils.bytesToString(5L * (1L << 40)) === "5.0 TB")
+    assert(Utils.bytesToString(5L * (1L << 50)) === "5.0 PB")
+    assert(Utils.bytesToString(5L * (1L << 60)) === "5.0 EB")
+    assert(Utils.bytesToString(BigInt(1L << 11) * (1L << 60)) === "2.36E+21 B")
   }
 
   test("copyStream") {