path: root/sql/catalyst
author    wangzhenhua <wangzhenhua@huawei.com>    2017-02-24 10:24:59 -0800
committer Wenchen Fan <wenchen@databricks.com>    2017-02-24 10:24:59 -0800
commit    69d0da6373979ce5b2bcd52933b5a7660d893e88 (patch)
tree      0604364d8facb22837c95564d6df4d05736477c7 /sql/catalyst
parent    05954f32e9bde56dc1f9a72028900d705185f6d7 (diff)
[SPARK-17078][SQL] Show stats when explain
## What changes were proposed in this pull request?

Currently we can only check the estimated stats in logical plans by debugging. We need an easier and more efficient way for developers and users. This PR adds an EXPLAIN COST command that shows stats in the optimized logical plan, e.g.:

```
spark-sql> EXPLAIN COST select count(1) from store_returns;
...
== Optimized Logical Plan ==
Aggregate [count(1) AS count(1)#24L], Statistics(sizeInBytes=16.0 B, rowCount=1, isBroadcastable=false)
+- Project, Statistics(sizeInBytes=4.3 GB, rowCount=5.76E+8, isBroadcastable=false)
   +- Relation[sr_returned_date_sk#3,sr_return_time_sk#4,sr_item_sk#5,sr_customer_sk#6,sr_cdemo_sk#7,sr_hdemo_sk#8,sr_addr_sk#9,sr_store_sk#10,sr_reason_sk#11,sr_ticket_number#12,sr_return_quantity#13,sr_return_amt#14,sr_return_tax#15,sr_return_amt_inc_tax#16,sr_fee#17,sr_return_ship_cost#18,sr_refunded_cash#19,sr_reversed_charge#20,sr_store_credit#21,sr_net_loss#22] parquet, Statistics(sizeInBytes=28.6 GB, rowCount=5.76E+8, isBroadcastable=false)
...
```

## How was this patch tested?

Added test cases.

Author: wangzhenhua <wangzhenhua@huawei.com>
Author: Zhenhua Wang <wzh_zju@163.com>

Closes #16594 from wzhfy/showStats.
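For readers who want to try the command from code rather than the spark-sql shell, here is a minimal sketch (a local-mode session with a temp view standing in for a real table such as store_returns; the object and view names are illustrative, not part of this patch):

```scala
import org.apache.spark.sql.SparkSession

object ExplainCostDemo extends App {
  val spark = SparkSession.builder()
    .appName("explain-cost-demo")
    .master("local[*]")
    .getOrCreate()

  // Any registered relation works; this temp view stands in for a real table.
  spark.range(1000).createOrReplaceTempView("store_returns")

  // EXPLAIN COST prints the optimized logical plan annotated with Statistics(...).
  spark.sql("EXPLAIN COST SELECT count(1) FROM store_returns").show(truncate = false)
}
```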
Diffstat (limited to 'sql/catalyst')
-rw-r--r--  sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4                      |  6
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala         |  4
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/Statistics.scala          | 12
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala                    | 27
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/TableIdentifierParserSuite.scala |  4
5 files changed, 39 insertions(+), 14 deletions(-)
diff --git a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4 b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
index d8cd68e2d9..59f93b3c46 100644
--- a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
+++ b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
@@ -123,7 +123,8 @@ statement
| CREATE TEMPORARY? FUNCTION qualifiedName AS className=STRING
(USING resource (',' resource)*)? #createFunction
| DROP TEMPORARY? FUNCTION (IF EXISTS)? qualifiedName #dropFunction
- | EXPLAIN (LOGICAL | FORMATTED | EXTENDED | CODEGEN)? statement #explain
+ | EXPLAIN (LOGICAL | FORMATTED | EXTENDED | CODEGEN | COST)?
+ statement #explain
| SHOW TABLES ((FROM | IN) db=identifier)?
(LIKE? pattern=STRING)? #showTables
| SHOW TABLE EXTENDED ((FROM | IN) db=identifier)?
@@ -693,7 +694,7 @@ nonReserved
| DELIMITED | FIELDS | TERMINATED | COLLECTION | ITEMS | KEYS | ESCAPED | LINES | SEPARATED
| EXTENDED | REFRESH | CLEAR | CACHE | UNCACHE | LAZY | GLOBAL | TEMPORARY | OPTIONS
| GROUPING | CUBE | ROLLUP
- | EXPLAIN | FORMAT | LOGICAL | FORMATTED | CODEGEN
+ | EXPLAIN | FORMAT | LOGICAL | FORMATTED | CODEGEN | COST
| TABLESAMPLE | USE | TO | BUCKET | PERCENTLIT | OUT | OF
| SET | RESET
| VIEW | REPLACE
@@ -794,6 +795,7 @@ EXPLAIN: 'EXPLAIN';
FORMAT: 'FORMAT';
LOGICAL: 'LOGICAL';
CODEGEN: 'CODEGEN';
+COST: 'COST';
CAST: 'CAST';
SHOW: 'SHOW';
TABLES: 'TABLES';
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala
index 0937825e27..e22b429aec 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala
@@ -115,6 +115,10 @@ abstract class LogicalPlan extends QueryPlan[LogicalPlan] with Logging {
Statistics(sizeInBytes = children.map(_.stats(conf).sizeInBytes).product)
}
+ override def verboseStringWithSuffix: String = {
+ super.verboseString + statsCache.map(", " + _.toString).getOrElse("")
+ }
+
/**
* Returns the maximum number of rows that this plan may compute.
*
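The mechanism here is simple: the one-line verbose description is extended with the cached statistics when they have been computed. A self-contained sketch of the pattern (toy types, not Spark's classes):

```scala
// Illustrative toy, not Spark code: the suffix pattern from this hunk.
case class Stats(sizeInBytes: BigInt, rowCount: Option[BigInt]) {
  override def toString: String =
    s"Statistics(sizeInBytes=$sizeInBytes, rowCount=${rowCount.getOrElse("None")})"
}

trait PlanLike {
  def verboseString: String
  // Populated lazily elsewhere, as statsCache is in the real LogicalPlan.
  var statsCache: Option[Stats] = None
  // Mirrors LogicalPlan.verboseStringWithSuffix in this patch: append the
  // cached stats to the verbose one-line description, if computed.
  def verboseStringWithSuffix: String =
    verboseString + statsCache.map(", " + _.toString).getOrElse("")
}

object SuffixDemo extends App {
  val node = new PlanLike { def verboseString = "Project" }
  node.statsCache = Some(Stats(BigInt(1L << 32), Some(BigInt(576000000))))
  println(node.verboseStringWithSuffix)
  // Project, Statistics(sizeInBytes=4294967296, rowCount=576000000)
}
```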
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/Statistics.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/Statistics.scala
index 91404d4bb8..f24b240956 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/Statistics.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/Statistics.scala
@@ -17,6 +17,8 @@
package org.apache.spark.sql.catalyst.plans.logical
+import java.math.{MathContext, RoundingMode}
+
import scala.util.control.NonFatal
import org.apache.spark.internal.Logging
@@ -24,6 +26,7 @@ import org.apache.spark.sql.{AnalysisException, Row}
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.aggregate._
import org.apache.spark.sql.types._
+import org.apache.spark.util.Utils
/**
@@ -54,8 +57,13 @@ case class Statistics(
/** Readable string representation for the Statistics. */
def simpleString: String = {
- Seq(s"sizeInBytes=$sizeInBytes",
- if (rowCount.isDefined) s"rowCount=${rowCount.get}" else "",
+ Seq(s"sizeInBytes=${Utils.bytesToString(sizeInBytes)}",
+ if (rowCount.isDefined) {
+ // Show row count in scientific notation.
+ s"rowCount=${BigDecimal(rowCount.get, new MathContext(3, RoundingMode.HALF_UP)).toString()}"
+ } else {
+ ""
+ },
s"isBroadcastable=$isBroadcastable"
).filter(_.nonEmpty).mkString(", ")
}
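Both formatting changes are easy to reproduce standalone. A sketch follows; the bytesToString helper below is a simplified stand-in for Spark's internal Utils.bytesToString, which the patch actually uses:

```scala
import java.math.{MathContext, RoundingMode}

object StatsFormatDemo extends App {
  // Simplified stand-in for org.apache.spark.util.Utils.bytesToString.
  def bytesToString(size: Long): String = {
    val KB = 1L << 10; val MB = 1L << 20; val GB = 1L << 30
    if (size >= GB) f"${size.toDouble / GB}%.1f GB"
    else if (size >= MB) f"${size.toDouble / MB}%.1f MB"
    else if (size >= KB) f"${size.toDouble / KB}%.1f KB"
    else s"$size.0 B"
  }

  // Round the row count to three significant digits, half-up; java.math.BigDecimal
  // then prints it in scientific notation, as in Statistics.simpleString above.
  val rowCount = BigInt(576000000)
  val rounded = BigDecimal(rowCount, new MathContext(3, RoundingMode.HALF_UP))

  println(s"rowCount=$rounded")                       // rowCount=5.76E+8
  println(s"sizeInBytes=${bytesToString(17L << 30)}") // sizeInBytes=17.0 GB
}
```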
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala
index f37661c315..cc4c083595 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala
@@ -453,13 +453,16 @@ abstract class TreeNode[BaseType <: TreeNode[BaseType]] extends Product {
/** ONE line description of this node with more information */
def verboseString: String
+ /** ONE line description of this node with some suffix information */
+ def verboseStringWithSuffix: String = verboseString
+
override def toString: String = treeString
/** Returns a string representation of the nodes in this tree */
def treeString: String = treeString(verbose = true)
- def treeString(verbose: Boolean): String = {
- generateTreeString(0, Nil, new StringBuilder, verbose).toString
+ def treeString(verbose: Boolean, addSuffix: Boolean = false): String = {
+ generateTreeString(0, Nil, new StringBuilder, verbose = verbose, addSuffix = addSuffix).toString
}
/**
@@ -524,7 +527,8 @@ abstract class TreeNode[BaseType <: TreeNode[BaseType]] extends Product {
lastChildren: Seq[Boolean],
builder: StringBuilder,
verbose: Boolean,
- prefix: String = ""): StringBuilder = {
+ prefix: String = "",
+ addSuffix: Boolean = false): StringBuilder = {
if (depth > 0) {
lastChildren.init.foreach { isLast =>
@@ -533,22 +537,29 @@ abstract class TreeNode[BaseType <: TreeNode[BaseType]] extends Product {
builder.append(if (lastChildren.last) "+- " else ":- ")
}
+ val str = if (verbose) {
+ if (addSuffix) verboseStringWithSuffix else verboseString
+ } else {
+ simpleString
+ }
builder.append(prefix)
- builder.append(if (verbose) verboseString else simpleString)
+ builder.append(str)
builder.append("\n")
if (innerChildren.nonEmpty) {
innerChildren.init.foreach(_.generateTreeString(
- depth + 2, lastChildren :+ children.isEmpty :+ false, builder, verbose))
+ depth + 2, lastChildren :+ children.isEmpty :+ false, builder, verbose,
+ addSuffix = addSuffix))
innerChildren.last.generateTreeString(
- depth + 2, lastChildren :+ children.isEmpty :+ true, builder, verbose)
+ depth + 2, lastChildren :+ children.isEmpty :+ true, builder, verbose,
+ addSuffix = addSuffix)
}
if (children.nonEmpty) {
children.init.foreach(_.generateTreeString(
- depth + 1, lastChildren :+ false, builder, verbose, prefix))
+ depth + 1, lastChildren :+ false, builder, verbose, prefix, addSuffix))
children.last.generateTreeString(
- depth + 1, lastChildren :+ true, builder, verbose, prefix)
+ depth + 1, lastChildren :+ true, builder, verbose, prefix, addSuffix)
}
builder
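The rest of this change is plumbing: the addSuffix flag rides along the recursion so every node in the tree chooses between verboseString and verboseStringWithSuffix. In miniature (a toy tree with simplified indentation, not Spark's TreeNode):

```scala
// Toy tree, not Spark code: shows how the addSuffix flag is threaded
// through the recursion so every node picks its one-line description.
case class Toy(name: String, suffix: String, children: Seq[Toy] = Nil) {
  def verboseString: String = name
  def verboseStringWithSuffix: String = s"$name, $suffix"

  def treeString(verbose: Boolean, addSuffix: Boolean = false): String =
    generate(0, new StringBuilder, verbose, addSuffix).toString

  private def generate(
      depth: Int,
      builder: StringBuilder,
      verbose: Boolean,
      addSuffix: Boolean): StringBuilder = {
    // Same three-way choice as the patched generateTreeString.
    val str = if (verbose) {
      if (addSuffix) verboseStringWithSuffix else verboseString
    } else {
      name
    }
    builder.append("  " * depth).append(str).append('\n')
    // Forward the flag to every child, as the patch does for both
    // children and innerChildren.
    children.foreach(_.generate(depth + 1, builder, verbose, addSuffix))
    builder
  }
}

object TreeDemo extends App {
  val plan = Toy("Aggregate", "Statistics(rowCount=1)",
    Seq(Toy("Project", "Statistics(rowCount=5.76E+8)")))
  print(plan.treeString(verbose = true, addSuffix = true))
  // Aggregate, Statistics(rowCount=1)
  //   Project, Statistics(rowCount=5.76E+8)
}
```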
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/TableIdentifierParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/TableIdentifierParserSuite.scala
index 7d46011b41..170c469197 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/TableIdentifierParserSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/TableIdentifierParserSuite.scala
@@ -25,8 +25,8 @@ class TableIdentifierParserSuite extends SparkFunSuite {
// Add "$elem$", "$value$" & "$key$"
val hiveNonReservedKeyword = Array("add", "admin", "after", "analyze", "archive", "asc", "before",
"bucket", "buckets", "cascade", "change", "cluster", "clustered", "clusterstatus", "collection",
- "columns", "comment", "compact", "compactions", "compute", "concatenate", "continue", "data",
- "day", "databases", "datetime", "dbproperties", "deferred", "defined", "delimited",
+ "columns", "comment", "compact", "compactions", "compute", "concatenate", "continue", "cost",
+ "data", "day", "databases", "datetime", "dbproperties", "deferred", "defined", "delimited",
"dependency", "desc", "directories", "directory", "disable", "distribute",
"enable", "escaped", "exclusive", "explain", "export", "fields", "file", "fileformat", "first",
"format", "formatted", "functions", "hold_ddltime", "hour", "idxproperties", "ignore", "index",