author     Jonathan Alter <jonalter@users.noreply.github.com>   2015-07-10 11:34:01 +0100
committer  Sean Owen <sowen@cloudera.com>                       2015-07-10 11:34:01 +0100
commit     e14b545d2dcbc4587688b4c46718d3680b0a2f67 (patch)
tree       8ae5fe8258bce4fd1cb2d55c7031de8b1dcc3963 /sql/core
parent     d538919cc4fd3ab940d478c62dce1bae0270cfeb (diff)
[SPARK-7977] [BUILD] Disallowing println
Author: Jonathan Alter <jonalter@users.noreply.github.com>

Closes #7093 from jonalter/SPARK-7977 and squashes the following commits:

ccd44cc [Jonathan Alter] Changed println to log in ThreadingSuite
7fcac3e [Jonathan Alter] Reverting to println in ThreadingSuite
10724b6 [Jonathan Alter] Changing some printlns to logs in tests
eeec1e7 [Jonathan Alter] Merge branch 'master' of github.com:apache/spark into SPARK-7977
0b1dcb4 [Jonathan Alter] More println cleanup
aedaf80 [Jonathan Alter] Merge branch 'master' of github.com:apache/spark into SPARK-7977
925fd98 [Jonathan Alter] Merge branch 'master' of github.com:apache/spark into SPARK-7977
0c16fa3 [Jonathan Alter] Replacing some printlns with logs
45c7e05 [Jonathan Alter] Merge branch 'master' of github.com:apache/spark into SPARK-7977
5c8e283 [Jonathan Alter] Allowing println in audit-release examples
5b50da1 [Jonathan Alter] Allowing printlns in example files
ca4b477 [Jonathan Alter] Merge branch 'master' of github.com:apache/spark into SPARK-7977
83ab635 [Jonathan Alter] Fixing new printlns
54b131f [Jonathan Alter] Merge branch 'master' of github.com:apache/spark into SPARK-7977
1cd8a81 [Jonathan Alter] Removing some unnecessary comments and printlns
b837c3a [Jonathan Alter] Disallowing println
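The pattern this patch applies throughout: where console output is user-facing and intentional (Column.explain, DataFrame.printSchema, DataFrame.show), the new scalastyle check is switched off locally rather than the println being removed; internal diagnostics instead move to the logger. A minimal sketch of the suppression pragma, with an illustrative message that is not from the patch:

    // scalastyle:off println
    println("user-facing console output, intentionally kept")
    // scalastyle:on println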
Diffstat (limited to 'sql/core')
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/Column.scala                    2
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala                 6
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/execution/debug/package.scala  16
3 files changed, 16 insertions(+), 8 deletions(-)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/Column.scala b/sql/core/src/main/scala/org/apache/spark/sql/Column.scala
index f201c8ea8a..1025026462 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/Column.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/Column.scala
@@ -860,11 +860,13 @@ class Column(protected[sql] val expr: Expression) extends Logging {
    * @since 1.3.0
    */
   def explain(extended: Boolean): Unit = {
+    // scalastyle:off println
     if (extended) {
       println(expr)
     } else {
       println(expr.prettyString)
     }
+    // scalastyle:on println
   }
 
   /**
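Column.explain keeps its console output by design; only the style check is silenced around it. A quick usage sketch, assuming a Spark 1.x SQLContext named sqlContext (not part of the patch):

    val df = sqlContext.range(0, 10)   // DataFrame with a single "id" column
    val col = df("id") + 1
    col.explain(extended = true)       // prints the full expression tree via println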
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala b/sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala
index d7966651b1..830fba35bb 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala
@@ -308,7 +308,9 @@ class DataFrame private[sql](
    * @group basic
    * @since 1.3.0
    */
+  // scalastyle:off println
   def printSchema(): Unit = println(schema.treeString)
+  // scalastyle:on println
 
   /**
    * Prints the plans (logical and physical) to the console for debugging purposes.
@@ -319,7 +321,9 @@ class DataFrame private[sql](
     ExplainCommand(
       queryExecution.logical,
       extended = extended).queryExecution.executedPlan.executeCollect().map {
+      // scalastyle:off println
       r => println(r.getString(0))
+      // scalastyle:on println
     }
   }
@@ -392,7 +396,9 @@ class DataFrame private[sql](
    * @group action
    * @since 1.5.0
    */
+  // scalastyle:off println
   def show(numRows: Int, truncate: Boolean): Unit = println(showString(numRows, truncate))
+  // scalastyle:on println
 
   /**
    * Returns a [[DataFrameNaFunctions]] for working with missing data.
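All three DataFrame methods touched above exist to print to the console, so the check is suppressed rather than the behavior changed. A hypothetical usage sketch, where df is any existing DataFrame:

    df.printSchema()               // prints the schema in tree form
    df.explain(extended = true)    // prints the logical and physical plans
    df.show(20, truncate = true)   // prints up to 20 rows, truncating long cells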
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/debug/package.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/debug/package.scala
index 2964edac1a..e6081cb05b 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/debug/package.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/debug/package.scala
@@ -24,7 +24,7 @@ import org.apache.spark.unsafe.types.UTF8String
 
 import scala.collection.mutable.HashSet
 
-import org.apache.spark.{AccumulatorParam, Accumulator}
+import org.apache.spark.{AccumulatorParam, Accumulator, Logging}
 import org.apache.spark.annotation.DeveloperApi
 import org.apache.spark.sql._
 import org.apache.spark.sql.catalyst.trees.TreeNodeRef
@@ -57,7 +57,7 @@ package object debug {
    * Augments [[DataFrame]]s with debug methods.
    */
   @DeveloperApi
-  implicit class DebugQuery(query: DataFrame) {
+  implicit class DebugQuery(query: DataFrame) extends Logging {
     def debug(): Unit = {
       val plan = query.queryExecution.executedPlan
       val visited = new collection.mutable.HashSet[TreeNodeRef]()
@@ -66,7 +66,7 @@ package object debug {
         visited += new TreeNodeRef(s)
         DebugNode(s)
       }
-      println(s"Results returned: ${debugPlan.execute().count()}")
+      logDebug(s"Results returned: ${debugPlan.execute().count()}")
       debugPlan.foreach {
         case d: DebugNode => d.dumpStats()
         case _ =>
@@ -82,11 +82,11 @@ package object debug {
         TypeCheck(s)
       }
       try {
-        println(s"Results returned: ${debugPlan.execute().count()}")
+        logDebug(s"Results returned: ${debugPlan.execute().count()}")
       } catch {
         case e: Exception =>
           def unwrap(e: Throwable): Throwable = if (e.getCause == null) e else unwrap(e.getCause)
-          println(s"Deepest Error: ${unwrap(e)}")
+          logDebug(s"Deepest Error: ${unwrap(e)}")
       }
     }
   }
@@ -119,11 +119,11 @@ package object debug {
     val columnStats: Array[ColumnMetrics] = Array.fill(child.output.size)(new ColumnMetrics())
 
     def dumpStats(): Unit = {
-      println(s"== ${child.simpleString} ==")
-      println(s"Tuples output: ${tupleCount.value}")
+      logDebug(s"== ${child.simpleString} ==")
+      logDebug(s"Tuples output: ${tupleCount.value}")
       child.output.zip(columnStats).foreach { case(attr, metric) =>
         val actualDataTypes = metric.elementTypes.value.mkString("{", ",", "}")
-        println(s" ${attr.name} ${attr.dataType}: $actualDataTypes")
+        logDebug(s" ${attr.name} ${attr.dataType}: $actualDataTypes")
       }
     }
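Unlike the DataFrame methods, the debug package's println calls were internal diagnostics, so they move behind the org.apache.spark.Logging trait. A minimal sketch of the same pattern; StatsReporter is a hypothetical name, not from the patch:

    import org.apache.spark.Logging

    class StatsReporter extends Logging {
      // Output now goes through log4j at DEBUG level rather than stdout,
      // so it is invisible unless DEBUG logging is enabled for this class.
      def report(label: String, tuples: Long): Unit = {
        logDebug(s"== $label ==")
        logDebug(s"Tuples output: $tuples")
      }
    }

Note the behavioral consequence: under Spark's default log4j configuration these messages are suppressed, so the debug helpers produce output only once DEBUG logging is turned on.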