about summary refs log tree commit diff
path: root/sql/core/src/main
diff options
context:
space:
mode:
author	Herman van Hovell <hvanhovell@databricks.com>	2017-03-24 15:52:48 -0700
committer	Xiao Li <gatorsmile@gmail.com>	2017-03-24 15:52:48 -0700
commit	91fa80fe8a2480d64c430bd10f97b3d44c007bcc (patch)
tree	27066ae13cf087df6e3d943f30cbb91eac238c7d /sql/core/src/main
parent	e8810b73c495b6d437dd3b9bb334762126b3c063 (diff)
downloadspark-91fa80fe8a2480d64c430bd10f97b3d44c007bcc.tar.gz
spark-91fa80fe8a2480d64c430bd10f97b3d44c007bcc.tar.bz2
spark-91fa80fe8a2480d64c430bd10f97b3d44c007bcc.zip
[SPARK-20070][SQL] Redact DataSourceScanExec treeString
## What changes were proposed in this pull request? The explain output of `DataSourceScanExec` can contain sensitive information (like Amazon keys). Such information should not end up in logs, or be exposed to non privileged users. This PR addresses this by adding a redaction facility for the `DataSourceScanExec.treeString`. A user can enable this by setting a regex in the `spark.redaction.string.regex` configuration. ## How was this patch tested? Added a unit test to check the output of DataSourceScanExec. Author: Herman van Hovell <hvanhovell@databricks.com> Closes #17397 from hvanhovell/SPARK-20070.
Diffstat (limited to 'sql/core/src/main')
-rw-r--r--	sql/core/src/main/scala/org/apache/spark/sql/execution/DataSourceScanExec.scala	| 41
1 file changed, 25 insertions(+), 16 deletions(-)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/DataSourceScanExec.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/DataSourceScanExec.scala
index bfe9c8e351..28156b277f 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/DataSourceScanExec.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/DataSourceScanExec.scala
@@ -41,9 +41,33 @@ trait DataSourceScanExec extends LeafExecNode with CodegenSupport {
val relation: BaseRelation
val metastoreTableIdentifier: Option[TableIdentifier]
+ protected val nodeNamePrefix: String = ""
+
override val nodeName: String = {
s"Scan $relation ${metastoreTableIdentifier.map(_.unquotedString).getOrElse("")}"
}
+
+ override def simpleString: String = {
+ val metadataEntries = metadata.toSeq.sorted.map {
+ case (key, value) =>
+ key + ": " + StringUtils.abbreviate(redact(value), 100)
+ }
+ val metadataStr = Utils.truncatedString(metadataEntries, " ", ", ", "")
+ s"$nodeNamePrefix$nodeName${Utils.truncatedString(output, "[", ",", "]")}$metadataStr"
+ }
+
+ override def verboseString: String = redact(super.verboseString)
+
+ override def treeString(verbose: Boolean, addSuffix: Boolean): String = {
+ redact(super.treeString(verbose, addSuffix))
+ }
+
+ /**
+ * Shorthand for calling redactString() without specifying redacting rules
+ */
+ private def redact(text: String): String = {
+ Utils.redact(SparkSession.getActiveSession.get.sparkContext.conf, text)
+ }
}
/** Physical plan node for scanning data from a relation. */
@@ -85,15 +109,6 @@ case class RowDataSourceScanExec(
}
}
- override def simpleString: String = {
- val metadataEntries = for ((key, value) <- metadata.toSeq.sorted) yield {
- key + ": " + StringUtils.abbreviate(value, 100)
- }
-
- s"$nodeName${Utils.truncatedString(output, "[", ",", "]")}" +
- s"${Utils.truncatedString(metadataEntries, " ", ", ", "")}"
- }
-
override def inputRDDs(): Seq[RDD[InternalRow]] = {
rdd :: Nil
}
@@ -307,13 +322,7 @@ case class FileSourceScanExec(
}
}
- override def simpleString: String = {
- val metadataEntries = for ((key, value) <- metadata.toSeq.sorted) yield {
- key + ": " + StringUtils.abbreviate(value, 100)
- }
- val metadataStr = Utils.truncatedString(metadataEntries, " ", ", ", "")
- s"File$nodeName${Utils.truncatedString(output, "[", ",", "]")}$metadataStr"
- }
+ override val nodeNamePrefix: String = "File"
override protected def doProduce(ctx: CodegenContext): String = {
if (supportsBatch) {