Diffstat (limited to 'sql/core/src')
 sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala  |  3 ++-
 sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/ddl.scala | 10 +++++++++-
 sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala            | 32 ++++++++++++++++++++++++++++++++
 3 files changed, 43 insertions(+), 2 deletions(-)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
index 7049e53a78..ca4d20a99c 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
@@ -503,7 +503,8 @@ case class DescribeTableCommand(
     describeBucketingInfo(metadata, buffer)
 
     append(buffer, "Storage Desc Parameters:", "", "")
-    metadata.storage.properties.foreach { case (key, value) =>
+    val maskedProperties = CatalogUtils.maskCredentials(metadata.storage.properties)
+    maskedProperties.foreach { case (key, value) =>
       append(buffer, s"  $key", value, "")
     }
   }
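
For context, a minimal sketch of the masking this change relies on. This is assumed behavior, not part of the diff: password entries, and url values that embed a password, are replaced with a fixed placeholder before the storage properties are printed.

    import java.util.Locale

    // Sketch of the assumed masking rule: redact any password entry, and any
    // url value that carries a password, with a fixed "###" placeholder.
    object MaskSketch {
      def maskCredentials(options: Map[String, String]): Map[String, String] =
        options.map {
          case (key, _) if key.toLowerCase(Locale.ROOT) == "password" => (key, "###")
          case (key, value)
              if key.toLowerCase(Locale.ROOT) == "url" &&
                value.toLowerCase(Locale.ROOT).contains("password") => (key, "###")
          case other => other
        }
    }

Under that rule, DESC FORMATTED would print a row like "password ###" instead of the clear-text value.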
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/ddl.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/ddl.scala
index fa8dfa9640..695ba1234d 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/ddl.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/ddl.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql.execution.datasources
 
 import org.apache.spark.sql._
 import org.apache.spark.sql.catalyst.TableIdentifier
-import org.apache.spark.sql.catalyst.catalog.CatalogTable
+import org.apache.spark.sql.catalyst.catalog.{CatalogTable, CatalogUtils}
 import org.apache.spark.sql.catalyst.plans.QueryPlan
 import org.apache.spark.sql.catalyst.plans.logical.{Command, LogicalPlan}
 import org.apache.spark.sql.execution.command.RunnableCommand
@@ -56,6 +56,14 @@ case class CreateTempViewUsing(
       s"Temporary view '$tableIdent' should not have specified a database")
   }
 
+  override def argString: String = {
+    s"[tableIdent:$tableIdent " +
+      userSpecifiedSchema.map(_ + " ").getOrElse("") +
+      s"replace:$replace " +
+      s"provider:$provider " +
+      CatalogUtils.maskCredentials(options)
+  }
+
   def run(sparkSession: SparkSession): Seq[Row] = {
     val dataSource = DataSource(
       sparkSession,
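
The argString override above matters because a plan node's rendered form in EXPLAIN output is built from its argument string, so masking there keeps credentials out of the plan text. A simplified, hypothetical illustration (SimpleNode and its rendering are stand-ins, not Spark APIs):

    // Hypothetical stand-in for a plan node: simpleString concatenates the
    // node name with argString, so masking inside argString scrubs the
    // EXPLAIN-style output.
    case class SimpleNode(provider: String, options: Map[String, String]) {
      def argString: String = {
        val masked = options.map {
          case (k, _) if k.equalsIgnoreCase("password") => k -> "###"
          case kv => kv
        }
        s"provider:$provider $masked"
      }
      def simpleString: String = s"SimpleNode $argString"
    }

    // SimpleNode("jdbc", Map("password" -> "secret")).simpleString
    // => "SimpleNode provider:jdbc Map(password -> ###)"

The JDBCSuite test below exercises this path through ExplainCommand.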
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
index f921939ada..b16be457ed 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
@@ -734,6 +734,38 @@ class JDBCSuite extends SparkFunSuite
     }
   }
 
+  test("hide credentials in create and describe a persistent/temp table") {
+    val password = "testPass"
+    val tableName = "tab1"
+    Seq("TABLE", "TEMPORARY VIEW").foreach { tableType =>
+      withTable(tableName) {
+        val df = sql(
+          s"""
+             |CREATE $tableType $tableName
+             |USING org.apache.spark.sql.jdbc
+             |OPTIONS (
+             | url '$urlWithUserAndPass',
+             | dbtable 'TEST.PEOPLE',
+             | user 'testUser',
+             | password '$password')
+           """.stripMargin)
+
+        val explain = ExplainCommand(df.queryExecution.logical, extended = true)
+        spark.sessionState.executePlan(explain).executedPlan.executeCollect().foreach { r =>
+          assert(!r.toString.contains(password))
+        }
+
+        sql(s"DESC FORMATTED $tableName").collect().foreach { r =>
+          assert(!r.toString().contains(password))
+        }
+
+        sql(s"DESC EXTENDED $tableName").collect().foreach { r =>
+          assert(!r.toString().contains(password))
+        }
+      }
+    }
+  }
+
   test("SPARK 12941: The data type mapping for StringType to Oracle") {
     val oracleDialect = JdbcDialects.get("jdbc:oracle://127.0.0.1/db")
     assert(oracleDialect.getJDBCType(StringType).