author      Davies Liu <davies@databricks.com>    2015-08-14 20:56:55 -0700
committer   Reynold Xin <rxin@databricks.com>     2015-08-14 20:56:55 -0700
commit      37586e5449ff8f892d41f0b6b8fa1de83dd3849e (patch)
tree        40ae7de2cc5c3f64891aed0c2e96d994398d34e2 /sql
parent      e5fd60415fbfea2c5c02602f7ddbc999dd058064 (diff)
download    spark-37586e5449ff8f892d41f0b6b8fa1de83dd3849e.tar.gz
            spark-37586e5449ff8f892d41f0b6b8fa1de83dd3849e.tar.bz2
            spark-37586e5449ff8f892d41f0b6b8fa1de83dd3849e.zip
[HOTFIX] fix duplicated braces
Author: Davies Liu <davies@databricks.com>

Closes #8219 from davies/fix_typo.
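For context: in Scala string interpolation, a `}` that immediately follows a `${...}` expression is just literal text, so the duplicated closing braces leaked stray `}` characters into log messages and generated SQL. A minimal sketch of the behavior in plain Scala (the plan string below is made up for illustration, not taken from the patch):

    object DuplicatedBraceDemo extends App {
      val plan = "Project [a#1]"
      // Duplicated closing brace: the second '}' is emitted literally.
      println(s"Promoting a to IntegerType in ${plan}}") // ... in Project [a#1]}
      // Fixed interpolation: only the expression value appears.
      println(s"Promoting a to IntegerType in ${plan}")  // ... in Project [a#1]
    }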
Diffstat (limited to 'sql')
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercion.scala | 2
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala | 2
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/execution/ui/SQLListener.scala | 2
3 files changed, 3 insertions, 3 deletions
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercion.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercion.scala
index 970f3c8282..8581d6b496 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercion.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercion.scala
@@ -164,7 +164,7 @@ object HiveTypeCoercion {
// Leave the same if the dataTypes match.
case Some(newType) if a.dataType == newType.dataType => a
case Some(newType) =>
- logDebug(s"Promoting $a to $newType in ${q.simpleString}}")
+ logDebug(s"Promoting $a to $newType in ${q.simpleString}")
newType
}
}
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
index 039c13bf16..8ee3b8bda8 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
@@ -170,7 +170,7 @@ object JdbcUtils extends Logging {
case BinaryType => "BLOB"
case TimestampType => "TIMESTAMP"
case DateType => "DATE"
- case t: DecimalType => s"DECIMAL(${t.precision}},${t.scale}})"
+ case t: DecimalType => s"DECIMAL(${t.precision},${t.scale})"
case _ => throw new IllegalArgumentException(s"Don't know how to save $field to JDBC")
})
val nullable = if (field.nullable) "" else "NOT NULL"
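To illustrate the impact of the JdbcUtils change above: with the duplicated braces, the interpolated column type fragment carried stray `}` characters, yielding an invalid JDBC DDL type. A minimal sketch in plain Scala with hypothetical precision and scale values (10 and 2), not the actual JdbcUtils code path:

    val precision = 10
    val scale = 2
    // Before the fix: the extra '}' after each expression is literal text.
    val broken = s"DECIMAL(${precision}},${scale}})" // "DECIMAL(10},2})" -- invalid SQL type
    // After the fix: a clean DDL fragment.
    val fixed  = s"DECIMAL(${precision},${scale})"   // "DECIMAL(10,2)"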
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/ui/SQLListener.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/ui/SQLListener.scala
index 0b9bad987c..5779c71f64 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/ui/SQLListener.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/ui/SQLListener.scala
@@ -162,7 +162,7 @@ private[sql] class SQLListener(sqlContext: SQLContext) extends SparkListener wit
// A task of an old stage attempt. Because a new stage is submitted, we can ignore it.
} else if (stageAttemptID > stageMetrics.stageAttemptId) {
logWarning(s"A task should not have a higher stageAttemptID ($stageAttemptID) then " +
- s"what we have seen (${stageMetrics.stageAttemptId}})")
+ s"what we have seen (${stageMetrics.stageAttemptId})")
} else {
// TODO We don't know the attemptId. Currently, what we can do is overriding the
// accumulator updates. However, if there are two same task are running, such as