author     Dongjoon Hyun <dongjoon@apache.org>    2016-06-16 14:27:09 -0700
committer  Andrew Or <andrew@databricks.com>      2016-06-16 14:27:09 -0700
commit     2d27eb1e753daefbd311136fc7de1a3e8fb9dc63 (patch)
tree       f8ae27d7d118c40e77f7eec0c2faa7c3f1598a85
parent     796429d7117e2544207bd9d67bda8b603cb1a535 (diff)
[MINOR][DOCS][SQL] Fix some comments about types(TypeCoercion,Partition) and exceptions.
## What changes were proposed in this pull request?

This PR contains a few changes to code comments.
- `HiveTypeCoercion` is renamed to `TypeCoercion`.
- `NoSuchDatabaseException` is only used for the absence of a database.
- For partition type inference, only `DoubleType` is considered.

## How was this patch tested?

N/A

Author: Dongjoon Hyun <dongjoon@apache.org>

Closes #13674 from dongjoon-hyun/minor_doc_types.
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala       | 4
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalog.scala     | 2
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala                        | 2
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/PartitioningUtils.scala  | 2
4 files changed, 5 insertions, 5 deletions
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala
index 16df628a57..baec6d14a2 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala
@@ -73,7 +73,7 @@ object TypeCoercion {
DoubleType)
/**
- * Case 1 type widening (see the classdoc comment above for HiveTypeCoercion).
+ * Case 1 type widening (see the classdoc comment above for TypeCoercion).
*
* Find the tightest common type of two types that might be used in a binary expression.
* This handles all numeric types except fixed-precision decimals interacting with each other or
@@ -132,7 +132,7 @@ object TypeCoercion {
}
/**
- * Case 2 type widening (see the classdoc comment above for HiveTypeCoercion).
+ * Case 2 type widening (see the classdoc comment above for TypeCoercion).
*
* i.e. the main difference with [[findTightestCommonTypeOfTwo]] is that here we allow some
* loss of precision when widening decimal and double.
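Not part of the patch itself, but a minimal standalone sketch of the two widening cases the corrected comments refer to. The master, app name, and query literals are illustrative only:

```scala
import org.apache.spark.sql.SparkSession

object WideningDemo {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[*]").appName("widening-demo").getOrCreate()

    // Case 1 widening: SMALLINT unioned with INT takes the tightest common type, IntegerType.
    spark.sql("SELECT CAST(1 AS SMALLINT) AS v UNION ALL SELECT CAST(2 AS INT)").printSchema()

    // Case 2 widening: a fixed-precision DECIMAL unioned with DOUBLE is widened to DoubleType,
    // accepting a possible loss of precision.
    spark.sql("SELECT CAST(1.25 AS DECIMAL(10,2)) AS v UNION ALL SELECT CAST(2.5 AS DOUBLE)").printSchema()

    spark.stop()
  }
}
```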
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalog.scala
index 81974b282b..6714846e8c 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalog.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalog.scala
@@ -27,7 +27,7 @@ import org.apache.spark.sql.catalyst.analysis.NoSuchDatabaseException
* can be accessed in multiple threads. This is an external catalog because it is expected to
* interact with external systems.
*
- * Implementations should throw [[NoSuchDatabaseException]] when table or database don't exist.
+ * Implementations should throw [[NoSuchDatabaseException]] when databases don't exist.
*/
abstract class ExternalCatalog {
import CatalogTypes.TablePartitionSpec
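Not part of the diff: a rough sketch of the contract the corrected doc comment states, namely that an operation on a missing database throws NoSuchDatabaseException. DemoCatalog is a hypothetical toy class, not Spark's InMemoryCatalog, and the single-argument NoSuchDatabaseException(db) constructor is an assumption based on its usage in Catalyst:

```scala
import scala.collection.mutable

import org.apache.spark.sql.catalyst.analysis.NoSuchDatabaseException

// Hypothetical toy catalog; only shows where the exception belongs, not real Spark behavior.
class DemoCatalog {
  private val tablesByDb = mutable.Map.empty[String, mutable.Set[String]]

  private def requireDbExists(db: String): Unit = {
    if (!tablesByDb.contains(db)) {
      // Contract from the ExternalCatalog doc comment: missing database => NoSuchDatabaseException.
      throw new NoSuchDatabaseException(db)
    }
  }

  def createDatabase(db: String): Unit = {
    tablesByDb.getOrElseUpdate(db, mutable.Set.empty)
  }

  def listTables(db: String): Seq[String] = {
    requireDbExists(db)
    tablesByDb(db).toSeq
  }
}
```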
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
index 52e021070e..cc8175c0a3 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
@@ -322,7 +322,7 @@ final class Decimal extends Ordered[Decimal] with Serializable {
}
}
- // HiveTypeCoercion will take care of the precision, scale of result
+ // TypeCoercion will take care of the precision, scale of result
def * (that: Decimal): Decimal =
Decimal(toJavaBigDecimal.multiply(that.toJavaBigDecimal, MATH_CONTEXT))
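For context (not in the patch): the multiply itself works on the unlimited-precision values, and the analyzer's coercion and precision rules then fix the precision and scale of the result. A minimal sketch, assuming the usual p1 + p2 + 1 / s1 + s2 rule for the product type, so DECIMAL(3,1) * DECIMAL(3,1) would come back as DECIMAL(7,2):

```scala
import org.apache.spark.sql.SparkSession

object DecimalMultiplyDemo {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[*]").appName("decimal-multiply").getOrCreate()

    // Decimal.* just multiplies; the result's precision and scale are decided during analysis.
    // Expected under the assumed rule: product: decimal(7,2).
    spark.sql("SELECT CAST(1.5 AS DECIMAL(3,1)) * CAST(2.5 AS DECIMAL(3,1)) AS product").printSchema()

    spark.stop()
  }
}
```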
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/PartitioningUtils.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/PartitioningUtils.scala
index 2340ff0afe..388df7002d 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/PartitioningUtils.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/PartitioningUtils.scala
@@ -159,7 +159,7 @@ private[sql] object PartitioningUtils {
* Seq(
* Literal.create(42, IntegerType),
* Literal.create("hello", StringType),
- * Literal.create(3.14, FloatType)))
+ * Literal.create(3.14, DoubleType)))
* }}}
* and the path when we stop the discovery is:
* {{{
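To round out the corrected example: a minimal sketch (the scratch directory, column names, and partition values are hypothetical) showing that a partition value like c=3.14 is discovered as DoubleType rather than FloatType:

```scala
import org.apache.spark.sql.SparkSession

object PartitionInferenceDemo {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[*]").appName("partition-inference").getOrCreate()
    import spark.implicits._

    // Hypothetical scratch directory; any writable local path works.
    val base = "/tmp/partition_inference_demo"

    // Lay out one partition directory whose values exercise the three inferred types.
    Seq((1, "x")).toDF("id", "value")
      .write.mode("overwrite")
      .parquet(s"$base/a=42/b=hello/c=3.14")

    // Partition discovery should report a: int, b: string, c: double, i.e. 3.14 is
    // inferred as DoubleType, matching the corrected comment above.
    spark.read.parquet(base).printSchema()

    spark.stop()
  }
}
```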