author    Bo Meng <mengbo@hotmail.com>      2016-05-22 08:10:54 -0500
committer Sean Owen <sowen@cloudera.com>    2016-05-22 08:10:54 -0500
commit    72288fd67edc00f56e2e47eab2ef58fe4ff8c177 (patch)
tree      0a0fd9ddd6289ede9e177079b5aa1daef3137141
parent    7920296bf8f313e010205937d3ebcbbc7b1a1d9e (diff)
[SPARK-15468][SQL] fix some typos
## What changes were proposed in this pull request?

Fix some typos found while browsing the code.

## How was this patch tested?

None needed; the changes are trivial typo fixes.

Author: Bo Meng <mengbo@hotmail.com>
Author: bomeng <bmeng@us.ibm.com>

Closes #13246 from bomeng/typo.
-rw-r--r--  core/src/main/scala/org/apache/spark/util/AccumulatorV2.scala                                               | 2 +-
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateSafeProjection.scala  | 2 +-
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala                           | 2 +-
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/planning/patterns.scala                           | 4 ++--
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/types/UDTRegistration.scala                                | 2 +-

5 files changed, 6 insertions(+), 6 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/util/AccumulatorV2.scala b/core/src/main/scala/org/apache/spark/util/AccumulatorV2.scala
index 21ba46024d..0b9a47c112 100644
--- a/core/src/main/scala/org/apache/spark/util/AccumulatorV2.scala
+++ b/core/src/main/scala/org/apache/spark/util/AccumulatorV2.scala
@@ -55,7 +55,7 @@ abstract class AccumulatorV2[IN, OUT] extends Serializable {
/**
* Returns true if this accumulator has been registered. Note that all accumulators must be
- * registered before ues, or it will throw exception.
+ * registered before use, or it will throw exception.
*/
final def isRegistered: Boolean =
metadata != null && AccumulatorContext.get(metadata.id).isDefined
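
For context, a minimal sketch of the registration requirement this doc comment describes (the session setup and accumulator name are illustrative, not part of this patch): the built-in helpers such as `SparkContext.longAccumulator` both create and register the accumulator, so `isRegistered` holds before the first use.

```scala
import org.apache.spark.sql.SparkSession

// Illustrative sketch, not part of this change: sc.longAccumulator creates
// *and* registers the accumulator, so isRegistered is true before first use.
val spark = SparkSession.builder().master("local[*]").appName("acc-demo").getOrCreate()
val sc = spark.sparkContext

val acc = sc.longAccumulator("myCounter") // registered on creation
assert(acc.isRegistered)

sc.parallelize(1 to 10).foreach(n => acc.add(n))
println(acc.value) // 55

spark.stop()
```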
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateSafeProjection.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateSafeProjection.scala
index b0b1212553..214dc40641 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateSafeProjection.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateSafeProjection.scala
@@ -48,7 +48,7 @@ object GenerateSafeProjection extends CodeGenerator[Seq[Expression], Projection]
val tmp = ctx.freshName("tmp")
val output = ctx.freshName("safeRow")
val values = ctx.freshName("values")
- // These expressions could be splitted into multiple functions
+ // These expressions could be split into multiple functions
ctx.addMutableState("Object[]", values, s"this.$values = null;")
val rowClass = classOf[GenericInternalRow].getName
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
index cace026701..a13c03a529 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
@@ -771,7 +771,7 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with Logging {
* ******************************************************************************************** */
/**
* Create an expression from the given context. This method just passes the context on to the
- * vistor and only takes care of typing (We assume that the visitor returns an Expression here).
+ * visitor and only takes care of typing (We assume that the visitor returns an Expression here).
*/
protected def expression(ctx: ParserRuleContext): Expression = typedVisit(ctx)
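
As a hedged illustration of the visitor path this comment documents (the input string is invented): `CatalystSqlParser.parseExpression` runs the ANTLR parser and drives `AstBuilder` over the resulting context, and the typed helper above casts the visitor's result to `Expression`.

```scala
import org.apache.spark.sql.catalyst.expressions.Expression
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser

// Illustrative only: parseExpression hands the parsed ParserRuleContext to
// AstBuilder, whose expression(ctx) helper (shown above) delegates to the
// visitor and casts the result to Expression.
val expr: Expression = CatalystSqlParser.parseExpression("a + 1")
println(expr) // an unresolved expression tree, e.g. ('a + 1)
```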
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/planning/patterns.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/planning/patterns.scala
index 0065619135..f42e67ca6e 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/planning/patterns.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/planning/patterns.scala
@@ -227,8 +227,8 @@ object IntegerIndex {
* - Unnamed grouping expressions are named so that they can be referred to across phases of
* aggregation
* - Aggregations that appear multiple times are deduplicated.
- * - The compution of the aggregations themselves is separated from the final result. For example,
- * the `count` in `count + 1` will be split into an [[AggregateExpression]] and a final
+ * - The computation of the aggregations themselves is separated from the final result. For
+ * example, the `count` in `count + 1` will be split into an [[AggregateExpression]] and a final
* computation that computes `count.resultAttribute + 1`.
*/
object PhysicalAggregation {
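
A small end-to-end sketch of the split described above (the query and data are invented for illustration): for `count(x) + 1`, planning produces an [[AggregateExpression]] for `count(x)` plus a final projection computing `count.resultAttribute + 1`.

```scala
import org.apache.spark.sql.SparkSession

// Illustrative sketch of the behavior documented above: `count(x) + 1` is
// planned as an AggregateExpression for count(x), plus a final computation
// over count.resultAttribute + 1.
val spark = SparkSession.builder().master("local[*]").appName("agg-demo").getOrCreate()
import spark.implicits._

val df = Seq(1, 2, 3).toDF("x")
df.selectExpr("count(x) + 1").explain() // shows the separated aggregate phases
df.selectExpr("count(x) + 1").show()    // prints 4

spark.stop()
```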
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/UDTRegistration.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/UDTRegistration.scala
index 0f24e51ed2..20ec75c706 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/UDTRegistration.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/UDTRegistration.scala
@@ -27,7 +27,7 @@ import org.apache.spark.util.Utils
* This object keeps the mappings between user classes and their User Defined Types (UDTs).
* Previously we use the annotation `SQLUserDefinedType` to register UDTs for user classes.
* However, by doing this, we add SparkSQL dependency on user classes. This object provides
- * alterntive approach to register UDTs for user classes.
+ * alternative approach to register UDTs for user classes.
*/
private[spark]
object UDTRegistration extends Serializable with Logging {
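
A hypothetical sketch of the alternative registration path this object provides. Note that `UDTRegistration` is `private[spark]`, so the call below only compiles from code placed inside an `org.apache.spark` package; the class and UDT names are made up for the example.

```scala
// Hypothetical sketch (class names invented): because UDTRegistration is
// private[spark], this must live under an org.apache.spark package. It maps
// a user class to its UDT by fully qualified name, without annotating the
// user class with @SQLUserDefinedType.
package org.apache.spark.example

import org.apache.spark.sql.types.UDTRegistration

object RegisterMyUDT {
  def main(args: Array[String]): Unit = {
    UDTRegistration.register(
      "com.example.MyPoint",    // user class (hypothetical)
      "com.example.MyPointUDT") // its UserDefinedType (hypothetical)
    println(UDTRegistration.exists("com.example.MyPoint")) // true
  }
}
```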