author     Davies Liu <davies@databricks.com>    2015-06-13 16:13:26 -0700
committer  Reynold Xin <rxin@databricks.com>     2015-06-13 16:13:26 -0700
commit     ce1041c38f92449ca14894551c358c875672afe6 (patch)
tree       43de79c5aea1e508c8b9b2854e679a3b4276aca0 /sql
parent     d986fb9a378416248768828e6e6c7405697f9a5a (diff)
[SPARK-8346] [SQL] Use InternalRow instead of catalyst.InternalRow

cc rxin marmbrus

Author: Davies Liu <davies@databricks.com>

Closes #6802 from davies/cleanup_internalrow and squashes the following commits:

769d2aa [Davies Liu] remove not needed cast
4acbbe4 [Davies Liu] catalyst.Internal -> InternalRow
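The change itself is mechanical: every use site that spelled out the package-qualified name catalyst.InternalRow now relies on a direct import and writes the bare InternalRow. A minimal, self-contained Scala sketch of the pattern (stand-in names, not Spark's actual classes, so it compiles on its own):

object catalyst { class InternalRow }  // stand-in for org.apache.spark.sql.catalyst

// Before this commit: the type is qualified at every use site.
object Before {
  def eval(input: catalyst.InternalRow): Any =
    throw new UnsupportedOperationException("sketch only")
}

// After: import once, then use the short name everywhere.
object After {
  import catalyst.InternalRow
  def eval(input: InternalRow): Any =
    throw new UnsupportedOperationException("sketch only")
}

Apart from the one removed cast noted in the squash list, the diff below should touch only type annotations, imports, and doc comments, leaving runtime behavior unchanged.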
Diffstat (limited to 'sql')
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/unresolved.scala                       |  10
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala                          |   4
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ExtractValue.scala                  |  10
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Projection.scala                    | 105
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ScalaUdf.scala                      |  48
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregates.scala                    |  68
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala                    |  12
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala         |   6
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateMutableProjection.scala |   3
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateOrdering.scala      |  10
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GeneratePredicate.scala     |   8
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypes.scala                  |   4
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/conditionals.scala                  |   6
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/decimalFunctions.scala              |   4
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala                    |  14
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala                      |   6
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/math.scala                          |   8
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/namedExpressions.scala              |   6
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/nullFunctions.scala                 |  13
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/package.scala                       |   6
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala                                              |   6
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/execution/joins/HashOuterJoin.scala                          |   2
22 files changed, 176 insertions(+), 183 deletions(-)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/unresolved.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/unresolved.scala
index 5de188d418..c9d9142578 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/unresolved.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/unresolved.scala
@@ -68,7 +68,7 @@ case class UnresolvedAttribute(nameParts: Seq[String])
override def withName(newName: String): UnresolvedAttribute = UnresolvedAttribute.quoted(newName)
// Unresolved attributes are transient at compile time and don't get evaluated during execution.
- override def eval(input: catalyst.InternalRow = null): Any =
+ override def eval(input: InternalRow = null): Any =
throw new TreeNodeException(this, s"No function to evaluate expression. type: ${this.nodeName}")
override def toString: String = s"'$name"
@@ -86,7 +86,7 @@ case class UnresolvedFunction(name: String, children: Seq[Expression]) extends E
override lazy val resolved = false
// Unresolved functions are transient at compile time and don't get evaluated during execution.
- override def eval(input: catalyst.InternalRow = null): Any =
+ override def eval(input: InternalRow = null): Any =
throw new TreeNodeException(this, s"No function to evaluate expression. type: ${this.nodeName}")
override def toString: String = s"'$name(${children.mkString(",")})"
@@ -108,7 +108,7 @@ trait Star extends NamedExpression with trees.LeafNode[Expression] {
override lazy val resolved = false
// Star gets expanded at runtime so we never evaluate a Star.
- override def eval(input: catalyst.InternalRow = null): Any =
+ override def eval(input: InternalRow = null): Any =
throw new TreeNodeException(this, s"No function to evaluate expression. type: ${this.nodeName}")
def expand(input: Seq[Attribute], resolver: Resolver): Seq[NamedExpression]
@@ -167,7 +167,7 @@ case class MultiAlias(child: Expression, names: Seq[String])
override lazy val resolved = false
- override def eval(input: catalyst.InternalRow = null): Any =
+ override def eval(input: InternalRow = null): Any =
throw new TreeNodeException(this, s"No function to evaluate expression. type: ${this.nodeName}")
override def toString: String = s"$child AS $names"
@@ -201,7 +201,7 @@ case class UnresolvedExtractValue(child: Expression, extraction: Expression)
override def nullable: Boolean = throw new UnresolvedException(this, "nullable")
override lazy val resolved = false
- override def eval(input: catalyst.InternalRow = null): Any =
+ override def eval(input: InternalRow = null): Any =
throw new TreeNodeException(this, s"No function to evaluate expression. type: ${this.nodeName}")
override def toString: String = s"$child[$extraction]"
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
index afbf30af33..05a04bdff9 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
@@ -394,7 +394,7 @@ case class Cast(child: Expression, dataType: DataType) extends UnaryExpression w
}
// TODO: Could be faster?
val newRow = new GenericMutableRow(from.fields.size)
- buildCast[catalyst.InternalRow](_, row => {
+ buildCast[InternalRow](_, row => {
var i = 0
while (i < row.length) {
val v = row(i)
@@ -426,7 +426,7 @@ case class Cast(child: Expression, dataType: DataType) extends UnaryExpression w
private[this] lazy val cast: Any => Any = cast(child.dataType, dataType)
- override def eval(input: catalyst.InternalRow): Any = {
+ override def eval(input: InternalRow): Any = {
val evaluated = child.eval(input)
if (evaluated == null) null else cast(evaluated)
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ExtractValue.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ExtractValue.scala
index 16f3ccc3d6..4aaabff15b 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ExtractValue.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ExtractValue.scala
@@ -105,8 +105,8 @@ case class GetStructField(child: Expression, field: StructField, ordinal: Int)
override def foldable: Boolean = child.foldable
override def toString: String = s"$child.${field.name}"
- override def eval(input: catalyst.InternalRow): Any = {
- val baseValue = child.eval(input).asInstanceOf[catalyst.InternalRow]
+ override def eval(input: InternalRow): Any = {
+ val baseValue = child.eval(input).asInstanceOf[InternalRow]
if (baseValue == null) null else baseValue(ordinal)
}
}
@@ -125,8 +125,8 @@ case class GetArrayStructFields(
override def foldable: Boolean = child.foldable
override def toString: String = s"$child.${field.name}"
- override def eval(input: catalyst.InternalRow): Any = {
- val baseValue = child.eval(input).asInstanceOf[Seq[catalyst.InternalRow]]
+ override def eval(input: InternalRow): Any = {
+ val baseValue = child.eval(input).asInstanceOf[Seq[InternalRow]]
if (baseValue == null) null else {
baseValue.map { row =>
if (row == null) null else row(ordinal)
@@ -146,7 +146,7 @@ abstract class ExtractValueWithOrdinal extends ExtractValue {
override def toString: String = s"$child[$ordinal]"
override def children: Seq[Expression] = child :: ordinal :: Nil
- override def eval(input: catalyst.InternalRow): Any = {
+ override def eval(input: InternalRow): Any = {
val value = child.eval(input)
if (value == null) {
null
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Projection.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Projection.scala
index d6806f78ab..d5967438cc 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Projection.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Projection.scala
@@ -17,9 +17,6 @@
package org.apache.spark.sql.catalyst.expressions
-import org.apache.spark.sql.catalyst
-
-
/**
* A [[Projection]] that is calculated by calling the `eval` of each of the specified expressions.
* @param expressions a sequence of expressions that determine the value of each column of the
@@ -32,7 +29,7 @@ class InterpretedProjection(expressions: Seq[Expression]) extends Projection {
// null check is required for when Kryo invokes the no-arg constructor.
protected val exprArray = if (expressions != null) expressions.toArray else null
- def apply(input: catalyst.InternalRow): catalyst.InternalRow = {
+ def apply(input: InternalRow): InternalRow = {
val outputArray = new Array[Any](exprArray.length)
var i = 0
while (i < exprArray.length) {
@@ -57,14 +54,14 @@ case class InterpretedMutableProjection(expressions: Seq[Expression]) extends Mu
private[this] val exprArray = expressions.toArray
private[this] var mutableRow: MutableRow = new GenericMutableRow(exprArray.size)
- def currentValue: catalyst.InternalRow = mutableRow
+ def currentValue: InternalRow = mutableRow
override def target(row: MutableRow): MutableProjection = {
mutableRow = row
this
}
- override def apply(input: catalyst.InternalRow): catalyst.InternalRow = {
+ override def apply(input: InternalRow): InternalRow = {
var i = 0
while (i < exprArray.length) {
mutableRow(i) = exprArray(i).eval(input)
@@ -78,31 +75,31 @@ case class InterpretedMutableProjection(expressions: Seq[Expression]) extends Mu
* A mutable wrapper that makes two rows appear as a single concatenated row. Designed to
* be instantiated once per thread and reused.
*/
-class JoinedRow extends catalyst.InternalRow {
- private[this] var row1: catalyst.InternalRow = _
- private[this] var row2: catalyst.InternalRow = _
+class JoinedRow extends InternalRow {
+ private[this] var row1: InternalRow = _
+ private[this] var row2: InternalRow = _
- def this(left: catalyst.InternalRow, right: catalyst.InternalRow) = {
+ def this(left: InternalRow, right: InternalRow) = {
this()
row1 = left
row2 = right
}
/** Updates this JoinedRow to used point at two new base rows. Returns itself. */
- def apply(r1: catalyst.InternalRow, r2: catalyst.InternalRow): catalyst.InternalRow = {
+ def apply(r1: InternalRow, r2: InternalRow): InternalRow = {
row1 = r1
row2 = r2
this
}
/** Updates this JoinedRow by updating its left base row. Returns itself. */
- def withLeft(newLeft: catalyst.InternalRow): catalyst.InternalRow = {
+ def withLeft(newLeft: InternalRow): InternalRow = {
row1 = newLeft
this
}
/** Updates this JoinedRow by updating its right base row. Returns itself. */
- def withRight(newRight: catalyst.InternalRow): catalyst.InternalRow = {
+ def withRight(newRight: InternalRow): InternalRow = {
row2 = newRight
this
}
@@ -144,7 +141,7 @@ class JoinedRow extends catalyst.InternalRow {
override def getAs[T](i: Int): T =
if (i < row1.length) row1.getAs[T](i) else row2.getAs[T](i - row1.length)
- override def copy(): catalyst.InternalRow = {
+ override def copy(): InternalRow = {
val totalSize = row1.length + row2.length
val copiedValues = new Array[Any](totalSize)
var i = 0
@@ -178,31 +175,31 @@ class JoinedRow extends catalyst.InternalRow {
* Row will be referenced, increasing the opportunity for the JIT to play tricks. This sounds
* crazy but in benchmarks it had noticeable effects.
*/
-class JoinedRow2 extends catalyst.InternalRow {
- private[this] var row1: catalyst.InternalRow = _
- private[this] var row2: catalyst.InternalRow = _
+class JoinedRow2 extends InternalRow {
+ private[this] var row1: InternalRow = _
+ private[this] var row2: InternalRow = _
- def this(left: catalyst.InternalRow, right: catalyst.InternalRow) = {
+ def this(left: InternalRow, right: InternalRow) = {
this()
row1 = left
row2 = right
}
/** Updates this JoinedRow to used point at two new base rows. Returns itself. */
- def apply(r1: catalyst.InternalRow, r2: catalyst.InternalRow): catalyst.InternalRow = {
+ def apply(r1: InternalRow, r2: InternalRow): InternalRow = {
row1 = r1
row2 = r2
this
}
/** Updates this JoinedRow by updating its left base row. Returns itself. */
- def withLeft(newLeft: catalyst.InternalRow): catalyst.InternalRow = {
+ def withLeft(newLeft: InternalRow): InternalRow = {
row1 = newLeft
this
}
/** Updates this JoinedRow by updating its right base row. Returns itself. */
- def withRight(newRight: catalyst.InternalRow): catalyst.InternalRow = {
+ def withRight(newRight: InternalRow): InternalRow = {
row2 = newRight
this
}
@@ -244,7 +241,7 @@ class JoinedRow2 extends catalyst.InternalRow {
override def getAs[T](i: Int): T =
if (i < row1.length) row1.getAs[T](i) else row2.getAs[T](i - row1.length)
- override def copy(): catalyst.InternalRow = {
+ override def copy(): InternalRow = {
val totalSize = row1.length + row2.length
val copiedValues = new Array[Any](totalSize)
var i = 0
@@ -272,31 +269,31 @@ class JoinedRow2 extends catalyst.InternalRow {
/**
* JIT HACK: Replace with macros
*/
-class JoinedRow3 extends catalyst.InternalRow {
- private[this] var row1: catalyst.InternalRow = _
- private[this] var row2: catalyst.InternalRow = _
+class JoinedRow3 extends InternalRow {
+ private[this] var row1: InternalRow = _
+ private[this] var row2: InternalRow = _
- def this(left: catalyst.InternalRow, right: catalyst.InternalRow) = {
+ def this(left: InternalRow, right: InternalRow) = {
this()
row1 = left
row2 = right
}
/** Updates this JoinedRow to used point at two new base rows. Returns itself. */
- def apply(r1: catalyst.InternalRow, r2: catalyst.InternalRow): catalyst.InternalRow = {
+ def apply(r1: InternalRow, r2: InternalRow): InternalRow = {
row1 = r1
row2 = r2
this
}
/** Updates this JoinedRow by updating its left base row. Returns itself. */
- def withLeft(newLeft: catalyst.InternalRow): catalyst.InternalRow = {
+ def withLeft(newLeft: InternalRow): InternalRow = {
row1 = newLeft
this
}
/** Updates this JoinedRow by updating its right base row. Returns itself. */
- def withRight(newRight: catalyst.InternalRow): catalyst.InternalRow = {
+ def withRight(newRight: InternalRow): InternalRow = {
row2 = newRight
this
}
@@ -338,7 +335,7 @@ class JoinedRow3 extends catalyst.InternalRow {
override def getAs[T](i: Int): T =
if (i < row1.length) row1.getAs[T](i) else row2.getAs[T](i - row1.length)
- override def copy(): catalyst.InternalRow = {
+ override def copy(): InternalRow = {
val totalSize = row1.length + row2.length
val copiedValues = new Array[Any](totalSize)
var i = 0
@@ -366,31 +363,31 @@ class JoinedRow3 extends catalyst.InternalRow {
/**
* JIT HACK: Replace with macros
*/
-class JoinedRow4 extends catalyst.InternalRow {
- private[this] var row1: catalyst.InternalRow = _
- private[this] var row2: catalyst.InternalRow = _
+class JoinedRow4 extends InternalRow {
+ private[this] var row1: InternalRow = _
+ private[this] var row2: InternalRow = _
- def this(left: catalyst.InternalRow, right: catalyst.InternalRow) = {
+ def this(left: InternalRow, right: InternalRow) = {
this()
row1 = left
row2 = right
}
/** Updates this JoinedRow to used point at two new base rows. Returns itself. */
- def apply(r1: catalyst.InternalRow, r2: catalyst.InternalRow): catalyst.InternalRow = {
+ def apply(r1: InternalRow, r2: InternalRow): InternalRow = {
row1 = r1
row2 = r2
this
}
/** Updates this JoinedRow by updating its left base row. Returns itself. */
- def withLeft(newLeft: catalyst.InternalRow): catalyst.InternalRow = {
+ def withLeft(newLeft: InternalRow): InternalRow = {
row1 = newLeft
this
}
/** Updates this JoinedRow by updating its right base row. Returns itself. */
- def withRight(newRight: catalyst.InternalRow): catalyst.InternalRow = {
+ def withRight(newRight: InternalRow): InternalRow = {
row2 = newRight
this
}
@@ -432,7 +429,7 @@ class JoinedRow4 extends catalyst.InternalRow {
override def getAs[T](i: Int): T =
if (i < row1.length) row1.getAs[T](i) else row2.getAs[T](i - row1.length)
- override def copy(): catalyst.InternalRow = {
+ override def copy(): InternalRow = {
val totalSize = row1.length + row2.length
val copiedValues = new Array[Any](totalSize)
var i = 0
@@ -460,31 +457,31 @@ class JoinedRow4 extends catalyst.InternalRow {
/**
* JIT HACK: Replace with macros
*/
-class JoinedRow5 extends catalyst.InternalRow {
- private[this] var row1: catalyst.InternalRow = _
- private[this] var row2: catalyst.InternalRow = _
+class JoinedRow5 extends InternalRow {
+ private[this] var row1: InternalRow = _
+ private[this] var row2: InternalRow = _
- def this(left: catalyst.InternalRow, right: catalyst.InternalRow) = {
+ def this(left: InternalRow, right: InternalRow) = {
this()
row1 = left
row2 = right
}
/** Updates this JoinedRow to used point at two new base rows. Returns itself. */
- def apply(r1: catalyst.InternalRow, r2: catalyst.InternalRow): catalyst.InternalRow = {
+ def apply(r1: InternalRow, r2: InternalRow): InternalRow = {
row1 = r1
row2 = r2
this
}
/** Updates this JoinedRow by updating its left base row. Returns itself. */
- def withLeft(newLeft: catalyst.InternalRow): catalyst.InternalRow = {
+ def withLeft(newLeft: InternalRow): InternalRow = {
row1 = newLeft
this
}
/** Updates this JoinedRow by updating its right base row. Returns itself. */
- def withRight(newRight: catalyst.InternalRow): catalyst.InternalRow = {
+ def withRight(newRight: InternalRow): InternalRow = {
row2 = newRight
this
}
@@ -526,7 +523,7 @@ class JoinedRow5 extends catalyst.InternalRow {
override def getAs[T](i: Int): T =
if (i < row1.length) row1.getAs[T](i) else row2.getAs[T](i - row1.length)
- override def copy(): catalyst.InternalRow = {
+ override def copy(): InternalRow = {
val totalSize = row1.length + row2.length
val copiedValues = new Array[Any](totalSize)
var i = 0
@@ -554,31 +551,31 @@ class JoinedRow5 extends catalyst.InternalRow {
/**
* JIT HACK: Replace with macros
*/
-class JoinedRow6 extends catalyst.InternalRow {
- private[this] var row1: catalyst.InternalRow = _
- private[this] var row2: catalyst.InternalRow = _
+class JoinedRow6 extends InternalRow {
+ private[this] var row1: InternalRow = _
+ private[this] var row2: InternalRow = _
- def this(left: catalyst.InternalRow, right: catalyst.InternalRow) = {
+ def this(left: InternalRow, right: InternalRow) = {
this()
row1 = left
row2 = right
}
/** Updates this JoinedRow to used point at two new base rows. Returns itself. */
- def apply(r1: catalyst.InternalRow, r2: catalyst.InternalRow): catalyst.InternalRow = {
+ def apply(r1: InternalRow, r2: InternalRow): InternalRow = {
row1 = r1
row2 = r2
this
}
/** Updates this JoinedRow by updating its left base row. Returns itself. */
- def withLeft(newLeft: catalyst.InternalRow): catalyst.InternalRow = {
+ def withLeft(newLeft: InternalRow): InternalRow = {
row1 = newLeft
this
}
/** Updates this JoinedRow by updating its right base row. Returns itself. */
- def withRight(newRight: catalyst.InternalRow): catalyst.InternalRow = {
+ def withRight(newRight: InternalRow): InternalRow = {
row2 = newRight
this
}
@@ -620,7 +617,7 @@ class JoinedRow6 extends catalyst.InternalRow {
override def getAs[T](i: Int): T =
if (i < row1.length) row1.getAs[T](i) else row2.getAs[T](i - row1.length)
- override def copy(): catalyst.InternalRow = {
+ override def copy(): InternalRow = {
val totalSize = row1.length + row2.length
val copiedValues = new Array[Any](totalSize)
var i = 0
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ScalaUdf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ScalaUdf.scala
index 40f235fc19..b3ce698c55 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ScalaUdf.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ScalaUdf.scala
@@ -58,7 +58,7 @@ case class ScalaUdf(function: AnyRef, dataType: DataType, children: Seq[Expressi
private[this] val f = children.size match {
case 0 =>
val func = function.asInstanceOf[() => Any]
- (input: catalyst.InternalRow) => {
+ (input: InternalRow) => {
func()
}
@@ -66,7 +66,7 @@ case class ScalaUdf(function: AnyRef, dataType: DataType, children: Seq[Expressi
val func = function.asInstanceOf[(Any) => Any]
val child0 = children(0)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
- (input: catalyst.InternalRow) => {
+ (input: InternalRow) => {
func(
converter0(child0.eval(input)))
}
@@ -77,7 +77,7 @@ case class ScalaUdf(function: AnyRef, dataType: DataType, children: Seq[Expressi
val child1 = children(1)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
- (input: catalyst.InternalRow) => {
+ (input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)))
@@ -91,7 +91,7 @@ case class ScalaUdf(function: AnyRef, dataType: DataType, children: Seq[Expressi
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
- (input: catalyst.InternalRow) => {
+ (input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
@@ -108,7 +108,7 @@ case class ScalaUdf(function: AnyRef, dataType: DataType, children: Seq[Expressi
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
- (input: catalyst.InternalRow) => {
+ (input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
@@ -128,7 +128,7 @@ case class ScalaUdf(function: AnyRef, dataType: DataType, children: Seq[Expressi
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
- (input: catalyst.InternalRow) => {
+ (input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
@@ -151,7 +151,7 @@ case class ScalaUdf(function: AnyRef, dataType: DataType, children: Seq[Expressi
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
lazy val converter5 = CatalystTypeConverters.createToScalaConverter(child5.dataType)
- (input: catalyst.InternalRow) => {
+ (input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
@@ -177,7 +177,7 @@ case class ScalaUdf(function: AnyRef, dataType: DataType, children: Seq[Expressi
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
lazy val converter5 = CatalystTypeConverters.createToScalaConverter(child5.dataType)
lazy val converter6 = CatalystTypeConverters.createToScalaConverter(child6.dataType)
- (input: catalyst.InternalRow) => {
+ (input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
@@ -206,7 +206,7 @@ case class ScalaUdf(function: AnyRef, dataType: DataType, children: Seq[Expressi
lazy val converter5 = CatalystTypeConverters.createToScalaConverter(child5.dataType)
lazy val converter6 = CatalystTypeConverters.createToScalaConverter(child6.dataType)
lazy val converter7 = CatalystTypeConverters.createToScalaConverter(child7.dataType)
- (input: catalyst.InternalRow) => {
+ (input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
@@ -238,7 +238,7 @@ case class ScalaUdf(function: AnyRef, dataType: DataType, children: Seq[Expressi
lazy val converter6 = CatalystTypeConverters.createToScalaConverter(child6.dataType)
lazy val converter7 = CatalystTypeConverters.createToScalaConverter(child7.dataType)
lazy val converter8 = CatalystTypeConverters.createToScalaConverter(child8.dataType)
- (input: catalyst.InternalRow) => {
+ (input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
@@ -273,7 +273,7 @@ case class ScalaUdf(function: AnyRef, dataType: DataType, children: Seq[Expressi
lazy val converter7 = CatalystTypeConverters.createToScalaConverter(child7.dataType)
lazy val converter8 = CatalystTypeConverters.createToScalaConverter(child8.dataType)
lazy val converter9 = CatalystTypeConverters.createToScalaConverter(child9.dataType)
- (input: catalyst.InternalRow) => {
+ (input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
@@ -311,7 +311,7 @@ case class ScalaUdf(function: AnyRef, dataType: DataType, children: Seq[Expressi
lazy val converter8 = CatalystTypeConverters.createToScalaConverter(child8.dataType)
lazy val converter9 = CatalystTypeConverters.createToScalaConverter(child9.dataType)
lazy val converter10 = CatalystTypeConverters.createToScalaConverter(child10.dataType)
- (input: catalyst.InternalRow) => {
+ (input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
@@ -352,7 +352,7 @@ case class ScalaUdf(function: AnyRef, dataType: DataType, children: Seq[Expressi
lazy val converter9 = CatalystTypeConverters.createToScalaConverter(child9.dataType)
lazy val converter10 = CatalystTypeConverters.createToScalaConverter(child10.dataType)
lazy val converter11 = CatalystTypeConverters.createToScalaConverter(child11.dataType)
- (input: catalyst.InternalRow) => {
+ (input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
@@ -396,7 +396,7 @@ case class ScalaUdf(function: AnyRef, dataType: DataType, children: Seq[Expressi
lazy val converter10 = CatalystTypeConverters.createToScalaConverter(child10.dataType)
lazy val converter11 = CatalystTypeConverters.createToScalaConverter(child11.dataType)
lazy val converter12 = CatalystTypeConverters.createToScalaConverter(child12.dataType)
- (input: catalyst.InternalRow) => {
+ (input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
@@ -443,7 +443,7 @@ case class ScalaUdf(function: AnyRef, dataType: DataType, children: Seq[Expressi
lazy val converter11 = CatalystTypeConverters.createToScalaConverter(child11.dataType)
lazy val converter12 = CatalystTypeConverters.createToScalaConverter(child12.dataType)
lazy val converter13 = CatalystTypeConverters.createToScalaConverter(child13.dataType)
- (input: catalyst.InternalRow) => {
+ (input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
@@ -493,7 +493,7 @@ case class ScalaUdf(function: AnyRef, dataType: DataType, children: Seq[Expressi
lazy val converter12 = CatalystTypeConverters.createToScalaConverter(child12.dataType)
lazy val converter13 = CatalystTypeConverters.createToScalaConverter(child13.dataType)
lazy val converter14 = CatalystTypeConverters.createToScalaConverter(child14.dataType)
- (input: catalyst.InternalRow) => {
+ (input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
@@ -546,7 +546,7 @@ case class ScalaUdf(function: AnyRef, dataType: DataType, children: Seq[Expressi
lazy val converter13 = CatalystTypeConverters.createToScalaConverter(child13.dataType)
lazy val converter14 = CatalystTypeConverters.createToScalaConverter(child14.dataType)
lazy val converter15 = CatalystTypeConverters.createToScalaConverter(child15.dataType)
- (input: catalyst.InternalRow) => {
+ (input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
@@ -602,7 +602,7 @@ case class ScalaUdf(function: AnyRef, dataType: DataType, children: Seq[Expressi
lazy val converter14 = CatalystTypeConverters.createToScalaConverter(child14.dataType)
lazy val converter15 = CatalystTypeConverters.createToScalaConverter(child15.dataType)
lazy val converter16 = CatalystTypeConverters.createToScalaConverter(child16.dataType)
- (input: catalyst.InternalRow) => {
+ (input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
@@ -661,7 +661,7 @@ case class ScalaUdf(function: AnyRef, dataType: DataType, children: Seq[Expressi
lazy val converter15 = CatalystTypeConverters.createToScalaConverter(child15.dataType)
lazy val converter16 = CatalystTypeConverters.createToScalaConverter(child16.dataType)
lazy val converter17 = CatalystTypeConverters.createToScalaConverter(child17.dataType)
- (input: catalyst.InternalRow) => {
+ (input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
@@ -723,7 +723,7 @@ case class ScalaUdf(function: AnyRef, dataType: DataType, children: Seq[Expressi
lazy val converter16 = CatalystTypeConverters.createToScalaConverter(child16.dataType)
lazy val converter17 = CatalystTypeConverters.createToScalaConverter(child17.dataType)
lazy val converter18 = CatalystTypeConverters.createToScalaConverter(child18.dataType)
- (input: catalyst.InternalRow) => {
+ (input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
@@ -788,7 +788,7 @@ case class ScalaUdf(function: AnyRef, dataType: DataType, children: Seq[Expressi
lazy val converter17 = CatalystTypeConverters.createToScalaConverter(child17.dataType)
lazy val converter18 = CatalystTypeConverters.createToScalaConverter(child18.dataType)
lazy val converter19 = CatalystTypeConverters.createToScalaConverter(child19.dataType)
- (input: catalyst.InternalRow) => {
+ (input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
@@ -856,7 +856,7 @@ case class ScalaUdf(function: AnyRef, dataType: DataType, children: Seq[Expressi
lazy val converter18 = CatalystTypeConverters.createToScalaConverter(child18.dataType)
lazy val converter19 = CatalystTypeConverters.createToScalaConverter(child19.dataType)
lazy val converter20 = CatalystTypeConverters.createToScalaConverter(child20.dataType)
- (input: catalyst.InternalRow) => {
+ (input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
@@ -927,7 +927,7 @@ case class ScalaUdf(function: AnyRef, dataType: DataType, children: Seq[Expressi
lazy val converter19 = CatalystTypeConverters.createToScalaConverter(child19.dataType)
lazy val converter20 = CatalystTypeConverters.createToScalaConverter(child20.dataType)
lazy val converter21 = CatalystTypeConverters.createToScalaConverter(child21.dataType)
- (input: catalyst.InternalRow) => {
+ (input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
@@ -956,6 +956,6 @@ case class ScalaUdf(function: AnyRef, dataType: DataType, children: Seq[Expressi
// scalastyle:on
private[this] val converter = CatalystTypeConverters.createToCatalystConverter(dataType)
- override def eval(input: catalyst.InternalRow): Any = converter(f(input))
+ override def eval(input: InternalRow): Any = converter(f(input))
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregates.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregates.scala
index f9e8150a68..00d2e499c5 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregates.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregates.scala
@@ -38,7 +38,7 @@ abstract class AggregateExpression extends Expression {
* [[AggregateExpression.eval]] should never be invoked because [[AggregateExpression]]'s are
* replaced with a physical aggregate operator at runtime.
*/
- override def eval(input: catalyst.InternalRow = null): Any =
+ override def eval(input: InternalRow = null): Any =
throw new TreeNodeException(this, s"No function to evaluate expression. type: ${this.nodeName}")
}
@@ -81,7 +81,7 @@ abstract class AggregateFunction
override def nullable: Boolean = base.nullable
override def dataType: DataType = base.dataType
- def update(input: catalyst.InternalRow): Unit
+ def update(input: InternalRow): Unit
// Do we really need this?
override def newInstance(): AggregateFunction = {
@@ -109,7 +109,7 @@ case class MinFunction(expr: Expression, base: AggregateExpression) extends Aggr
val currentMin: MutableLiteral = MutableLiteral(null, expr.dataType)
val cmp = GreaterThan(currentMin, expr)
- override def update(input: catalyst.InternalRow): Unit = {
+ override def update(input: InternalRow): Unit = {
if (currentMin.value == null) {
currentMin.value = expr.eval(input)
} else if (cmp.eval(input) == true) {
@@ -117,7 +117,7 @@ case class MinFunction(expr: Expression, base: AggregateExpression) extends Aggr
}
}
- override def eval(input: catalyst.InternalRow): Any = currentMin.value
+ override def eval(input: InternalRow): Any = currentMin.value
}
case class Max(child: Expression) extends PartialAggregate with trees.UnaryNode[Expression] {
@@ -140,7 +140,7 @@ case class MaxFunction(expr: Expression, base: AggregateExpression) extends Aggr
val currentMax: MutableLiteral = MutableLiteral(null, expr.dataType)
val cmp = LessThan(currentMax, expr)
- override def update(input: catalyst.InternalRow): Unit = {
+ override def update(input: InternalRow): Unit = {
if (currentMax.value == null) {
currentMax.value = expr.eval(input)
} else if (cmp.eval(input) == true) {
@@ -148,7 +148,7 @@ case class MaxFunction(expr: Expression, base: AggregateExpression) extends Aggr
}
}
- override def eval(input: catalyst.InternalRow): Any = currentMax.value
+ override def eval(input: InternalRow): Any = currentMax.value
}
case class Count(child: Expression) extends PartialAggregate with trees.UnaryNode[Expression] {
@@ -206,14 +206,14 @@ case class CollectHashSetFunction(
@transient
val distinctValue = new InterpretedProjection(expr)
- override def update(input: catalyst.InternalRow): Unit = {
+ override def update(input: InternalRow): Unit = {
val evaluatedExpr = distinctValue(input)
if (!evaluatedExpr.anyNull) {
seen.add(evaluatedExpr)
}
}
- override def eval(input: catalyst.InternalRow): Any = {
+ override def eval(input: InternalRow): Any = {
seen
}
}
@@ -239,7 +239,7 @@ case class CombineSetsAndCountFunction(
val seen = new OpenHashSet[Any]()
- override def update(input: catalyst.InternalRow): Unit = {
+ override def update(input: InternalRow): Unit = {
val inputSetEval = inputSet.eval(input).asInstanceOf[OpenHashSet[Any]]
val inputIterator = inputSetEval.iterator
while (inputIterator.hasNext) {
@@ -247,7 +247,7 @@ case class CombineSetsAndCountFunction(
}
}
- override def eval(input: catalyst.InternalRow): Any = seen.size.toLong
+ override def eval(input: InternalRow): Any = seen.size.toLong
}
/** The data type of ApproxCountDistinctPartition since its output is a HyperLogLog object. */
@@ -454,7 +454,7 @@ case class CombineSetsAndSumFunction(
val seen = new OpenHashSet[Any]()
- override def update(input: catalyst.InternalRow): Unit = {
+ override def update(input: InternalRow): Unit = {
val inputSetEval = inputSet.eval(input).asInstanceOf[OpenHashSet[Any]]
val inputIterator = inputSetEval.iterator
while (inputIterator.hasNext) {
@@ -462,8 +462,8 @@ case class CombineSetsAndSumFunction(
}
}
- override def eval(input: catalyst.InternalRow): Any = {
- val casted = seen.asInstanceOf[OpenHashSet[catalyst.InternalRow]]
+ override def eval(input: InternalRow): Any = {
+ val casted = seen.asInstanceOf[OpenHashSet[InternalRow]]
if (casted.size == 0) {
null
} else {
@@ -525,7 +525,7 @@ case class AverageFunction(expr: Expression, base: AggregateExpression)
private def addFunction(value: Any) = Add(sum,
Cast(Literal.create(value, expr.dataType), calcType))
- override def eval(input: catalyst.InternalRow): Any = {
+ override def eval(input: InternalRow): Any = {
if (count == 0L) {
null
} else {
@@ -542,7 +542,7 @@ case class AverageFunction(expr: Expression, base: AggregateExpression)
}
}
- override def update(input: catalyst.InternalRow): Unit = {
+ override def update(input: InternalRow): Unit = {
val evaluatedExpr = expr.eval(input)
if (evaluatedExpr != null) {
count += 1
@@ -556,14 +556,14 @@ case class CountFunction(expr: Expression, base: AggregateExpression) extends Ag
var count: Long = _
- override def update(input: catalyst.InternalRow): Unit = {
+ override def update(input: InternalRow): Unit = {
val evaluatedExpr = expr.eval(input)
if (evaluatedExpr != null) {
count += 1L
}
}
- override def eval(input: catalyst.InternalRow): Any = count
+ override def eval(input: InternalRow): Any = count
}
case class ApproxCountDistinctPartitionFunction(
@@ -575,14 +575,14 @@ case class ApproxCountDistinctPartitionFunction(
private val hyperLogLog = new HyperLogLog(relativeSD)
- override def update(input: catalyst.InternalRow): Unit = {
+ override def update(input: InternalRow): Unit = {
val evaluatedExpr = expr.eval(input)
if (evaluatedExpr != null) {
hyperLogLog.offer(evaluatedExpr)
}
}
- override def eval(input: catalyst.InternalRow): Any = hyperLogLog
+ override def eval(input: InternalRow): Any = hyperLogLog
}
case class ApproxCountDistinctMergeFunction(
@@ -594,12 +594,12 @@ case class ApproxCountDistinctMergeFunction(
private val hyperLogLog = new HyperLogLog(relativeSD)
- override def update(input: catalyst.InternalRow): Unit = {
+ override def update(input: InternalRow): Unit = {
val evaluatedExpr = expr.eval(input)
hyperLogLog.addAll(evaluatedExpr.asInstanceOf[HyperLogLog])
}
- override def eval(input: catalyst.InternalRow): Any = hyperLogLog.cardinality()
+ override def eval(input: InternalRow): Any = hyperLogLog.cardinality()
}
case class SumFunction(expr: Expression, base: AggregateExpression) extends AggregateFunction {
@@ -620,11 +620,11 @@ case class SumFunction(expr: Expression, base: AggregateExpression) extends Aggr
private val addFunction =
Coalesce(Seq(Add(Coalesce(Seq(sum, zero)), Cast(expr, calcType)), sum, zero))
- override def update(input: catalyst.InternalRow): Unit = {
+ override def update(input: InternalRow): Unit = {
sum.update(addFunction, input)
}
- override def eval(input: catalyst.InternalRow): Any = {
+ override def eval(input: InternalRow): Any = {
expr.dataType match {
case DecimalType.Fixed(_, _) =>
Cast(sum, dataType).eval(null)
@@ -653,7 +653,7 @@ case class CombineSumFunction(expr: Expression, base: AggregateExpression)
private val addFunction =
Coalesce(Seq(Add(Coalesce(Seq(sum, zero)), Cast(expr, calcType)), sum, zero))
- override def update(input: catalyst.InternalRow): Unit = {
+ override def update(input: InternalRow): Unit = {
val result = expr.eval(input)
// partial sum result can be null only when no input rows present
if(result != null) {
@@ -661,7 +661,7 @@ case class CombineSumFunction(expr: Expression, base: AggregateExpression)
}
}
- override def eval(input: catalyst.InternalRow): Any = {
+ override def eval(input: InternalRow): Any = {
expr.dataType match {
case DecimalType.Fixed(_, _) =>
Cast(sum, dataType).eval(null)
@@ -677,14 +677,14 @@ case class SumDistinctFunction(expr: Expression, base: AggregateExpression)
private val seen = new scala.collection.mutable.HashSet[Any]()
- override def update(input: catalyst.InternalRow): Unit = {
+ override def update(input: InternalRow): Unit = {
val evaluatedExpr = expr.eval(input)
if (evaluatedExpr != null) {
seen += evaluatedExpr
}
}
- override def eval(input: catalyst.InternalRow): Any = {
+ override def eval(input: InternalRow): Any = {
if (seen.size == 0) {
null
} else {
@@ -708,14 +708,14 @@ case class CountDistinctFunction(
@transient
val distinctValue = new InterpretedProjection(expr)
- override def update(input: catalyst.InternalRow): Unit = {
+ override def update(input: InternalRow): Unit = {
val evaluatedExpr = distinctValue(input)
if (!evaluatedExpr.anyNull) {
seen.add(evaluatedExpr)
}
}
- override def eval(input: catalyst.InternalRow): Any = seen.size.toLong
+ override def eval(input: InternalRow): Any = seen.size.toLong
}
case class FirstFunction(expr: Expression, base: AggregateExpression) extends AggregateFunction {
@@ -723,13 +723,13 @@ case class FirstFunction(expr: Expression, base: AggregateExpression) extends Ag
var result: Any = null
- override def update(input: catalyst.InternalRow): Unit = {
+ override def update(input: InternalRow): Unit = {
if (result == null) {
result = expr.eval(input)
}
}
- override def eval(input: catalyst.InternalRow): Any = result
+ override def eval(input: InternalRow): Any = result
}
case class LastFunction(expr: Expression, base: AggregateExpression) extends AggregateFunction {
@@ -737,11 +737,11 @@ case class LastFunction(expr: Expression, base: AggregateExpression) extends Agg
var result: Any = null
- override def update(input: catalyst.InternalRow): Unit = {
+ override def update(input: InternalRow): Unit = {
result = input
}
- override def eval(input: catalyst.InternalRow): Any = {
- if (result != null) expr.eval(result.asInstanceOf[catalyst.InternalRow]) else null
+ override def eval(input: InternalRow): Any = {
+ if (result != null) expr.eval(result.asInstanceOf[InternalRow]) else null
}
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
index 0ba2ff75aa..18ddac1b59 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
@@ -30,7 +30,7 @@ abstract class UnaryArithmetic extends UnaryExpression {
override def nullable: Boolean = child.nullable
override def dataType: DataType = child.dataType
- override def eval(input: catalyst.InternalRow): Any = {
+ override def eval(input: InternalRow): Any = {
val evalE = child.eval(input)
if (evalE == null) {
null
@@ -125,7 +125,7 @@ abstract class BinaryArithmetic extends BinaryExpression {
protected def checkTypesInternal(t: DataType): TypeCheckResult
- override def eval(input: catalyst.InternalRow): Any = {
+ override def eval(input: InternalRow): Any = {
val evalE1 = left.eval(input)
if(evalE1 == null) {
null
@@ -220,7 +220,7 @@ case class Divide(left: Expression, right: Expression) extends BinaryArithmetic
case it: IntegralType => it.integral.asInstanceOf[Integral[Any]].quot
}
- override def eval(input: catalyst.InternalRow): Any = {
+ override def eval(input: InternalRow): Any = {
val evalE2 = right.eval(input)
if (evalE2 == null || evalE2 == 0) {
null
@@ -280,7 +280,7 @@ case class Remainder(left: Expression, right: Expression) extends BinaryArithmet
case i: FractionalType => i.asIntegral.asInstanceOf[Integral[Any]]
}
- override def eval(input: catalyst.InternalRow): Any = {
+ override def eval(input: InternalRow): Any = {
val evalE2 = right.eval(input)
if (evalE2 == null || evalE2 == 0) {
null
@@ -331,7 +331,7 @@ case class MaxOf(left: Expression, right: Expression) extends BinaryArithmetic {
private lazy val ordering = TypeUtils.getOrdering(dataType)
- override def eval(input: catalyst.InternalRow): Any = {
+ override def eval(input: InternalRow): Any = {
val evalE1 = left.eval(input)
val evalE2 = right.eval(input)
if (evalE1 == null) {
@@ -385,7 +385,7 @@ case class MinOf(left: Expression, right: Expression) extends BinaryArithmetic {
private lazy val ordering = TypeUtils.getOrdering(dataType)
- override def eval(input: catalyst.InternalRow): Any = {
+ override def eval(input: InternalRow): Any = {
val evalE1 = left.eval(input)
val evalE2 = right.eval(input)
if (evalE1 == null) {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala
index 244a06638f..54f06aaa10 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala
@@ -35,7 +35,7 @@ class IntegerHashSet extends org.apache.spark.util.collection.OpenHashSet[Int]
class LongHashSet extends org.apache.spark.util.collection.OpenHashSet[Long]
/**
- * Java source for evaluating an [[Expression]] given a [[catalyst.InternalRow]] of input.
+ * Java source for evaluating an [[Expression]] given a [[InternalRow]] of input.
*
* @param code The sequence of statements required to evaluate the expression.
* @param isNull A term that holds a boolean value representing whether the expression evaluated
@@ -184,13 +184,13 @@ class CodeGenContext {
}
/**
- * List of data types that have special accessors and setters in [[catalyst.InternalRow]].
+ * List of data types that have special accessors and setters in [[InternalRow]].
*/
val nativeTypes =
Seq(IntegerType, BooleanType, LongType, DoubleType, FloatType, ShortType, ByteType)
/**
- * Returns true if the data type has a special accessor and setter in [[catalyst.InternalRow]].
+ * Returns true if the data type has a special accessor and setter in [[InternalRow]].
*/
def isNativeType(dt: DataType): Boolean = nativeTypes.contains(dt)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateMutableProjection.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateMutableProjection.scala
index 35cb954c54..573a9ea0a5 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateMutableProjection.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateMutableProjection.scala
@@ -17,7 +17,6 @@
package org.apache.spark.sql.catalyst.expressions.codegen
-import org.apache.spark.sql.catalyst
import org.apache.spark.sql.catalyst.expressions._
// MutableProjection is not accessible in Java
@@ -25,7 +24,7 @@ abstract class BaseMutableProjection extends MutableProjection {}
/**
* Generates byte code that produces a [[MutableRow]] object that can update itself based on a new
- * input [[catalyst.InternalRow]] for a fixed set of [[Expression Expressions]].
+ * input [[InternalRow]] for a fixed set of [[Expression Expressions]].
*/
object GenerateMutableProjection extends CodeGenerator[Seq[Expression], () => MutableProjection] {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateOrdering.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateOrdering.scala
index db5d570aeb..3e9ee60f33 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateOrdering.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateOrdering.scala
@@ -19,15 +19,15 @@ package org.apache.spark.sql.catalyst.expressions.codegen
import org.apache.spark.Logging
import org.apache.spark.annotation.Private
-import org.apache.spark.sql.{catalyst, Row}
+import org.apache.spark.sql.Row
import org.apache.spark.sql.catalyst.expressions._
/**
* Inherits some default implementation for Java from `Ordering[Row]`
*/
@Private
-class BaseOrdering extends Ordering[catalyst.InternalRow] {
- def compare(a: catalyst.InternalRow, b: catalyst.InternalRow): Int = {
+class BaseOrdering extends Ordering[InternalRow] {
+ def compare(a: InternalRow, b: InternalRow): Int = {
throw new UnsupportedOperationException
}
}
@@ -37,7 +37,7 @@ class BaseOrdering extends Ordering[catalyst.InternalRow] {
* [[Expression Expressions]].
*/
object GenerateOrdering
- extends CodeGenerator[Seq[SortOrder], Ordering[catalyst.InternalRow]] with Logging {
+ extends CodeGenerator[Seq[SortOrder], Ordering[InternalRow]] with Logging {
import scala.reflect.runtime.universe._
protected def canonicalize(in: Seq[SortOrder]): Seq[SortOrder] =
@@ -46,7 +46,7 @@ object GenerateOrdering
protected def bind(in: Seq[SortOrder], inputSchema: Seq[Attribute]): Seq[SortOrder] =
in.map(BindReferences.bindReference(_, inputSchema))
- protected def create(ordering: Seq[SortOrder]): Ordering[catalyst.InternalRow] = {
+ protected def create(ordering: Seq[SortOrder]): Ordering[InternalRow] = {
val a = newTermName("a")
val b = newTermName("b")
val ctx = newCodeGenContext()
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GeneratePredicate.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GeneratePredicate.scala
index 9e191dc2e9..dad4364bdd 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GeneratePredicate.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GeneratePredicate.scala
@@ -24,20 +24,20 @@ import org.apache.spark.sql.catalyst.expressions._
* Interface for generated predicate
*/
abstract class Predicate {
- def eval(r: catalyst.InternalRow): Boolean
+ def eval(r: InternalRow): Boolean
}
/**
* Generates bytecode that evaluates a boolean [[Expression]] on a given input [[InternalRow]].
*/
-object GeneratePredicate extends CodeGenerator[Expression, (catalyst.InternalRow) => Boolean] {
+object GeneratePredicate extends CodeGenerator[Expression, (InternalRow) => Boolean] {
protected def canonicalize(in: Expression): Expression = ExpressionCanonicalizer.execute(in)
protected def bind(in: Expression, inputSchema: Seq[Attribute]): Expression =
BindReferences.bindReference(in, inputSchema)
- protected def create(predicate: Expression): ((catalyst.InternalRow) => Boolean) = {
+ protected def create(predicate: Expression): ((InternalRow) => Boolean) = {
val ctx = newCodeGenContext()
val eval = predicate.gen(ctx)
val code = s"""
@@ -66,6 +66,6 @@ object GeneratePredicate extends CodeGenerator[Expression, (catalyst.InternalRow
// fetch the only one method `generate(Expression[])`
val m = c.getDeclaredMethods()(0)
val p = m.invoke(c.newInstance(), ctx.references.toArray).asInstanceOf[Predicate]
- (r: catalyst.InternalRow) => p.eval(r)
+ (r: InternalRow) => p.eval(r)
}
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypes.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypes.scala
index a6913cc03c..1aaf9b309e 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypes.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypes.scala
@@ -42,7 +42,7 @@ case class CreateArray(children: Seq[Expression]) extends Expression {
override def nullable: Boolean = false
- override def eval(input: catalyst.InternalRow): Any = {
+ override def eval(input: InternalRow): Any = {
children.map(_.eval(input))
}
@@ -70,7 +70,7 @@ case class CreateStruct(children: Seq[NamedExpression]) extends Expression {
override def nullable: Boolean = false
- override def eval(input: catalyst.InternalRow): Any = {
+ override def eval(input: InternalRow): Any = {
InternalRow(children.map(_.eval(input)): _*)
}
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/conditionals.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/conditionals.scala
index a119c31300..1d7393d3d9 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/conditionals.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/conditionals.scala
@@ -43,7 +43,7 @@ case class If(predicate: Expression, trueValue: Expression, falseValue: Expressi
override def dataType: DataType = trueValue.dataType
- override def eval(input: catalyst.InternalRow): Any = {
+ override def eval(input: InternalRow): Any = {
if (true == predicate.eval(input)) {
trueValue.eval(input)
} else {
@@ -138,7 +138,7 @@ case class CaseWhen(branches: Seq[Expression]) extends CaseWhenLike {
}
/** Written in imperative fashion for performance considerations. */
- override def eval(input: catalyst.InternalRow): Any = {
+ override def eval(input: InternalRow): Any = {
val len = branchesArr.length
var i = 0
// If all branches fail and an elseVal is not provided, the whole statement
@@ -230,7 +230,7 @@ case class CaseKeyWhen(key: Expression, branches: Seq[Expression]) extends CaseW
}
/** Written in imperative fashion for performance considerations. */
- override def eval(input: catalyst.InternalRow): Any = {
+ override def eval(input: InternalRow): Any = {
val evaluatedKey = key.eval(input)
val len = branchesArr.length
var i = 0
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/decimalFunctions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/decimalFunctions.scala
index de8b66bc3b..2bc893af02 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/decimalFunctions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/decimalFunctions.scala
@@ -29,7 +29,7 @@ case class UnscaledValue(child: Expression) extends UnaryExpression {
override def nullable: Boolean = child.nullable
override def toString: String = s"UnscaledValue($child)"
- override def eval(input: catalyst.InternalRow): Any = {
+ override def eval(input: InternalRow): Any = {
val childResult = child.eval(input)
if (childResult == null) {
null
@@ -51,7 +51,7 @@ case class MakeDecimal(child: Expression, precision: Int, scale: Int) extends Un
override def nullable: Boolean = child.nullable
override def toString: String = s"MakeDecimal($child,$precision,$scale)"
- override def eval(input: catalyst.InternalRow): Decimal = {
+ override def eval(input: InternalRow): Decimal = {
val childResult = child.eval(input)
if (childResult == null) {
null
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala
index a80c255a29..f30cb42d12 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala
@@ -54,13 +54,13 @@ abstract class Generator extends Expression {
def elementTypes: Seq[(DataType, Boolean)]
/** Should be implemented by child classes to perform specific Generators. */
- override def eval(input: catalyst.InternalRow): TraversableOnce[catalyst.InternalRow]
+ override def eval(input: InternalRow): TraversableOnce[InternalRow]
/**
* Notifies that there are no more rows to process, clean up code, and additional
* rows can be made here.
*/
- def terminate(): TraversableOnce[catalyst.InternalRow] = Nil
+ def terminate(): TraversableOnce[InternalRow] = Nil
}
/**
@@ -68,22 +68,22 @@ abstract class Generator extends Expression {
*/
case class UserDefinedGenerator(
elementTypes: Seq[(DataType, Boolean)],
- function: catalyst.InternalRow => TraversableOnce[catalyst.InternalRow],
+ function: InternalRow => TraversableOnce[InternalRow],
children: Seq[Expression])
extends Generator {
@transient private[this] var inputRow: InterpretedProjection = _
- @transient private[this] var convertToScala: (catalyst.InternalRow) => catalyst.InternalRow = _
+ @transient private[this] var convertToScala: (InternalRow) => InternalRow = _
private def initializeConverters(): Unit = {
inputRow = new InterpretedProjection(children)
convertToScala = {
val inputSchema = StructType(children.map(e => StructField(e.simpleString, e.dataType, true)))
CatalystTypeConverters.createToScalaConverter(inputSchema)
- }.asInstanceOf[(catalyst.InternalRow => catalyst.InternalRow)]
+ }.asInstanceOf[(InternalRow => InternalRow)]
}
- override def eval(input: catalyst.InternalRow): TraversableOnce[catalyst.InternalRow] = {
+ override def eval(input: InternalRow): TraversableOnce[InternalRow] = {
if (inputRow == null) {
initializeConverters()
}
@@ -109,7 +109,7 @@ case class Explode(child: Expression)
case MapType(kt, vt, valueContainsNull) => (kt, false) :: (vt, valueContainsNull) :: Nil
}
- override def eval(input: catalyst.InternalRow): TraversableOnce[catalyst.InternalRow] = {
+ override def eval(input: InternalRow): TraversableOnce[InternalRow] = {
child.dataType match {
case ArrayType(_, _) =>
val inputArray = child.eval(input).asInstanceOf[Seq[Any]]
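With the rename, a UserDefinedGenerator function is written against the short InternalRow name. A hypothetical example (the lambda and column layout are illustrative, not taken from this patch):

import org.apache.spark.sql.catalyst.InternalRow

// Explode the Seq stored in column 0 into one output row per element.
val explodeFirst: InternalRow => TraversableOnce[InternalRow] =
  row => row(0).asInstanceOf[Seq[Any]].map(v => InternalRow(v))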
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
index d8fff2b84d..6c86a47ba2 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
@@ -88,7 +88,7 @@ case class Literal protected (value: Any, dataType: DataType) extends LeafExpres
case _ => false
}
- override def eval(input: catalyst.InternalRow): Any = value
+ override def eval(input: InternalRow): Any = value
override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
// change the isNull and primitive to consts, to inline them
@@ -143,9 +143,9 @@ case class Literal protected (value: Any, dataType: DataType) extends LeafExpres
case class MutableLiteral(var value: Any, dataType: DataType, nullable: Boolean = true)
extends LeafExpression {
- def update(expression: Expression, input: catalyst.InternalRow): Unit = {
+ def update(expression: Expression, input: InternalRow): Unit = {
value = expression.eval(input)
}
- override def eval(input: catalyst.InternalRow): Any = value
+ override def eval(input: InternalRow): Any = value
}
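A brief usage sketch of MutableLiteral after the rename, with hypothetical expr: Expression and row: InternalRow in scope; update caches the child's result and eval ignores its input:

import org.apache.spark.sql.types.IntegerType

val cache = MutableLiteral(null, IntegerType)
cache.update(expr, row)  // value := expr.eval(row)
cache.eval(null)         // returns the cached value; the input row is unused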
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/math.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/math.scala
index 6f90d607dd..42c596b5b3 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/math.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/math.scala
@@ -35,7 +35,7 @@ abstract class LeafMathExpression(c: Double, name: String)
override def nullable: Boolean = false
override def toString: String = s"$name()"
- override def eval(input: catalyst.InternalRow): Any = c
+ override def eval(input: InternalRow): Any = c
override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
s"""
@@ -61,7 +61,7 @@ abstract class UnaryMathExpression(f: Double => Double, name: String)
override def nullable: Boolean = true
override def toString: String = s"$name($child)"
- override def eval(input: catalyst.InternalRow): Any = {
+ override def eval(input: InternalRow): Any = {
val evalE = child.eval(input)
if (evalE == null) {
null
@@ -104,7 +104,7 @@ abstract class BinaryMathExpression(f: (Double, Double) => Double, name: String)
override def dataType: DataType = DoubleType
- override def eval(input: catalyst.InternalRow): Any = {
+ override def eval(input: InternalRow): Any = {
val evalE1 = left.eval(input)
if (evalE1 == null) {
null
@@ -216,7 +216,7 @@ case class ToRadians(child: Expression) extends UnaryMathExpression(math.toRadia
case class Atan2(left: Expression, right: Expression)
extends BinaryMathExpression(math.atan2, "ATAN2") {
- override def eval(input: catalyst.InternalRow): Any = {
+ override def eval(input: InternalRow): Any = {
val evalE1 = left.eval(input)
if (evalE1 == null) {
null
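All of the eval overrides above share the same null-propagation shape. Isolated as a standalone sketch (a hypothetical helper, not part of the patch, and ignoring the NaN-to-null handling some of these expressions add):

def nullSafeUnary(child: Expression, input: InternalRow)(f: Double => Double): Any = {
  val v = child.eval(input)
  if (v == null) null else f(v.asInstanceOf[Double])
}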
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/namedExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/namedExpressions.scala
index 20505129e9..f22c8a7f6a 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/namedExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/namedExpressions.scala
@@ -115,7 +115,7 @@ case class Alias(child: Expression, name: String)(
// Alias(Generator, xx) need to be transformed into Generate(generator, ...)
override lazy val resolved = childrenResolved && !child.isInstanceOf[Generator]
- override def eval(input: catalyst.InternalRow): Any = child.eval(input)
+ override def eval(input: InternalRow): Any = child.eval(input)
override def gen(ctx: CodeGenContext): GeneratedExpressionCode = child.gen(ctx)
@@ -231,7 +231,7 @@ case class AttributeReference(
}
// Unresolved attributes are transient at compile time and don't get evaluated during execution.
- override def eval(input: catalyst.InternalRow = null): Any =
+ override def eval(input: InternalRow = null): Any =
throw new TreeNodeException(this, s"No function to evaluate expression. type: ${this.nodeName}")
override def toString: String = s"$name#${exprId.id}$typeSuffix"
@@ -253,7 +253,7 @@ case class PrettyAttribute(name: String) extends Attribute with trees.LeafNode[E
override def withName(newName: String): Attribute = throw new UnsupportedOperationException
override def qualifiers: Seq[String] = throw new UnsupportedOperationException
override def exprId: ExprId = throw new UnsupportedOperationException
- override def eval(input: catalyst.InternalRow): Any = throw new UnsupportedOperationException
+ override def eval(input: InternalRow): Any = throw new UnsupportedOperationException
override def nullable: Boolean = throw new UnsupportedOperationException
override def dataType: DataType = NullType
}
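Of the overrides touched here, only Alias.eval computes anything: it is evaluation-transparent and merely renames its child. A quick sketch, assuming the companion apply methods of this era:

val aliased = Alias(Literal(42), "answer")()
aliased.eval(InternalRow())  // 42, exactly what Literal(42).eval returns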
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/nullFunctions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/nullFunctions.scala
index 292d626f01..0d06589a79 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/nullFunctions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/nullFunctions.scala
@@ -17,10 +17,9 @@
package org.apache.spark.sql.catalyst.expressions
-import org.apache.spark.sql.catalyst
-import org.apache.spark.sql.catalyst.expressions.codegen.{GeneratedExpressionCode, CodeGenContext}
-import org.apache.spark.sql.catalyst.trees
import org.apache.spark.sql.catalyst.analysis.UnresolvedException
+import org.apache.spark.sql.catalyst.expressions.codegen.{CodeGenContext, GeneratedExpressionCode}
+import org.apache.spark.sql.catalyst.trees
import org.apache.spark.sql.types.DataType
case class Coalesce(children: Seq[Expression]) extends Expression {
@@ -44,7 +43,7 @@ case class Coalesce(children: Seq[Expression]) extends Expression {
this, s"Coalesce cannot have children of different types. $childTypes")
}
- override def eval(input: catalyst.InternalRow): Any = {
+ override def eval(input: InternalRow): Any = {
var i = 0
var result: Any = null
val childIterator = children.iterator
@@ -78,7 +77,7 @@ case class IsNull(child: Expression) extends Predicate with trees.UnaryNode[Expr
override def foldable: Boolean = child.foldable
override def nullable: Boolean = false
- override def eval(input: catalyst.InternalRow): Any = {
+ override def eval(input: InternalRow): Any = {
child.eval(input) == null
}
@@ -97,7 +96,7 @@ case class IsNotNull(child: Expression) extends Predicate with trees.UnaryNode[E
override def nullable: Boolean = false
override def toString: String = s"IS NOT NULL $child"
- override def eval(input: catalyst.InternalRow): Any = {
+ override def eval(input: InternalRow): Any = {
child.eval(input) != null
}
@@ -119,7 +118,7 @@ case class AtLeastNNonNulls(n: Int, children: Seq[Expression]) extends Predicate
private[this] val childrenArray = children.toArray
- override def eval(input: catalyst.InternalRow): Boolean = {
+ override def eval(input: InternalRow): Boolean = {
var numNonNulls = 0
var i = 0
while (i < childrenArray.length && numNonNulls < n) {
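The Coalesce loop above returns the first non-null child result. Its behavior over plain Scala values, as a self-contained sketch:

def coalesce(values: Seq[Any]): Any = {
  val it = values.iterator
  var result: Any = null
  while (result == null && it.hasNext) {
    result = it.next()
  }
  result
}

coalesce(Seq(null, null, 3, 4))  // 3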
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/package.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/package.scala
index c2e57b4715..d24d74e7b8 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/package.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/package.scala
@@ -17,8 +17,6 @@
package org.apache.spark.sql.catalyst
-import org.apache.spark.sql.catalyst
-
/**
* A set of classes that can be used to represent trees of relational expressions. A key goal of
* the expression library is to hide the details of naming and scoping from developers who want to
@@ -51,9 +49,9 @@ import org.apache.spark.sql.catalyst
*/
package object expressions {
- type InternalRow = catalyst.InternalRow
+ type InternalRow = org.apache.spark.sql.catalyst.InternalRow
- val InternalRow = catalyst.InternalRow
+ val InternalRow = org.apache.spark.sql.catalyst.InternalRow
/**
* Converts a [[InternalRow]] to another Row given a sequence of expressions that define each
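The fully qualified alias target keeps the package object self-contained once the import of org.apache.spark.sql.catalyst is dropped. Pairing a type alias with a same-named val forwards both the type and its companion object, so code throughout this package can simply write (illustrative):

val row: InternalRow = InternalRow(1, "a")  // the type alias and the companion val both resolve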
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala b/sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala
index f1acdfeea5..9ca168881c 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala
@@ -1060,7 +1060,7 @@ class DataFrame private[sql](
def rowFunction(row: Row): TraversableOnce[InternalRow] = {
f(row(0).asInstanceOf[A]).map(o =>
- catalyst.InternalRow(CatalystTypeConverters.convertToCatalyst(o, dataType)))
+ InternalRow(CatalystTypeConverters.convertToCatalyst(o, dataType)))
}
val generator = UserDefinedGenerator(elementTypes, rowFunction, apply(inputColumn).expr :: Nil)
@@ -1232,11 +1232,11 @@ class DataFrame private[sql](
// Pivot the data so each summary is one row
row.grouped(outputCols.size).toSeq.zip(statistics).map {
case (aggregation, (statistic, _)) =>
- catalyst.InternalRow(statistic :: aggregation.toList: _*)
+ InternalRow(statistic :: aggregation.toList: _*)
}
} else {
// If there are no output columns, just output a single column that contains the stats.
- statistics.map { case (name, _) => catalyst.InternalRow(name) }
+ statistics.map { case (name, _) => InternalRow(name) }
}
// All columns are string type
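The pivot above regroups a flat sequence of aggregates into one row per statistic. A self-contained sketch of the grouped/zip idiom, with assumed shapes (two statistics over two output columns):

val statistics = Seq("count", "mean")
val flat = Seq[Any](10, 20, 1.5, 2.5)  // stat-major: both counts, then both means
val rows = flat.grouped(2).toSeq.zip(statistics).map {
  case (aggregation, statistic) => statistic +: aggregation
}
// rows == Seq(Seq("count", 10, 20), Seq("mean", 1.5, 2.5))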
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/joins/HashOuterJoin.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/joins/HashOuterJoin.scala
index 19aef9978e..bce0e8d70a 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/joins/HashOuterJoin.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/joins/HashOuterJoin.scala
@@ -90,7 +90,7 @@ case class HashOuterJoin(
case r if boundCondition(joinedRow.withRight(r)) => joinedRow.copy()
}
if (temp.size == 0) {
- joinedRow.withRight(rightNullRow).copy.asInstanceOf[InternalRow] :: Nil
+ joinedRow.withRight(rightNullRow).copy :: Nil
} else {
temp
}
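This is the "remove not needed cast" noted in the commit message: joinedRow.withRight(rightNullRow).copy already has the row type the join expects, so the asInstanceOf[InternalRow] was a no-op. The surrounding left-outer logic, paraphrased as a sketch with names assumed from the hunk:

// Emit every matching joined row, or one null-padded row if nothing matched.
val matches = rightRows.collect {
  case r if boundCondition(joinedRow.withRight(r)) => joinedRow.copy()
}
if (matches.isEmpty) joinedRow.withRight(rightNullRow).copy() :: Nil
else matches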