aboutsummaryrefslogtreecommitdiff
path: root/sql
diff options
context:
space:
mode:
author: Reynold Xin <rxin@databricks.com> 2015-07-28 09:43:12 -0700
committer: Reynold Xin <rxin@databricks.com> 2015-07-28 09:43:12 -0700
commitc740bed17215a9608c9eb9d80ffdf0fcf72c3911 (patch)
tree3a9bacc9b1491708366b1528f8069dcc92312543 /sql
parent5a2330e546074013ef706ac09028626912ec5475 (diff)
downloadspark-c740bed17215a9608c9eb9d80ffdf0fcf72c3911.tar.gz
spark-c740bed17215a9608c9eb9d80ffdf0fcf72c3911.tar.bz2
spark-c740bed17215a9608c9eb9d80ffdf0fcf72c3911.zip
[SPARK-9373][SQL] follow up for StructType support in Tungsten projection.
Author: Reynold Xin <rxin@databricks.com> Closes #7720 from rxin/struct-followup and squashes the following commits: d9757f5 [Reynold Xin] [SPARK-9373][SQL] follow up for StructType support in Tungsten projection.
Diffstat (limited to 'sql')
-rw-r--r-- sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeRowWriters.java | 6
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateUnsafeProjection.scala | 40
-rw-r--r-- sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala | 3
3 files changed, 23 insertions, 26 deletions
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeRowWriters.java b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeRowWriters.java
index 8fdd739960..32faad3740 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeRowWriters.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeRowWriters.java
@@ -47,7 +47,7 @@ public class UnsafeRowWriters {
target.getBaseObject(), offset + ((numBytes >> 3) << 3), 0L);
}
- // Write the string to the variable length portion.
+ // Write the bytes to the variable length portion.
input.writeToMemory(target.getBaseObject(), offset);
// Set the fixed length portion.
@@ -73,7 +73,7 @@ public class UnsafeRowWriters {
target.getBaseObject(), offset + ((numBytes >> 3) << 3), 0L);
}
- // Write the string to the variable length portion.
+ // Write the bytes to the variable length portion.
ByteArray.writeToMemory(input, target.getBaseObject(), offset);
// Set the fixed length portion.
@@ -115,7 +115,7 @@ public class UnsafeRowWriters {
target.getBaseObject(), offset + ((numBytes >> 3) << 3), 0L);
}
- // Write the string to the variable length portion.
+ // Write the bytes to the variable length portion.
row.writeToMemory(target.getBaseObject(), offset);
// Set the fixed length portion.
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateUnsafeProjection.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateUnsafeProjection.scala
index 3e87f72858..9a4c00e86a 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateUnsafeProjection.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateUnsafeProjection.scala
@@ -62,14 +62,10 @@ object GenerateUnsafeProjection extends CodeGenerator[Seq[Expression], UnsafePro
val cursor = ctx.freshName("cursor")
val numBytes = ctx.freshName("numBytes")
- val exprs = expressions.zipWithIndex.map { case (e, i) =>
- e.dataType match {
- case st: StructType =>
- createCodeForStruct(ctx, e.gen(ctx), st)
- case _ =>
- e.gen(ctx)
- }
- }
+ val exprs = expressions.map { e => e.dataType match {
+ case st: StructType => createCodeForStruct(ctx, e.gen(ctx), st)
+ case _ => e.gen(ctx)
+ }}
val allExprs = exprs.map(_.code).mkString("\n")
val fixedSize = 8 * exprs.length + UnsafeRow.calculateBitSetWidthInBytes(exprs.length)
@@ -153,20 +149,20 @@ object GenerateUnsafeProjection extends CodeGenerator[Seq[Expression], UnsafePro
val exprs: Seq[GeneratedExpressionCode] = schema.map(_.dataType).zipWithIndex.map {
case (dt, i) => dt match {
- case st: StructType =>
- val nestedStructEv = GeneratedExpressionCode(
- code = "",
- isNull = s"${input.primitive}.isNullAt($i)",
- primitive = s"${ctx.getColumn(input.primitive, dt, i)}"
- )
- createCodeForStruct(ctx, nestedStructEv, st)
- case _ =>
- GeneratedExpressionCode(
- code = "",
- isNull = s"${input.primitive}.isNullAt($i)",
- primitive = s"${ctx.getColumn(input.primitive, dt, i)}"
- )
- }
+ case st: StructType =>
+ val nestedStructEv = GeneratedExpressionCode(
+ code = "",
+ isNull = s"${input.primitive}.isNullAt($i)",
+ primitive = s"${ctx.getColumn(input.primitive, dt, i)}"
+ )
+ createCodeForStruct(ctx, nestedStructEv, st)
+ case _ =>
+ GeneratedExpressionCode(
+ code = "",
+ isNull = s"${input.primitive}.isNullAt($i)",
+ primitive = s"${ctx.getColumn(input.primitive, dt, i)}"
+ )
+ }
}
val allExprs = exprs.map(_.code).mkString("\n")
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala
index 314b85f126..f3ef066528 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala
@@ -339,7 +339,8 @@ private[sql] abstract class SparkStrategies extends QueryPlanner[SparkPlan] {
* if necessary.
*/
def getSortOperator(sortExprs: Seq[SortOrder], global: Boolean, child: SparkPlan): SparkPlan = {
- if (sqlContext.conf.unsafeEnabled && UnsafeExternalSort.supportsSchema(child.schema)) {
+ if (sqlContext.conf.unsafeEnabled && sqlContext.conf.codegenEnabled &&
+ UnsafeExternalSort.supportsSchema(child.schema)) {
execution.UnsafeExternalSort(sortExprs, global, child)
} else if (sqlContext.conf.externalSortEnabled) {
execution.ExternalSort(sortExprs, global, child)