author    Michael Armbrust <michael@databricks.com>    2015-10-07 10:17:29 -0700
committer Michael Armbrust <michael@databricks.com>    2015-10-07 10:17:29 -0700
commit    f5d154bc731aedfc2eecdb4ed6af8cac820511c9 (patch)
tree      77d132adc5870ff6fbe861a5cca6913e3f50ef59 /sql
parent    9672602c7ecb8117a1edb04067e2e3e776ee10d2 (diff)
download  spark-f5d154bc731aedfc2eecdb4ed6af8cac820511c9.tar.gz
          spark-f5d154bc731aedfc2eecdb4ed6af8cac820511c9.tar.bz2
          spark-f5d154bc731aedfc2eecdb4ed6af8cac820511c9.zip
[SPARK-10966] [SQL] Codegen framework cleanup
This PR is mostly cosmetic and cleans up some warts in codegen (nearly all of which were inherited from the original quasiquote version).
- Add line numbers to errors (in stack traces when debug logging is on, and always for compile failures)
- Use a variable for the input row instead of hardcoding "i" everywhere
- Rename `primitive` -> `value` (since it's often actually an object)

Author: Michael Armbrust <michael@databricks.com>

Closes #9006 from marmbrus/codegen-cleanup.
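For illustration only, a minimal sketch of what the second and third points amount to. The classes below are hypothetical, simplified stand-ins (same names as the real ones in org.apache.spark.sql.catalyst.expressions.codegen, but with reduced fields and a made-up helper); they are not the actual Spark sources, just the shape of the change: generated snippets reference ctx.INPUT_ROW instead of a hardcoded "i", and assign to ev.value instead of ev.primitive.

object CodegenSketch {
  // Simplified stand-in for the real holder of generated expression code.
  case class GeneratedExpressionCode(var code: String, var isNull: String, var value: String)

  class CodeGenContext {
    // The variable name used for the input row in generated code (previously hardcoded as "i").
    final val INPUT_ROW = "i"
  }

  // Hypothetical BoundReference-style template: reads from ctx.INPUT_ROW and assigns to
  // ev.value (formerly ev.primitive); the caller supplies the Java type, default value
  // and accessor expression as strings.
  def genBoundReference(
      ctx: CodeGenContext,
      ev: GeneratedExpressionCode,
      ordinal: Int,
      javaType: String,
      defaultValue: String,
      getValue: String): String =
    s"""
       boolean ${ev.isNull} = ${ctx.INPUT_ROW}.isNullAt($ordinal);
       $javaType ${ev.value} = ${ev.isNull} ? $defaultValue : ($getValue);
     """
}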
Diffstat (limited to 'sql')
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/BoundAttribute.scala | 6
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala | 2
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala | 18
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/InputFileName.scala | 2
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/MonotonicallyIncreasingID.scala | 2
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/SortOrder.scala | 6
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/SparkPartitionID.scala | 2
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala | 62
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala | 47
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodegenFallback.scala | 6
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateMutableProjection.scala | 8
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateOrdering.scala | 10
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GeneratePredicate.scala | 4
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateProjection.scala | 4
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateSafeProjection.scala | 14
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateUnsafeProjection.scala | 20
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala | 4
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeCreator.scala | 16
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeExtractors.scala | 8
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/conditionalExpressions.scala | 36
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala | 62
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/decimalExpressions.scala | 6
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala | 14
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/mathExpressions.scala | 68
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/misc.scala | 10
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/nullExpressions.scala | 24
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala | 34
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/randomExpressions.scala | 4
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/regexpExpressions.scala | 24
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/sets.scala | 10
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringExpressions.scala | 64
31 files changed, 306 insertions, 291 deletions
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/BoundAttribute.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/BoundAttribute.scala
index 473b9b7870..ff1f28ddbb 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/BoundAttribute.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/BoundAttribute.scala
@@ -68,10 +68,10 @@ case class BoundReference(ordinal: Int, dataType: DataType, nullable: Boolean)
override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
val javaType = ctx.javaType(dataType)
- val value = ctx.getValue("i", dataType, ordinal.toString)
+ val value = ctx.getValue(ctx.INPUT_ROW, dataType, ordinal.toString)
s"""
- boolean ${ev.isNull} = i.isNullAt($ordinal);
- $javaType ${ev.primitive} = ${ev.isNull} ? ${ctx.defaultValue(dataType)} : ($value);
+ boolean ${ev.isNull} = ${ctx.INPUT_ROW}.isNullAt($ordinal);
+ $javaType ${ev.value} = ${ev.isNull} ? ${ctx.defaultValue(dataType)} : ($value);
"""
}
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
index f0bce388d9..99d7444dc4 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
@@ -438,7 +438,7 @@ case class Cast(child: Expression, dataType: DataType)
val eval = child.gen(ctx)
val nullSafeCast = nullSafeCastFunction(child.dataType, dataType, ctx)
eval.code +
- castCode(ctx, eval.primitive, eval.isNull, ev.primitive, ev.isNull, dataType, nullSafeCast)
+ castCode(ctx, eval.value, eval.isNull, ev.value, ev.isNull, dataType, nullSafeCast)
}
// three function arguments are: child.primitive, result.primitive and result.isNull
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala
index 0b98f555a1..96284b9b42 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala
@@ -276,7 +276,7 @@ abstract class UnaryExpression extends Expression {
ev: GeneratedExpressionCode,
f: String => String): String = {
nullSafeCodeGen(ctx, ev, eval => {
- s"${ev.primitive} = ${f(eval)};"
+ s"${ev.value} = ${f(eval)};"
})
}
@@ -292,10 +292,10 @@ abstract class UnaryExpression extends Expression {
ev: GeneratedExpressionCode,
f: String => String): String = {
val eval = child.gen(ctx)
- val resultCode = f(eval.primitive)
+ val resultCode = f(eval.value)
eval.code + s"""
boolean ${ev.isNull} = ${eval.isNull};
- ${ctx.javaType(dataType)} ${ev.primitive} = ${ctx.defaultValue(dataType)};
+ ${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};
if (!${ev.isNull}) {
$resultCode
}
@@ -357,7 +357,7 @@ abstract class BinaryExpression extends Expression {
ev: GeneratedExpressionCode,
f: (String, String) => String): String = {
nullSafeCodeGen(ctx, ev, (eval1, eval2) => {
- s"${ev.primitive} = ${f(eval1, eval2)};"
+ s"${ev.value} = ${f(eval1, eval2)};"
})
}
@@ -375,11 +375,11 @@ abstract class BinaryExpression extends Expression {
f: (String, String) => String): String = {
val eval1 = left.gen(ctx)
val eval2 = right.gen(ctx)
- val resultCode = f(eval1.primitive, eval2.primitive)
+ val resultCode = f(eval1.value, eval2.value)
s"""
${eval1.code}
boolean ${ev.isNull} = ${eval1.isNull};
- ${ctx.javaType(dataType)} ${ev.primitive} = ${ctx.defaultValue(dataType)};
+ ${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};
if (!${ev.isNull}) {
${eval2.code}
if (!${eval2.isNull}) {
@@ -482,7 +482,7 @@ abstract class TernaryExpression extends Expression {
ev: GeneratedExpressionCode,
f: (String, String, String) => String): String = {
nullSafeCodeGen(ctx, ev, (eval1, eval2, eval3) => {
- s"${ev.primitive} = ${f(eval1, eval2, eval3)};"
+ s"${ev.value} = ${f(eval1, eval2, eval3)};"
})
}
@@ -499,11 +499,11 @@ abstract class TernaryExpression extends Expression {
ev: GeneratedExpressionCode,
f: (String, String, String) => String): String = {
val evals = children.map(_.gen(ctx))
- val resultCode = f(evals(0).primitive, evals(1).primitive, evals(2).primitive)
+ val resultCode = f(evals(0).value, evals(1).value, evals(2).value)
s"""
${evals(0).code}
boolean ${ev.isNull} = true;
- ${ctx.javaType(dataType)} ${ev.primitive} = ${ctx.defaultValue(dataType)};
+ ${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};
if (!${evals(0).isNull}) {
${evals(1).code}
if (!${evals(1).isNull}) {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/InputFileName.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/InputFileName.scala
index 1e74f71695..d809877817 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/InputFileName.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/InputFileName.scala
@@ -42,7 +42,7 @@ case class InputFileName() extends LeafExpression with Nondeterministic {
override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
ev.isNull = "false"
- s"final ${ctx.javaType(dataType)} ${ev.primitive} = " +
+ s"final ${ctx.javaType(dataType)} ${ev.value} = " +
"org.apache.spark.rdd.SqlNewHadoopRDD.getInputFileName();"
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/MonotonicallyIncreasingID.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/MonotonicallyIncreasingID.scala
index 291b7a5bc3..2d7679fdfe 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/MonotonicallyIncreasingID.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/MonotonicallyIncreasingID.scala
@@ -66,7 +66,7 @@ private[sql] case class MonotonicallyIncreasingID() extends LeafExpression with
ev.isNull = "false"
s"""
- final ${ctx.javaType(dataType)} ${ev.primitive} = $partitionMaskTerm + $countTerm;
+ final ${ctx.javaType(dataType)} ${ev.value} = $partitionMaskTerm + $countTerm;
$countTerm++;
"""
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/SortOrder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/SortOrder.scala
index 98e029035a..290c128d65 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/SortOrder.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/SortOrder.scala
@@ -63,7 +63,7 @@ case class SortPrefix(child: SortOrder) extends UnaryExpression {
override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
val childCode = child.child.gen(ctx)
- val input = childCode.primitive
+ val input = childCode.value
val BinaryPrefixCmp = classOf[BinaryPrefixComparator].getName
val DoublePrefixCmp = classOf[DoublePrefixComparator].getName
@@ -97,10 +97,10 @@ case class SortPrefix(child: SortOrder) extends UnaryExpression {
childCode.code +
s"""
- |long ${ev.primitive} = ${nullValue}L;
+ |long ${ev.value} = ${nullValue}L;
|boolean ${ev.isNull} = false;
|if (!${childCode.isNull}) {
- | ${ev.primitive} = $prefixCode;
+ | ${ev.value} = $prefixCode;
|}
""".stripMargin
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/SparkPartitionID.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/SparkPartitionID.scala
index 4b1772a2de..8bff173d64 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/SparkPartitionID.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/SparkPartitionID.scala
@@ -47,6 +47,6 @@ private[sql] case class SparkPartitionID() extends LeafExpression with Nondeterm
ctx.addMutableState(ctx.JAVA_INT, idTerm,
s"$idTerm = org.apache.spark.TaskContext.getPartitionId();")
ev.isNull = "false"
- s"final ${ctx.javaType(dataType)} ${ev.primitive} = $idTerm;"
+ s"final ${ctx.javaType(dataType)} ${ev.value} = $idTerm;"
}
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
index 98464edf4d..61a17fd7db 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
@@ -42,7 +42,7 @@ case class UnaryMinus(child: Expression) extends UnaryExpression with ExpectsInp
// for example, we could not write --9223372036854775808L in code
s"""
${ctx.javaType(dt)} $originValue = (${ctx.javaType(dt)})($eval);
- ${ev.primitive} = (${ctx.javaType(dt)})(-($originValue));
+ ${ev.value} = (${ctx.javaType(dt)})(-($originValue));
"""})
case dt: CalendarIntervalType => defineCodeGen(ctx, ev, c => s"$c.negate()")
}
@@ -223,20 +223,20 @@ case class Divide(left: Expression, right: Expression) extends BinaryArithmetic
val eval1 = left.gen(ctx)
val eval2 = right.gen(ctx)
val isZero = if (dataType.isInstanceOf[DecimalType]) {
- s"${eval2.primitive}.isZero()"
+ s"${eval2.value}.isZero()"
} else {
- s"${eval2.primitive} == 0"
+ s"${eval2.value} == 0"
}
val javaType = ctx.javaType(dataType)
val divide = if (dataType.isInstanceOf[DecimalType]) {
- s"${eval1.primitive}.$decimalMethod(${eval2.primitive})"
+ s"${eval1.value}.$decimalMethod(${eval2.value})"
} else {
- s"($javaType)(${eval1.primitive} $symbol ${eval2.primitive})"
+ s"($javaType)(${eval1.value} $symbol ${eval2.value})"
}
s"""
${eval2.code}
boolean ${ev.isNull} = false;
- $javaType ${ev.primitive} = ${ctx.defaultValue(javaType)};
+ $javaType ${ev.value} = ${ctx.defaultValue(javaType)};
if (${eval2.isNull} || $isZero) {
${ev.isNull} = true;
} else {
@@ -244,7 +244,7 @@ case class Divide(left: Expression, right: Expression) extends BinaryArithmetic
if (${eval1.isNull}) {
${ev.isNull} = true;
} else {
- ${ev.primitive} = $divide;
+ ${ev.value} = $divide;
}
}
"""
@@ -285,20 +285,20 @@ case class Remainder(left: Expression, right: Expression) extends BinaryArithmet
val eval1 = left.gen(ctx)
val eval2 = right.gen(ctx)
val isZero = if (dataType.isInstanceOf[DecimalType]) {
- s"${eval2.primitive}.isZero()"
+ s"${eval2.value}.isZero()"
} else {
- s"${eval2.primitive} == 0"
+ s"${eval2.value} == 0"
}
val javaType = ctx.javaType(dataType)
val remainder = if (dataType.isInstanceOf[DecimalType]) {
- s"${eval1.primitive}.$decimalMethod(${eval2.primitive})"
+ s"${eval1.value}.$decimalMethod(${eval2.value})"
} else {
- s"($javaType)(${eval1.primitive} $symbol ${eval2.primitive})"
+ s"($javaType)(${eval1.value} $symbol ${eval2.value})"
}
s"""
${eval2.code}
boolean ${ev.isNull} = false;
- $javaType ${ev.primitive} = ${ctx.defaultValue(javaType)};
+ $javaType ${ev.value} = ${ctx.defaultValue(javaType)};
if (${eval2.isNull} || $isZero) {
${ev.isNull} = true;
} else {
@@ -306,7 +306,7 @@ case class Remainder(left: Expression, right: Expression) extends BinaryArithmet
if (${eval1.isNull}) {
${ev.isNull} = true;
} else {
- ${ev.primitive} = $remainder;
+ ${ev.value} = $remainder;
}
}
"""
@@ -341,24 +341,24 @@ case class MaxOf(left: Expression, right: Expression) extends BinaryArithmetic {
override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
val eval1 = left.gen(ctx)
val eval2 = right.gen(ctx)
- val compCode = ctx.genComp(dataType, eval1.primitive, eval2.primitive)
+ val compCode = ctx.genComp(dataType, eval1.value, eval2.value)
eval1.code + eval2.code + s"""
boolean ${ev.isNull} = false;
- ${ctx.javaType(left.dataType)} ${ev.primitive} =
+ ${ctx.javaType(left.dataType)} ${ev.value} =
${ctx.defaultValue(left.dataType)};
if (${eval1.isNull}) {
${ev.isNull} = ${eval2.isNull};
- ${ev.primitive} = ${eval2.primitive};
+ ${ev.value} = ${eval2.value};
} else if (${eval2.isNull}) {
${ev.isNull} = ${eval1.isNull};
- ${ev.primitive} = ${eval1.primitive};
+ ${ev.value} = ${eval1.value};
} else {
if ($compCode > 0) {
- ${ev.primitive} = ${eval1.primitive};
+ ${ev.value} = ${eval1.value};
} else {
- ${ev.primitive} = ${eval2.primitive};
+ ${ev.value} = ${eval2.value};
}
}
"""
@@ -395,24 +395,24 @@ case class MinOf(left: Expression, right: Expression) extends BinaryArithmetic {
override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
val eval1 = left.gen(ctx)
val eval2 = right.gen(ctx)
- val compCode = ctx.genComp(dataType, eval1.primitive, eval2.primitive)
+ val compCode = ctx.genComp(dataType, eval1.value, eval2.value)
eval1.code + eval2.code + s"""
boolean ${ev.isNull} = false;
- ${ctx.javaType(left.dataType)} ${ev.primitive} =
+ ${ctx.javaType(left.dataType)} ${ev.value} =
${ctx.defaultValue(left.dataType)};
if (${eval1.isNull}) {
${ev.isNull} = ${eval2.isNull};
- ${ev.primitive} = ${eval2.primitive};
+ ${ev.value} = ${eval2.value};
} else if (${eval2.isNull}) {
${ev.isNull} = ${eval1.isNull};
- ${ev.primitive} = ${eval1.primitive};
+ ${ev.value} = ${eval1.value};
} else {
if ($compCode < 0) {
- ${ev.primitive} = ${eval1.primitive};
+ ${ev.value} = ${eval1.value};
} else {
- ${ev.primitive} = ${eval2.primitive};
+ ${ev.value} = ${eval2.value};
}
}
"""
@@ -451,9 +451,9 @@ case class Pmod(left: Expression, right: Expression) extends BinaryArithmetic {
s"""
${ctx.javaType(dataType)} r = $eval1.remainder($eval2);
if (r.compare(new org.apache.spark.sql.types.Decimal().set(0)) < 0) {
- ${ev.primitive} = (r.$decimalAdd($eval2)).remainder($eval2);
+ ${ev.value} = (r.$decimalAdd($eval2)).remainder($eval2);
} else {
- ${ev.primitive} = r;
+ ${ev.value} = r;
}
"""
// byte and short are casted into int when add, minus, times or divide
@@ -461,18 +461,18 @@ case class Pmod(left: Expression, right: Expression) extends BinaryArithmetic {
s"""
${ctx.javaType(dataType)} r = (${ctx.javaType(dataType)})($eval1 % $eval2);
if (r < 0) {
- ${ev.primitive} = (${ctx.javaType(dataType)})((r + $eval2) % $eval2);
+ ${ev.value} = (${ctx.javaType(dataType)})((r + $eval2) % $eval2);
} else {
- ${ev.primitive} = r;
+ ${ev.value} = r;
}
"""
case _ =>
s"""
${ctx.javaType(dataType)} r = $eval1 % $eval2;
if (r < 0) {
- ${ev.primitive} = (r + $eval2) % $eval2;
+ ${ev.value} = (r + $eval2) % $eval2;
} else {
- ${ev.primitive} = r;
+ ${ev.value} = r;
}
"""
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala
index 9a28781133..2dd680454b 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala
@@ -42,10 +42,10 @@ class LongHashSet extends org.apache.spark.util.collection.OpenHashSet[Long]
* @param code The sequence of statements required to evaluate the expression.
* @param isNull A term that holds a boolean value representing whether the expression evaluated
* to null.
- * @param primitive A term for a possible primitive value of the result of the evaluation. Not
- * valid if `isNull` is set to `true`.
+ * @param value A term for a (possibly primitive) value of the result of the evaluation. Not
+ * valid if `isNull` is set to `true`.
*/
-case class GeneratedExpressionCode(var code: String, var isNull: String, var primitive: String)
+case class GeneratedExpressionCode(var code: String, var isNull: String, var value: String)
/**
* A context for codegen, which is used to bookkeeping the expressions those are not supported
@@ -99,6 +99,9 @@ class CodeGenContext {
final val JAVA_FLOAT = "float"
final val JAVA_DOUBLE = "double"
+ /** The variable name of the input row in generated code. */
+ final val INPUT_ROW = "i"
+
private val curId = new java.util.concurrent.atomic.AtomicInteger()
/**
@@ -112,21 +115,21 @@ class CodeGenContext {
}
/**
- * Returns the code to access a value in `SpecializedGetters` for a given DataType.
+ * Returns the specialized code to access a value from `inputRow` at `ordinal`.
*/
- def getValue(getter: String, dataType: DataType, ordinal: String): String = {
+ def getValue(input: String, dataType: DataType, ordinal: String): String = {
val jt = javaType(dataType)
dataType match {
- case _ if isPrimitiveType(jt) => s"$getter.get${primitiveTypeName(jt)}($ordinal)"
- case t: DecimalType => s"$getter.getDecimal($ordinal, ${t.precision}, ${t.scale})"
- case StringType => s"$getter.getUTF8String($ordinal)"
- case BinaryType => s"$getter.getBinary($ordinal)"
- case CalendarIntervalType => s"$getter.getInterval($ordinal)"
- case t: StructType => s"$getter.getStruct($ordinal, ${t.size})"
- case _: ArrayType => s"$getter.getArray($ordinal)"
- case _: MapType => s"$getter.getMap($ordinal)"
+ case _ if isPrimitiveType(jt) => s"$input.get${primitiveTypeName(jt)}($ordinal)"
+ case t: DecimalType => s"$input.getDecimal($ordinal, ${t.precision}, ${t.scale})"
+ case StringType => s"$input.getUTF8String($ordinal)"
+ case BinaryType => s"$input.getBinary($ordinal)"
+ case CalendarIntervalType => s"$input.getInterval($ordinal)"
+ case t: StructType => s"$input.getStruct($ordinal, ${t.size})"
+ case _: ArrayType => s"$input.getArray($ordinal)"
+ case _: MapType => s"$input.getMap($ordinal)"
case NullType => "null"
- case _ => s"($jt)$getter.get($ordinal, null)"
+ case _ => s"($jt)$input.get($ordinal, null)"
}
}
@@ -384,11 +387,23 @@ abstract class CodeGenerator[InType <: AnyRef, OutType <: AnyRef] extends Loggin
classOf[UnsafeMapData].getName
))
evaluator.setExtendedClass(classOf[GeneratedClass])
+
+ def formatted = CodeFormatter.format(code)
+ def withLineNums = formatted.split("\n").zipWithIndex.map {
+ case (l, n) => f"${n + 1}%03d $l"
+ }.mkString("\n")
+
+ logDebug({
+ // Only add extra debugging info to byte code when we are going to print the source code.
+ evaluator.setDebuggingInformation(false, true, false)
+ withLineNums
+ })
+
try {
- evaluator.cook(code)
+ evaluator.cook("generated.java", code)
} catch {
case e: Exception =>
- val msg = s"failed to compile: $e\n" + CodeFormatter.format(code)
+ val msg = s"failed to compile: $e\n$withLineNums"
logError(msg, e)
throw new Exception(msg, e)
}
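The interesting part of the CodeGenerator.scala hunk above is the error path: when Janino fails to compile the generated source, the message now includes the formatted source with per-line numbers, so the compiler's line references can be matched to the dump. A minimal standalone sketch of that numbering step, extracted here for illustration only (the helper name and object are assumptions; the real logic lives inline in the compile method shown above):

object CompileErrorSketch {
  // Prefix each line of the formatted generated source with a 3-digit line number,
  // so "Line N" style compile errors can be located in the dumped code.
  def withLineNums(formatted: String): String =
    formatted.split("\n").zipWithIndex.map {
      case (line, n) => f"${n + 1}%03d $line"
    }.mkString("\n")
}

// Example: CompileErrorSketch.withLineNums("class Foo {\n  int x;\n}") returns
// 001 class Foo {
// 002   int x;
// 003 }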
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodegenFallback.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodegenFallback.scala
index 3492d2c618..d51a8dede7 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodegenFallback.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodegenFallback.scala
@@ -34,11 +34,11 @@ trait CodegenFallback extends Expression {
val objectTerm = ctx.freshName("obj")
s"""
/* expression: ${this} */
- Object $objectTerm = expressions[${ctx.references.size - 1}].eval(i);
+ Object $objectTerm = expressions[${ctx.references.size - 1}].eval(${ctx.INPUT_ROW});
boolean ${ev.isNull} = $objectTerm == null;
- ${ctx.javaType(this.dataType)} ${ev.primitive} = ${ctx.defaultValue(this.dataType)};
+ ${ctx.javaType(this.dataType)} ${ev.value} = ${ctx.defaultValue(this.dataType)};
if (!${ev.isNull}) {
- ${ev.primitive} = (${ctx.boxedType(this.dataType)}) $objectTerm;
+ ${ev.value} = (${ctx.boxedType(this.dataType)}) $objectTerm;
}
"""
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateMutableProjection.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateMutableProjection.scala
index 793023b9fb..d82d19185b 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateMutableProjection.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateMutableProjection.scala
@@ -49,7 +49,7 @@ object GenerateMutableProjection extends CodeGenerator[Seq[Expression], () => Mu
if (${evaluationCode.isNull}) {
${ctx.setColumn("mutableRow", e.dataType, i, null)};
} else {
- ${ctx.setColumn("mutableRow", e.dataType, i, evaluationCode.primitive)};
+ ${ctx.setColumn("mutableRow", e.dataType, i, evaluationCode.value)};
}
"""
} else {
@@ -58,12 +58,12 @@ object GenerateMutableProjection extends CodeGenerator[Seq[Expression], () => Mu
if (${evaluationCode.isNull}) {
mutableRow.setNullAt($i);
} else {
- ${ctx.setColumn("mutableRow", e.dataType, i, evaluationCode.primitive)};
+ ${ctx.setColumn("mutableRow", e.dataType, i, evaluationCode.value)};
}
"""
}
}
- val allProjections = ctx.splitExpressions("i", projectionCodes)
+ val allProjections = ctx.splitExpressions(ctx.INPUT_ROW, projectionCodes)
val code = s"""
public Object generate($exprType[] expr) {
@@ -94,7 +94,7 @@ object GenerateMutableProjection extends CodeGenerator[Seq[Expression], () => Mu
}
public Object apply(Object _i) {
- InternalRow i = (InternalRow) _i;
+ InternalRow ${ctx.INPUT_ROW} = (InternalRow) _i;
$allProjections
return mutableRow;
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateOrdering.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateOrdering.scala
index 42be394c3b..c2b420286f 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateOrdering.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateOrdering.scala
@@ -74,21 +74,21 @@ object GenerateOrdering extends CodeGenerator[Seq[SortOrder], Ordering[InternalR
val isNullB = ctx.freshName("isNullB")
val primitiveB = ctx.freshName("primitiveB")
s"""
- i = a;
+ ${ctx.INPUT_ROW} = a;
boolean $isNullA;
${ctx.javaType(order.child.dataType)} $primitiveA;
{
${eval.code}
$isNullA = ${eval.isNull};
- $primitiveA = ${eval.primitive};
+ $primitiveA = ${eval.value};
}
- i = b;
+ ${ctx.INPUT_ROW} = b;
boolean $isNullB;
${ctx.javaType(order.child.dataType)} $primitiveB;
{
${eval.code}
$isNullB = ${eval.isNull};
- $primitiveB = ${eval.primitive};
+ $primitiveB = ${eval.value};
}
if ($isNullA && $isNullB) {
// Nothing
@@ -128,7 +128,7 @@ object GenerateOrdering extends CodeGenerator[Seq[SortOrder], Ordering[InternalR
@Override
public int compare(InternalRow a, InternalRow b) {
- InternalRow i = null; // Holds current row being evaluated.
+ InternalRow ${ctx.INPUT_ROW} = null; // Holds current row being evaluated.
$comparisons
return 0;
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GeneratePredicate.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GeneratePredicate.scala
index c7e718a526..ae6ffe6293 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GeneratePredicate.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GeneratePredicate.scala
@@ -56,9 +56,9 @@ object GeneratePredicate extends CodeGenerator[Expression, (InternalRow) => Bool
}
@Override
- public boolean eval(InternalRow i) {
+ public boolean eval(InternalRow ${ctx.INPUT_ROW}) {
${eval.code}
- return !${eval.isNull} && ${eval.primitive};
+ return !${eval.isNull} && ${eval.value};
}
}"""
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateProjection.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateProjection.scala
index 75524b568d..dbcc9dc084 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateProjection.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateProjection.scala
@@ -59,7 +59,7 @@ object GenerateProjection extends CodeGenerator[Seq[Expression], Projection] {
${eval.code}
nullBits[$i] = ${eval.isNull};
if (!${eval.isNull}) {
- c$i = ${eval.primitive};
+ c$i = ${eval.value};
}
}
"""
@@ -180,7 +180,7 @@ object GenerateProjection extends CodeGenerator[Seq[Expression], Projection] {
$columns
- public SpecificRow(InternalRow i) {
+ public SpecificRow(InternalRow ${ctx.INPUT_ROW}) {
$initColumns
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateSafeProjection.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateSafeProjection.scala
index 7ad352d7ce..ea09e029da 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateSafeProjection.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateSafeProjection.scala
@@ -51,7 +51,7 @@ object GenerateSafeProjection extends CodeGenerator[Seq[Expression], Projection]
s"""
if (!$tmp.isNullAt($i)) {
${converter.code}
- $values[$i] = ${converter.primitive};
+ $values[$i] = ${converter.value};
}
"""
}
@@ -85,7 +85,7 @@ object GenerateSafeProjection extends CodeGenerator[Seq[Expression], Projection]
for (int $index = 0; $index < $numElements; $index++) {
if (!$tmp.isNullAt($index)) {
${elementConverter.code}
- $values[$index] = ${elementConverter.primitive};
+ $values[$index] = ${elementConverter.value};
}
}
final ArrayData $output = new $arrayClass($values);
@@ -109,7 +109,7 @@ object GenerateSafeProjection extends CodeGenerator[Seq[Expression], Projection]
final MapData $tmp = $input;
${keyConverter.code}
${valueConverter.code}
- final MapData $output = new $mapClass(${keyConverter.primitive}, ${valueConverter.primitive});
+ final MapData $output = new $mapClass(${keyConverter.value}, ${valueConverter.value});
"""
GeneratedExpressionCode(code, "false", output)
@@ -133,18 +133,18 @@ object GenerateSafeProjection extends CodeGenerator[Seq[Expression], Projection]
case (NoOp, _) => ""
case (e, i) =>
val evaluationCode = e.gen(ctx)
- val converter = convertToSafe(ctx, evaluationCode.primitive, e.dataType)
+ val converter = convertToSafe(ctx, evaluationCode.value, e.dataType)
evaluationCode.code +
s"""
if (${evaluationCode.isNull}) {
mutableRow.setNullAt($i);
} else {
${converter.code}
- ${ctx.setColumn("mutableRow", e.dataType, i, converter.primitive)};
+ ${ctx.setColumn("mutableRow", e.dataType, i, converter.value)};
}
"""
}
- val allExpressions = ctx.splitExpressions("i", expressionCodes)
+ val allExpressions = ctx.splitExpressions(ctx.INPUT_ROW, expressionCodes)
val code = s"""
public Object generate($exprType[] expr) {
return new SpecificSafeProjection(expr);
@@ -164,7 +164,7 @@ object GenerateSafeProjection extends CodeGenerator[Seq[Expression], Projection]
}
public Object apply(Object _i) {
- InternalRow i = (InternalRow) _i;
+ InternalRow ${ctx.INPUT_ROW} = (InternalRow) _i;
$allExpressions
return mutableRow;
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateUnsafeProjection.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateUnsafeProjection.scala
index 8e58cb9ad1..3e0e81733f 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateUnsafeProjection.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateUnsafeProjection.scala
@@ -93,7 +93,7 @@ object GenerateUnsafeProjection extends CodeGenerator[Seq[Expression], UnsafePro
// Remember the current cursor so that we can calculate how many bytes are
// written later.
final int $tmpCursor = $bufferHolder.cursor;
- ${writeStructToBuffer(ctx, input.primitive, t.map(_.dataType), bufferHolder)}
+ ${writeStructToBuffer(ctx, input.value, t.map(_.dataType), bufferHolder)}
$rowWriter.setOffsetAndSize($index, $tmpCursor, $bufferHolder.cursor - $tmpCursor);
"""
@@ -102,7 +102,7 @@ object GenerateUnsafeProjection extends CodeGenerator[Seq[Expression], UnsafePro
// Remember the current cursor so that we can calculate how many bytes are
// written later.
final int $tmpCursor = $bufferHolder.cursor;
- ${writeArrayToBuffer(ctx, input.primitive, et, bufferHolder)}
+ ${writeArrayToBuffer(ctx, input.value, et, bufferHolder)}
$rowWriter.setOffsetAndSize($index, $tmpCursor, $bufferHolder.cursor - $tmpCursor);
$rowWriter.alignToWords($bufferHolder.cursor - $tmpCursor);
"""
@@ -112,7 +112,7 @@ object GenerateUnsafeProjection extends CodeGenerator[Seq[Expression], UnsafePro
// Remember the current cursor so that we can calculate how many bytes are
// written later.
final int $tmpCursor = $bufferHolder.cursor;
- ${writeMapToBuffer(ctx, input.primitive, kt, vt, bufferHolder)}
+ ${writeMapToBuffer(ctx, input.value, kt, vt, bufferHolder)}
$rowWriter.setOffsetAndSize($index, $tmpCursor, $bufferHolder.cursor - $tmpCursor);
$rowWriter.alignToWords($bufferHolder.cursor - $tmpCursor);
"""
@@ -122,19 +122,19 @@ object GenerateUnsafeProjection extends CodeGenerator[Seq[Expression], UnsafePro
s"""
final long $fieldOffset = $rowWriter.getFieldOffset($index);
Platform.putLong($bufferHolder.buffer, $fieldOffset, 0L);
- ${writePrimitiveType(ctx, input.primitive, dt, s"$bufferHolder.buffer", fieldOffset)}
+ ${writePrimitiveType(ctx, input.value, dt, s"$bufferHolder.buffer", fieldOffset)}
"""
case t: DecimalType if t.precision <= Decimal.MAX_LONG_DIGITS =>
- s"$rowWriter.writeCompactDecimal($index, ${input.primitive}, " +
+ s"$rowWriter.writeCompactDecimal($index, ${input.value}, " +
s"${t.precision}, ${t.scale});"
case t: DecimalType =>
- s"$rowWriter.write($index, ${input.primitive}, ${t.precision}, ${t.scale});"
+ s"$rowWriter.write($index, ${input.value}, ${t.precision}, ${t.scale});"
case NullType => ""
- case _ => s"$rowWriter.write($index, ${input.primitive});"
+ case _ => s"$rowWriter.write($index, ${input.value});"
}
s"""
@@ -324,7 +324,7 @@ object GenerateUnsafeProjection extends CodeGenerator[Seq[Expression], UnsafePro
val code =
s"""
$bufferHolder.reset();
- ${writeExpressionsToBuffer(ctx, "i", exprEvals, exprTypes, bufferHolder)}
+ ${writeExpressionsToBuffer(ctx, ctx.INPUT_ROW, exprEvals, exprTypes, bufferHolder)}
$result.pointTo($bufferHolder.buffer, ${expressions.length}, $bufferHolder.totalSize());
"""
GeneratedExpressionCode(code, "false", result)
@@ -363,9 +363,9 @@ object GenerateUnsafeProjection extends CodeGenerator[Seq[Expression], UnsafePro
return apply((InternalRow) row);
}
- public UnsafeRow apply(InternalRow i) {
+ public UnsafeRow apply(InternalRow ${ctx.INPUT_ROW}) {
${eval.code}
- return ${eval.primitive};
+ return ${eval.value};
}
}
"""
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala
index 7b8c5b723d..75c66bc271 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala
@@ -35,7 +35,7 @@ case class Size(child: Expression) extends UnaryExpression with ExpectsInputType
}
override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
- nullSafeCodeGen(ctx, ev, c => s"${ev.primitive} = ($c).numElements();")
+ nullSafeCodeGen(ctx, ev, c => s"${ev.value} = ($c).numElements();")
}
}
@@ -173,7 +173,7 @@ case class ArrayContains(left: Expression, right: Expression)
${ev.isNull} = true;
} else if (${ctx.genEqual(right.dataType, value, getValue)}) {
${ev.isNull} = false;
- ${ev.primitive} = true;
+ ${ev.value} = true;
break;
}
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeCreator.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeCreator.scala
index 82eab5fb3d..a5f02e2463 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeCreator.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeCreator.scala
@@ -59,11 +59,11 @@ case class CreateArray(children: Seq[Expression]) extends Expression {
if (${eval.isNull}) {
$values[$i] = null;
} else {
- $values[$i] = ${eval.primitive};
+ $values[$i] = ${eval.value};
}
"""
}.mkString("\n") +
- s"final ArrayData ${ev.primitive} = new $arrayClass($values);"
+ s"final ArrayData ${ev.value} = new $arrayClass($values);"
}
override def prettyName: String = "array"
@@ -107,11 +107,11 @@ case class CreateStruct(children: Seq[Expression]) extends Expression {
if (${eval.isNull}) {
$values[$i] = null;
} else {
- $values[$i] = ${eval.primitive};
+ $values[$i] = ${eval.value};
}
"""
}.mkString("\n") +
- s"final InternalRow ${ev.primitive} = new $rowClass($values);"
+ s"final InternalRow ${ev.value} = new $rowClass($values);"
}
override def prettyName: String = "struct"
@@ -176,11 +176,11 @@ case class CreateNamedStruct(children: Seq[Expression]) extends Expression {
if (${eval.isNull}) {
$values[$i] = null;
} else {
- $values[$i] = ${eval.primitive};
+ $values[$i] = ${eval.value};
}
"""
}.mkString("\n") +
- s"final InternalRow ${ev.primitive} = new $rowClass($values);"
+ s"final InternalRow ${ev.value} = new $rowClass($values);"
}
override def prettyName: String = "named_struct"
@@ -218,7 +218,7 @@ case class CreateStructUnsafe(children: Seq[Expression]) extends Expression {
override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
val eval = GenerateUnsafeProjection.createCode(ctx, children)
ev.isNull = eval.isNull
- ev.primitive = eval.primitive
+ ev.value = eval.value
eval.code
}
@@ -258,7 +258,7 @@ case class CreateNamedStructUnsafe(children: Seq[Expression]) extends Expression
override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
val eval = GenerateUnsafeProjection.createCode(ctx, valExprs)
ev.isNull = eval.isNull
- ev.primitive = eval.primitive
+ ev.value = eval.value
eval.code
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeExtractors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeExtractors.scala
index 9927da21b0..a2b5a6a580 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeExtractors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeExtractors.scala
@@ -113,7 +113,7 @@ case class GetStructField(child: Expression, field: StructField, ordinal: Int)
if ($eval.isNullAt($ordinal)) {
${ev.isNull} = true;
} else {
- ${ev.primitive} = ${ctx.getValue(eval, dataType, ordinal.toString)};
+ ${ev.value} = ${ctx.getValue(eval, dataType, ordinal.toString)};
}
"""
})
@@ -175,7 +175,7 @@ case class GetArrayStructFields(
}
}
}
- ${ev.primitive} = new $arrayClass(values);
+ ${ev.value} = new $arrayClass(values);
"""
})
}
@@ -219,7 +219,7 @@ case class GetArrayItem(child: Expression, ordinal: Expression)
if (index >= $eval1.numElements() || index < 0) {
${ev.isNull} = true;
} else {
- ${ev.primitive} = ${ctx.getValue(eval1, dataType, "index")};
+ ${ev.value} = ${ctx.getValue(eval1, dataType, "index")};
}
"""
})
@@ -295,7 +295,7 @@ case class GetMapValue(child: Expression, key: Expression)
}
if ($found) {
- ${ev.primitive} = ${ctx.getValue(eval1 + ".valueArray()", dataType, index)};
+ ${ev.value} = ${ctx.getValue(eval1 + ".valueArray()", dataType, index)};
} else {
${ev.isNull} = true;
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/conditionalExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/conditionalExpressions.scala
index d51f3d3cef..d532629984 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/conditionalExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/conditionalExpressions.scala
@@ -60,15 +60,15 @@ case class If(predicate: Expression, trueValue: Expression, falseValue: Expressi
s"""
${condEval.code}
boolean ${ev.isNull} = false;
- ${ctx.javaType(dataType)} ${ev.primitive} = ${ctx.defaultValue(dataType)};
- if (!${condEval.isNull} && ${condEval.primitive}) {
+ ${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};
+ if (!${condEval.isNull} && ${condEval.value}) {
${trueEval.code}
${ev.isNull} = ${trueEval.isNull};
- ${ev.primitive} = ${trueEval.primitive};
+ ${ev.value} = ${trueEval.value};
} else {
${falseEval.code}
${ev.isNull} = ${falseEval.isNull};
- ${ev.primitive} = ${falseEval.primitive};
+ ${ev.value} = ${falseEval.value};
}
"""
}
@@ -166,11 +166,11 @@ case class CaseWhen(branches: Seq[Expression]) extends CaseWhenLike {
s"""
if (!$got) {
${cond.code}
- if (!${cond.isNull} && ${cond.primitive}) {
+ if (!${cond.isNull} && ${cond.value}) {
$got = true;
${res.code}
${ev.isNull} = ${res.isNull};
- ${ev.primitive} = ${res.primitive};
+ ${ev.value} = ${res.value};
}
}
"""
@@ -182,7 +182,7 @@ case class CaseWhen(branches: Seq[Expression]) extends CaseWhenLike {
if (!$got) {
${res.code}
${ev.isNull} = ${res.isNull};
- ${ev.primitive} = ${res.primitive};
+ ${ev.value} = ${res.value};
}
"""
} else {
@@ -192,7 +192,7 @@ case class CaseWhen(branches: Seq[Expression]) extends CaseWhenLike {
s"""
boolean $got = false;
boolean ${ev.isNull} = true;
- ${ctx.javaType(dataType)} ${ev.primitive} = ${ctx.defaultValue(dataType)};
+ ${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};
$cases
$other
"""
@@ -267,11 +267,11 @@ case class CaseKeyWhen(key: Expression, branches: Seq[Expression]) extends CaseW
s"""
if (!$got) {
${cond.code}
- if (!${cond.isNull} && ${ctx.genEqual(key.dataType, keyEval.primitive, cond.primitive)}) {
+ if (!${cond.isNull} && ${ctx.genEqual(key.dataType, keyEval.value, cond.value)}) {
$got = true;
${res.code}
${ev.isNull} = ${res.isNull};
- ${ev.primitive} = ${res.primitive};
+ ${ev.value} = ${res.value};
}
}
"""
@@ -283,7 +283,7 @@ case class CaseKeyWhen(key: Expression, branches: Seq[Expression]) extends CaseW
if (!$got) {
${res.code}
${ev.isNull} = ${res.isNull};
- ${ev.primitive} = ${res.primitive};
+ ${ev.value} = ${res.value};
}
"""
} else {
@@ -293,7 +293,7 @@ case class CaseKeyWhen(key: Expression, branches: Seq[Expression]) extends CaseW
s"""
boolean $got = false;
boolean ${ev.isNull} = true;
- ${ctx.javaType(dataType)} ${ev.primitive} = ${ctx.defaultValue(dataType)};
+ ${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};
${keyEval.code}
if (!${keyEval.isNull}) {
$cases
@@ -351,15 +351,15 @@ case class Least(children: Seq[Expression]) extends Expression {
def updateEval(i: Int): String =
s"""
if (!${evalChildren(i).isNull} && (${ev.isNull} ||
- ${ctx.genComp(dataType, evalChildren(i).primitive, ev.primitive)} < 0)) {
+ ${ctx.genComp(dataType, evalChildren(i).value, ev.value)} < 0)) {
${ev.isNull} = false;
- ${ev.primitive} = ${evalChildren(i).primitive};
+ ${ev.value} = ${evalChildren(i).value};
}
"""
s"""
${evalChildren.map(_.code).mkString("\n")}
boolean ${ev.isNull} = true;
- ${ctx.javaType(dataType)} ${ev.primitive} = ${ctx.defaultValue(dataType)};
+ ${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};
${children.indices.map(updateEval).mkString("\n")}
"""
}
@@ -406,15 +406,15 @@ case class Greatest(children: Seq[Expression]) extends Expression {
def updateEval(i: Int): String =
s"""
if (!${evalChildren(i).isNull} && (${ev.isNull} ||
- ${ctx.genComp(dataType, evalChildren(i).primitive, ev.primitive)} > 0)) {
+ ${ctx.genComp(dataType, evalChildren(i).value, ev.value)} > 0)) {
${ev.isNull} = false;
- ${ev.primitive} = ${evalChildren(i).primitive};
+ ${ev.value} = ${evalChildren(i).value};
}
"""
s"""
${evalChildren.map(_.code).mkString("\n")}
boolean ${ev.isNull} = true;
- ${ctx.javaType(dataType)} ${ev.primitive} = ${ctx.defaultValue(dataType)};
+ ${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};
${children.indices.map(updateEval).mkString("\n")}
"""
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
index 32dc9b7682..13cc6bb6f2 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
@@ -82,7 +82,7 @@ case class DateAdd(startDate: Expression, days: Expression)
override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
nullSafeCodeGen(ctx, ev, (sd, d) => {
- s"""${ev.primitive} = $sd + $d;"""
+ s"""${ev.value} = $sd + $d;"""
})
}
}
@@ -105,7 +105,7 @@ case class DateSub(startDate: Expression, days: Expression)
override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
nullSafeCodeGen(ctx, ev, (sd, d) => {
- s"""${ev.primitive} = $sd - $d;"""
+ s"""${ev.value} = $sd - $d;"""
})
}
}
@@ -269,7 +269,7 @@ case class WeekOfYear(child: Expression) extends UnaryExpression with ImplicitCa
""")
s"""
$c.setTimeInMillis($time * 1000L * 3600L * 24L);
- ${ev.primitive} = $c.get($cal.WEEK_OF_YEAR);
+ ${ev.value} = $c.get($cal.WEEK_OF_YEAR);
"""
})
}
@@ -368,19 +368,19 @@ case class UnixTimestamp(timeExp: Expression, format: Expression)
if (fString == null) {
s"""
boolean ${ev.isNull} = true;
- ${ctx.javaType(dataType)} ${ev.primitive} = ${ctx.defaultValue(dataType)};
+ ${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};
"""
} else {
val eval1 = left.gen(ctx)
s"""
${eval1.code}
boolean ${ev.isNull} = ${eval1.isNull};
- ${ctx.javaType(dataType)} ${ev.primitive} = ${ctx.defaultValue(dataType)};
+ ${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};
if (!${ev.isNull}) {
try {
$sdf $formatter = new $sdf("$fString");
- ${ev.primitive} =
- $formatter.parse(${eval1.primitive}.toString()).getTime() / 1000L;
+ ${ev.value} =
+ $formatter.parse(${eval1.value}.toString()).getTime() / 1000L;
} catch (java.lang.Throwable e) {
${ev.isNull} = true;
}
@@ -392,7 +392,7 @@ case class UnixTimestamp(timeExp: Expression, format: Expression)
nullSafeCodeGen(ctx, ev, (string, format) => {
s"""
try {
- ${ev.primitive} =
+ ${ev.value} =
(new $sdf($format.toString())).parse($string.toString()).getTime() / 1000L;
} catch (java.lang.Throwable e) {
${ev.isNull} = true;
@@ -404,9 +404,9 @@ case class UnixTimestamp(timeExp: Expression, format: Expression)
s"""
${eval1.code}
boolean ${ev.isNull} = ${eval1.isNull};
- ${ctx.javaType(dataType)} ${ev.primitive} = ${ctx.defaultValue(dataType)};
+ ${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};
if (!${ev.isNull}) {
- ${ev.primitive} = ${eval1.primitive} / 1000000L;
+ ${ev.value} = ${eval1.value} / 1000000L;
}
"""
case DateType =>
@@ -415,9 +415,9 @@ case class UnixTimestamp(timeExp: Expression, format: Expression)
s"""
${eval1.code}
boolean ${ev.isNull} = ${eval1.isNull};
- ${ctx.javaType(dataType)} ${ev.primitive} = ${ctx.defaultValue(dataType)};
+ ${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};
if (!${ev.isNull}) {
- ${ev.primitive} = $dtu.daysToMillis(${eval1.primitive}) / 1000L;
+ ${ev.value} = $dtu.daysToMillis(${eval1.value}) / 1000L;
}
"""
}
@@ -477,18 +477,18 @@ case class FromUnixTime(sec: Expression, format: Expression)
if (constFormat == null) {
s"""
boolean ${ev.isNull} = true;
- ${ctx.javaType(dataType)} ${ev.primitive} = ${ctx.defaultValue(dataType)};
+ ${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};
"""
} else {
val t = left.gen(ctx)
s"""
${t.code}
boolean ${ev.isNull} = ${t.isNull};
- ${ctx.javaType(dataType)} ${ev.primitive} = ${ctx.defaultValue(dataType)};
+ ${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};
if (!${ev.isNull}) {
try {
- ${ev.primitive} = UTF8String.fromString(new $sdf("${constFormat.toString}").format(
- new java.util.Date(${t.primitive} * 1000L)));
+ ${ev.value} = UTF8String.fromString(new $sdf("${constFormat.toString}").format(
+ new java.util.Date(${t.value} * 1000L)));
} catch (java.lang.Throwable e) {
${ev.isNull} = true;
}
@@ -499,7 +499,7 @@ case class FromUnixTime(sec: Expression, format: Expression)
nullSafeCodeGen(ctx, ev, (seconds, f) => {
s"""
try {
- ${ev.primitive} = UTF8String.fromString((new $sdf($f.toString())).format(
+ ${ev.value} = UTF8String.fromString((new $sdf($f.toString())).format(
new java.util.Date($seconds * 1000L)));
} catch (java.lang.Throwable e) {
${ev.isNull} = true;
@@ -571,7 +571,7 @@ case class NextDay(startDate: Expression, dayOfWeek: Expression)
} else {
val dayOfWeekValue = DateTimeUtils.getDayOfWeekFromString(input)
s"""
- |${ev.primitive} = $dateTimeUtilClass.getNextDateForDayOfWeek($sd, $dayOfWeekValue);
+ |${ev.value} = $dateTimeUtilClass.getNextDateForDayOfWeek($sd, $dayOfWeekValue);
""".stripMargin
}
} else {
@@ -580,7 +580,7 @@ case class NextDay(startDate: Expression, dayOfWeek: Expression)
|if ($dayOfWeekTerm == -1) {
| ${ev.isNull} = true;
|} else {
- | ${ev.primitive} = $dateTimeUtilClass.getNextDateForDayOfWeek($sd, $dayOfWeekTerm);
+ | ${ev.value} = $dateTimeUtilClass.getNextDateForDayOfWeek($sd, $dayOfWeekTerm);
|}
""".stripMargin
}
@@ -640,7 +640,7 @@ case class FromUTCTimestamp(left: Expression, right: Expression)
if (tz == null) {
s"""
|boolean ${ev.isNull} = true;
- |long ${ev.primitive} = 0;
+ |long ${ev.value} = 0;
""".stripMargin
} else {
val tzTerm = ctx.freshName("tz")
@@ -650,10 +650,10 @@ case class FromUTCTimestamp(left: Expression, right: Expression)
s"""
|${eval.code}
|boolean ${ev.isNull} = ${eval.isNull};
- |long ${ev.primitive} = 0;
+ |long ${ev.value} = 0;
|if (!${ev.isNull}) {
- | ${ev.primitive} = ${eval.primitive} +
- | ${tzTerm}.getOffset(${eval.primitive} / 1000) * 1000L;
+ | ${ev.value} = ${eval.value} +
+ | ${tzTerm}.getOffset(${eval.value} / 1000) * 1000L;
|}
""".stripMargin
}
@@ -765,7 +765,7 @@ case class ToUTCTimestamp(left: Expression, right: Expression)
if (tz == null) {
s"""
|boolean ${ev.isNull} = true;
- |long ${ev.primitive} = 0;
+ |long ${ev.value} = 0;
""".stripMargin
} else {
val tzTerm = ctx.freshName("tz")
@@ -775,10 +775,10 @@ case class ToUTCTimestamp(left: Expression, right: Expression)
s"""
|${eval.code}
|boolean ${ev.isNull} = ${eval.isNull};
- |long ${ev.primitive} = 0;
+ |long ${ev.value} = 0;
|if (!${ev.isNull}) {
- | ${ev.primitive} = ${eval.primitive} -
- | ${tzTerm}.getOffset(${eval.primitive} / 1000) * 1000L;
+ | ${ev.value} = ${eval.value} -
+ | ${tzTerm}.getOffset(${eval.value} / 1000) * 1000L;
|}
""".stripMargin
}
@@ -849,16 +849,16 @@ case class TruncDate(date: Expression, format: Expression)
if (truncLevel == -1) {
s"""
boolean ${ev.isNull} = true;
- ${ctx.javaType(dataType)} ${ev.primitive} = ${ctx.defaultValue(dataType)};
+ ${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};
"""
} else {
val d = date.gen(ctx)
s"""
${d.code}
boolean ${ev.isNull} = ${d.isNull};
- ${ctx.javaType(dataType)} ${ev.primitive} = ${ctx.defaultValue(dataType)};
+ ${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};
if (!${ev.isNull}) {
- ${ev.primitive} = $dtu.truncDate(${d.primitive}, $truncLevel);
+ ${ev.value} = $dtu.truncDate(${d.value}, $truncLevel);
}
"""
}
@@ -870,7 +870,7 @@ case class TruncDate(date: Expression, format: Expression)
if ($form == -1) {
${ev.isNull} = true;
} else {
- ${ev.primitive} = $dtu.truncDate($dateVal, $form);
+ ${ev.value} = $dtu.truncDate($dateVal, $form);
}
"""
})
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/decimalExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/decimalExpressions.scala
index b7be12f7aa..78f6631e46 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/decimalExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/decimalExpressions.scala
@@ -55,8 +55,8 @@ case class MakeDecimal(child: Expression, precision: Int, scale: Int) extends Un
override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
nullSafeCodeGen(ctx, ev, eval => {
s"""
- ${ev.primitive} = (new Decimal()).setOrNull($eval, $precision, $scale);
- ${ev.isNull} = ${ev.primitive} == null;
+ ${ev.value} = (new Decimal()).setOrNull($eval, $precision, $scale);
+ ${ev.isNull} = ${ev.value} == null;
"""
})
}
@@ -97,7 +97,7 @@ case class CheckOverflow(child: Expression, dataType: DecimalType) extends Unary
s"""
| Decimal $tmp = $eval.clone();
| if ($tmp.changePrecision(${dataType.precision}, ${dataType.scale})) {
- | ${ev.primitive} = $tmp;
+ | ${ev.value} = $tmp;
| } else {
| ${ev.isNull} = true;
| }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
index 8c0c5d5b1e..51be819e9d 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
@@ -97,12 +97,12 @@ case class Literal protected (value: Any, dataType: DataType)
// change the isNull and primitive to consts, to inline them
if (value == null) {
ev.isNull = "true"
- s"final ${ctx.javaType(dataType)} ${ev.primitive} = ${ctx.defaultValue(dataType)};"
+ s"final ${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};"
} else {
dataType match {
case BooleanType =>
ev.isNull = "false"
- ev.primitive = value.toString
+ ev.value = value.toString
""
case FloatType =>
val v = value.asInstanceOf[Float]
@@ -110,7 +110,7 @@ case class Literal protected (value: Any, dataType: DataType)
super.genCode(ctx, ev)
} else {
ev.isNull = "false"
- ev.primitive = s"${value}f"
+ ev.value = s"${value}f"
""
}
case DoubleType =>
@@ -119,20 +119,20 @@ case class Literal protected (value: Any, dataType: DataType)
super.genCode(ctx, ev)
} else {
ev.isNull = "false"
- ev.primitive = s"${value}D"
+ ev.value = s"${value}D"
""
}
case ByteType | ShortType =>
ev.isNull = "false"
- ev.primitive = s"(${ctx.javaType(dataType)})$value"
+ ev.value = s"(${ctx.javaType(dataType)})$value"
""
case IntegerType | DateType =>
ev.isNull = "false"
- ev.primitive = value.toString
+ ev.value = value.toString
""
case TimestampType | LongType =>
ev.isNull = "false"
- ev.primitive = s"${value}L"
+ ev.value = s"${value}L"
""
// eval() version may be faster for non-primitive types
case other =>
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/mathExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/mathExpressions.scala
index 39de0e8f44..a8164e9e29 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/mathExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/mathExpressions.scala
@@ -89,7 +89,7 @@ abstract class UnaryLogExpression(f: Double => Double, name: String)
if ($c <= $yAsymptote) {
${ev.isNull} = true;
} else {
- ${ev.primitive} = java.lang.Math.${funcName}($c);
+ ${ev.value} = java.lang.Math.${funcName}($c);
}
"""
)
@@ -191,8 +191,8 @@ case class Conv(numExpr: Expression, fromBaseExpr: Expression, toBaseExpr: Expre
val numconv = NumberConverter.getClass.getName.stripSuffix("$")
nullSafeCodeGen(ctx, ev, (num, from, to) =>
s"""
- ${ev.primitive} = $numconv.convert($num.getBytes(), $from, $to);
- if (${ev.primitive} == null) {
+ ${ev.value} = $numconv.convert($num.getBytes(), $from, $to);
+ if (${ev.value} == null) {
${ev.isNull} = true;
}
"""
@@ -270,7 +270,7 @@ case class Factorial(child: Expression) extends UnaryExpression with ImplicitCas
if ($eval > 20 || $eval < 0) {
${ev.isNull} = true;
} else {
- ${ev.primitive} =
+ ${ev.value} =
org.apache.spark.sql.catalyst.expressions.Factorial.factorial($eval);
}
"""
@@ -288,7 +288,7 @@ case class Log2(child: Expression)
if ($c <= $yAsymptote) {
${ev.isNull} = true;
} else {
- ${ev.primitive} = java.lang.Math.log($c) / java.lang.Math.log(2);
+ ${ev.value} = java.lang.Math.log($c) / java.lang.Math.log(2);
}
"""
)
@@ -432,7 +432,7 @@ case class Hex(child: Expression) extends UnaryExpression with ImplicitCastInput
override protected def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
nullSafeCodeGen(ctx, ev, (c) => {
val hex = Hex.getClass.getName.stripSuffix("$")
- s"${ev.primitive} = " + (child.dataType match {
+ s"${ev.value} = " + (child.dataType match {
case StringType => s"""$hex.hex($c.getBytes());"""
case _ => s"""$hex.hex($c);"""
})
@@ -458,8 +458,8 @@ case class Unhex(child: Expression) extends UnaryExpression with ImplicitCastInp
nullSafeCodeGen(ctx, ev, (c) => {
val hex = Hex.getClass.getName.stripSuffix("$")
s"""
- ${ev.primitive} = $hex.unhex($c.getBytes());
- ${ev.isNull} = ${ev.primitive} == null;
+ ${ev.value} = $hex.unhex($c.getBytes());
+ ${ev.isNull} = ${ev.value} == null;
"""
})
}
@@ -605,7 +605,7 @@ case class Logarithm(left: Expression, right: Expression)
if ($c2 <= 0.0) {
${ev.isNull} = true;
} else {
- ${ev.primitive} = java.lang.Math.log($c2);
+ ${ev.value} = java.lang.Math.log($c2);
}
""")
} else {
@@ -614,7 +614,7 @@ case class Logarithm(left: Expression, right: Expression)
if ($c1 <= 0.0 || $c2 <= 0.0) {
${ev.isNull} = true;
} else {
- ${ev.primitive} = java.lang.Math.log($c2) / java.lang.Math.log($c1);
+ ${ev.value} = java.lang.Math.log($c2) / java.lang.Math.log($c1);
}
""")
}
@@ -727,74 +727,74 @@ case class Round(child: Expression, scale: Expression)
val evaluationCode = child.dataType match {
case _: DecimalType =>
s"""
- if (${ce.primitive}.changePrecision(${ce.primitive}.precision(), ${_scale})) {
- ${ev.primitive} = ${ce.primitive};
+ if (${ce.value}.changePrecision(${ce.value}.precision(), ${_scale})) {
+ ${ev.value} = ${ce.value};
} else {
${ev.isNull} = true;
}"""
case ByteType =>
if (_scale < 0) {
s"""
- ${ev.primitive} = new java.math.BigDecimal(${ce.primitive}).
+ ${ev.value} = new java.math.BigDecimal(${ce.value}).
setScale(${_scale}, java.math.BigDecimal.ROUND_HALF_UP).byteValue();"""
} else {
- s"${ev.primitive} = ${ce.primitive};"
+ s"${ev.value} = ${ce.value};"
}
case ShortType =>
if (_scale < 0) {
s"""
- ${ev.primitive} = new java.math.BigDecimal(${ce.primitive}).
+ ${ev.value} = new java.math.BigDecimal(${ce.value}).
setScale(${_scale}, java.math.BigDecimal.ROUND_HALF_UP).shortValue();"""
} else {
- s"${ev.primitive} = ${ce.primitive};"
+ s"${ev.value} = ${ce.value};"
}
case IntegerType =>
if (_scale < 0) {
s"""
- ${ev.primitive} = new java.math.BigDecimal(${ce.primitive}).
+ ${ev.value} = new java.math.BigDecimal(${ce.value}).
setScale(${_scale}, java.math.BigDecimal.ROUND_HALF_UP).intValue();"""
} else {
- s"${ev.primitive} = ${ce.primitive};"
+ s"${ev.value} = ${ce.value};"
}
case LongType =>
if (_scale < 0) {
s"""
- ${ev.primitive} = new java.math.BigDecimal(${ce.primitive}).
+ ${ev.value} = new java.math.BigDecimal(${ce.value}).
setScale(${_scale}, java.math.BigDecimal.ROUND_HALF_UP).longValue();"""
} else {
- s"${ev.primitive} = ${ce.primitive};"
+ s"${ev.value} = ${ce.value};"
}
case FloatType => // if child eval to NaN or Infinity, just return it.
if (_scale == 0) {
s"""
- if (Float.isNaN(${ce.primitive}) || Float.isInfinite(${ce.primitive})){
- ${ev.primitive} = ${ce.primitive};
+ if (Float.isNaN(${ce.value}) || Float.isInfinite(${ce.value})){
+ ${ev.value} = ${ce.value};
} else {
- ${ev.primitive} = Math.round(${ce.primitive});
+ ${ev.value} = Math.round(${ce.value});
}"""
} else {
s"""
- if (Float.isNaN(${ce.primitive}) || Float.isInfinite(${ce.primitive})){
- ${ev.primitive} = ${ce.primitive};
+ if (Float.isNaN(${ce.value}) || Float.isInfinite(${ce.value})){
+ ${ev.value} = ${ce.value};
} else {
- ${ev.primitive} = java.math.BigDecimal.valueOf(${ce.primitive}).
+ ${ev.value} = java.math.BigDecimal.valueOf(${ce.value}).
setScale(${_scale}, java.math.BigDecimal.ROUND_HALF_UP).floatValue();
}"""
}
case DoubleType => // if child eval to NaN or Infinity, just return it.
if (_scale == 0) {
s"""
- if (Double.isNaN(${ce.primitive}) || Double.isInfinite(${ce.primitive})){
- ${ev.primitive} = ${ce.primitive};
+ if (Double.isNaN(${ce.value}) || Double.isInfinite(${ce.value})){
+ ${ev.value} = ${ce.value};
} else {
- ${ev.primitive} = Math.round(${ce.primitive});
+ ${ev.value} = Math.round(${ce.value});
}"""
} else {
s"""
- if (Double.isNaN(${ce.primitive}) || Double.isInfinite(${ce.primitive})){
- ${ev.primitive} = ${ce.primitive};
+ if (Double.isNaN(${ce.value}) || Double.isInfinite(${ce.value})){
+ ${ev.value} = ${ce.value};
} else {
- ${ev.primitive} = java.math.BigDecimal.valueOf(${ce.primitive}).
+ ${ev.value} = java.math.BigDecimal.valueOf(${ce.value}).
setScale(${_scale}, java.math.BigDecimal.ROUND_HALF_UP).doubleValue();
}"""
}
@@ -803,13 +803,13 @@ case class Round(child: Expression, scale: Expression)
if (scaleV == null) { // if scale is null, no need to eval its child at all
s"""
boolean ${ev.isNull} = true;
- ${ctx.javaType(dataType)} ${ev.primitive} = ${ctx.defaultValue(dataType)};
+ ${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};
"""
} else {
s"""
${ce.code}
boolean ${ev.isNull} = ${ce.isNull};
- ${ctx.javaType(dataType)} ${ev.primitive} = ${ctx.defaultValue(dataType)};
+ ${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};
if (!${ev.isNull}) {
$evaluationCode
}
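
Most of the math hunks only touch the body handed to a nullSafeCodeGen-style helper; the declaration of the value slot and the null guard come from the wrapper. Roughly, and with plain string parameters standing in for the real CodeGenContext plumbing and helper signature, the wrapper produces:

    object NullSafeWrapperSketch {
      // Simplified sketch of what a nullSafeCodeGen-style helper emits around the
      // callback; parameter names are stand-ins, not the actual Spark signature.
      def wrap(
          childCode: String, childIsNull: String, childValue: String,
          evIsNull: String, evValue: String,
          javaType: String, defaultValue: String)(body: String => String): String =
        s"""
          $childCode
          boolean $evIsNull = $childIsNull;
          $javaType $evValue = $defaultValue;
          if (!$evIsNull) {
            ${body(childValue)}
          }
        """

      // A callback like the Log2 hunk then only assigns to the value slot, e.g.
      //   c => s"$evValue = java.lang.Math.log($c) / java.lang.Math.log(2);"
    }
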
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/misc.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/misc.scala
index 8d8d66ddeb..0f6d02f2e0 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/misc.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/misc.scala
@@ -92,18 +92,18 @@ case class Sha2(left: Expression, right: Expression)
try {
java.security.MessageDigest md = java.security.MessageDigest.getInstance("SHA-224");
md.update($eval1);
- ${ev.primitive} = UTF8String.fromBytes(md.digest());
+ ${ev.value} = UTF8String.fromBytes(md.digest());
} catch (java.security.NoSuchAlgorithmException e) {
${ev.isNull} = true;
}
} else if ($eval2 == 256 || $eval2 == 0) {
- ${ev.primitive} =
+ ${ev.value} =
UTF8String.fromString($digestUtils.sha256Hex($eval1));
} else if ($eval2 == 384) {
- ${ev.primitive} =
+ ${ev.value} =
UTF8String.fromString($digestUtils.sha384Hex($eval1));
} else if ($eval2 == 512) {
- ${ev.primitive} =
+ ${ev.value} =
UTF8String.fromString($digestUtils.sha512Hex($eval1));
} else {
${ev.isNull} = true;
@@ -155,7 +155,7 @@ case class Crc32(child: Expression) extends UnaryExpression with ImplicitCastInp
s"""
$CRC32 checksum = new $CRC32();
checksum.update($value, 0, $value.length);
- ${ev.primitive} = checksum.getValue();
+ ${ev.value} = checksum.getValue();
"""
})
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/nullExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/nullExpressions.scala
index 287718fab7..94deafb75b 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/nullExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/nullExpressions.scala
@@ -64,7 +64,7 @@ case class Coalesce(children: Seq[Expression]) extends Expression {
override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
s"""
boolean ${ev.isNull} = true;
- ${ctx.javaType(dataType)} ${ev.primitive} = ${ctx.defaultValue(dataType)};
+ ${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};
""" +
children.map { e =>
val eval = e.gen(ctx)
@@ -73,7 +73,7 @@ case class Coalesce(children: Seq[Expression]) extends Expression {
${eval.code}
if (!${eval.isNull}) {
${ev.isNull} = false;
- ${ev.primitive} = ${eval.primitive};
+ ${ev.value} = ${eval.value};
}
}
"""
@@ -111,8 +111,8 @@ case class IsNaN(child: Expression) extends UnaryExpression
s"""
${eval.code}
boolean ${ev.isNull} = false;
- ${ctx.javaType(dataType)} ${ev.primitive} = ${ctx.defaultValue(dataType)};
- ${ev.primitive} = !${eval.isNull} && Double.isNaN(${eval.primitive});
+ ${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};
+ ${ev.value} = !${eval.isNull} && Double.isNaN(${eval.value});
"""
}
}
@@ -152,18 +152,18 @@ case class NaNvl(left: Expression, right: Expression)
s"""
${leftGen.code}
boolean ${ev.isNull} = false;
- ${ctx.javaType(dataType)} ${ev.primitive} = ${ctx.defaultValue(dataType)};
+ ${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};
if (${leftGen.isNull}) {
${ev.isNull} = true;
} else {
- if (!Double.isNaN(${leftGen.primitive})) {
- ${ev.primitive} = ${leftGen.primitive};
+ if (!Double.isNaN(${leftGen.value})) {
+ ${ev.value} = ${leftGen.value};
} else {
${rightGen.code}
if (${rightGen.isNull}) {
${ev.isNull} = true;
} else {
- ${ev.primitive} = ${rightGen.primitive};
+ ${ev.value} = ${rightGen.value};
}
}
}
@@ -186,7 +186,7 @@ case class IsNull(child: Expression) extends UnaryExpression with Predicate {
override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
val eval = child.gen(ctx)
ev.isNull = "false"
- ev.primitive = eval.isNull
+ ev.value = eval.isNull
eval.code
}
}
@@ -205,7 +205,7 @@ case class IsNotNull(child: Expression) extends UnaryExpression with Predicate {
override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
val eval = child.gen(ctx)
ev.isNull = "false"
- ev.primitive = s"(!(${eval.isNull}))"
+ ev.value = s"(!(${eval.isNull}))"
eval.code
}
}
@@ -249,7 +249,7 @@ case class AtLeastNNonNulls(n: Int, children: Seq[Expression]) extends Predicate
s"""
if ($nonnull < $n) {
${eval.code}
- if (!${eval.isNull} && !Double.isNaN(${eval.primitive})) {
+ if (!${eval.isNull} && !Double.isNaN(${eval.value})) {
$nonnull += 1;
}
}
@@ -269,7 +269,7 @@ case class AtLeastNNonNulls(n: Int, children: Seq[Expression]) extends Predicate
int $nonnull = 0;
$code
boolean ${ev.isNull} = false;
- boolean ${ev.primitive} = $nonnull >= $n;
+ boolean ${ev.value} = $nonnull >= $n;
"""
}
}
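
The Coalesce hunk above shows the other common shape: declare the result slot up front with its default, then let each child fill it only while it is still unset. A rough sketch of that fold, using (code, isNull, value) string triples where the real code passes the children's generated fragments:

    object CoalesceSketch {
      def coalesceCode(
          evIsNull: String, evValue: String,
          javaType: String, defaultValue: String,
          children: Seq[(String, String, String)]): String = {
        val header =
          s"""
            boolean $evIsNull = true;
            $javaType $evValue = $defaultValue;
          """
        // Each child is guarded: its code only runs if no earlier child
        // already produced a non-null value.
        header + children.map { case (code, isNull, value) =>
          s"""
            if ($evIsNull) {
              $code
              if (!$isNull) {
                $evIsNull = false;
                $evValue = $value;
              }
            }
          """
        }.mkString("\n")
      }
    }
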
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
index daefc016bc..68557479a9 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
@@ -148,19 +148,19 @@ case class In(value: Expression, list: Seq[Expression]) extends Predicate
val listGen = list.map(_.gen(ctx))
val listCode = listGen.map(x =>
s"""
- if (!${ev.primitive}) {
+ if (!${ev.value}) {
${x.code}
if (${x.isNull}) {
${ev.isNull} = true;
- } else if (${ctx.genEqual(value.dataType, valueGen.primitive, x.primitive)}) {
+ } else if (${ctx.genEqual(value.dataType, valueGen.value, x.value)}) {
${ev.isNull} = false;
- ${ev.primitive} = true;
+ ${ev.value} = true;
}
}
""").mkString("\n")
s"""
${valueGen.code}
- boolean ${ev.primitive} = false;
+ boolean ${ev.value} = false;
boolean ${ev.isNull} = ${valueGen.isNull};
if (!${ev.isNull}) {
$listCode
@@ -208,10 +208,10 @@ case class InSet(child: Expression, hset: Set[Any]) extends UnaryExpression with
s"""
${childGen.code}
boolean ${ev.isNull} = ${childGen.isNull};
- boolean ${ev.primitive} = false;
+ boolean ${ev.value} = false;
if (!${ev.isNull}) {
- ${ev.primitive} = $hsetTerm.contains(${childGen.primitive});
- if (!${ev.primitive} && $hasNullTerm) {
+ ${ev.value} = $hsetTerm.contains(${childGen.value});
+ if (!${ev.value} && $hasNullTerm) {
${ev.isNull} = true;
}
}
@@ -251,14 +251,14 @@ case class And(left: Expression, right: Expression) extends BinaryOperator with
s"""
${eval1.code}
boolean ${ev.isNull} = false;
-      boolean ${ev.primitive} = false;
+      boolean ${ev.value} = false;

-      if (!${eval1.isNull} && !${eval1.primitive}) {
+      if (!${eval1.isNull} && !${eval1.value}) {
} else {
${eval2.code}
- if (!${eval2.isNull} && !${eval2.primitive}) {
+ if (!${eval2.isNull} && !${eval2.value}) {
} else if (!${eval1.isNull} && !${eval2.isNull}) {
- ${ev.primitive} = true;
+ ${ev.value} = true;
} else {
${ev.isNull} = true;
}
@@ -300,14 +300,14 @@ case class Or(left: Expression, right: Expression) extends BinaryOperator with P
s"""
${eval1.code}
boolean ${ev.isNull} = false;
-      boolean ${ev.primitive} = true;
+      boolean ${ev.value} = true;

-      if (!${eval1.isNull} && ${eval1.primitive}) {
+      if (!${eval1.isNull} && ${eval1.value}) {
} else {
${eval2.code}
- if (!${eval2.isNull} && ${eval2.primitive}) {
+ if (!${eval2.isNull} && ${eval2.value}) {
} else if (!${eval1.isNull} && !${eval2.isNull}) {
- ${ev.primitive} = false;
+ ${ev.value} = false;
} else {
${ev.isNull} = true;
}
@@ -403,10 +403,10 @@ case class EqualNullSafe(left: Expression, right: Expression) extends BinaryComp
override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
val eval1 = left.gen(ctx)
val eval2 = right.gen(ctx)
- val equalCode = ctx.genEqual(left.dataType, eval1.primitive, eval2.primitive)
+ val equalCode = ctx.genEqual(left.dataType, eval1.value, eval2.value)
ev.isNull = "false"
eval1.code + eval2.code + s"""
- boolean ${ev.primitive} = (${eval1.isNull} && ${eval2.isNull}) ||
+ boolean ${ev.value} = (${eval1.isNull} && ${eval2.isNull}) ||
(!${eval1.isNull} && $equalCode);
"""
}
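
The predicate hunks combine the rename with the three-valued-logic idioms; EqualNullSafe is the most compact example, composing both children's fragments into a single boolean declaration. A sketch of that composition, with the equality generator passed in rather than taken from CodeGenContext:

    object EqualNullSafeSketch {
      def equalNullSafeCode(
          code1: String, isNull1: String, value1: String,
          code2: String, isNull2: String, value2: String,
          evValue: String)(genEqual: (String, String) => String): String =
        // Mirrors the hunk above: true when both sides are null, or when the
        // left side is non-null and the type-specific comparison succeeds.
        code1 + code2 + s"""
          boolean $evValue = ($isNull1 && $isNull2) ||
            (!$isNull1 && ${genEqual(value1, value2)});
        """
    }
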
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/randomExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/randomExpressions.scala
index 62d3d204ca..8bde8cb9fe 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/randomExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/randomExpressions.scala
@@ -69,7 +69,7 @@ case class Rand(seed: Long) extends RDG {
s"$rngTerm = new $className(${seed}L + org.apache.spark.TaskContext.getPartitionId());")
ev.isNull = "false"
s"""
- final ${ctx.javaType(dataType)} ${ev.primitive} = $rngTerm.nextDouble();
+ final ${ctx.javaType(dataType)} ${ev.value} = $rngTerm.nextDouble();
"""
}
}
@@ -92,7 +92,7 @@ case class Randn(seed: Long) extends RDG {
s"$rngTerm = new $className(${seed}L + org.apache.spark.TaskContext.getPartitionId());")
ev.isNull = "false"
s"""
- final ${ctx.javaType(dataType)} ${ev.primitive} = $rngTerm.nextGaussian();
+ final ${ctx.javaType(dataType)} ${ev.value} = $rngTerm.nextGaussian();
"""
}
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/regexpExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/regexpExpressions.scala
index 6dff28a7cd..64f15945c7 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/regexpExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/regexpExpressions.scala
@@ -92,15 +92,15 @@ case class Like(left: Expression, right: Expression)
s"""
${eval.code}
boolean ${ev.isNull} = ${eval.isNull};
- ${ctx.javaType(dataType)} ${ev.primitive} = ${ctx.defaultValue(dataType)};
+ ${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};
if (!${ev.isNull}) {
- ${ev.primitive} = $pattern.matcher(${eval.primitive}.toString()).matches();
+ ${ev.value} = $pattern.matcher(${eval.value}.toString()).matches();
}
"""
} else {
s"""
boolean ${ev.isNull} = true;
- ${ctx.javaType(dataType)} ${ev.primitive} = ${ctx.defaultValue(dataType)};
+ ${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};
"""
}
} else {
@@ -108,7 +108,7 @@ case class Like(left: Expression, right: Expression)
s"""
String rightStr = ${eval2}.toString();
${patternClass} $pattern = ${patternClass}.compile($escapeFunc(rightStr));
- ${ev.primitive} = $pattern.matcher(${eval1}.toString()).matches();
+ ${ev.value} = $pattern.matcher(${eval1}.toString()).matches();
"""
})
}
@@ -140,15 +140,15 @@ case class RLike(left: Expression, right: Expression)
s"""
${eval.code}
boolean ${ev.isNull} = ${eval.isNull};
- ${ctx.javaType(dataType)} ${ev.primitive} = ${ctx.defaultValue(dataType)};
+ ${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};
if (!${ev.isNull}) {
- ${ev.primitive} = $pattern.matcher(${eval.primitive}.toString()).find(0);
+ ${ev.value} = $pattern.matcher(${eval.value}.toString()).find(0);
}
"""
} else {
s"""
boolean ${ev.isNull} = true;
- ${ctx.javaType(dataType)} ${ev.primitive} = ${ctx.defaultValue(dataType)};
+ ${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};
"""
}
} else {
@@ -156,7 +156,7 @@ case class RLike(left: Expression, right: Expression)
s"""
String rightStr = ${eval2}.toString();
${patternClass} $pattern = ${patternClass}.compile(rightStr);
- ${ev.primitive} = $pattern.matcher(${eval1}.toString()).find(0);
+ ${ev.value} = $pattern.matcher(${eval1}.toString()).find(0);
"""
})
}
@@ -184,7 +184,7 @@ case class StringSplit(str: Expression, pattern: Expression)
val arrayClass = classOf[GenericArrayData].getName
nullSafeCodeGen(ctx, ev, (str, pattern) =>
// Array in java is covariant, so we don't need to cast UTF8String[] to Object[].
- s"""${ev.primitive} = new $arrayClass($str.split($pattern, -1));""")
+ s"""${ev.value} = new $arrayClass($str.split($pattern, -1));""")
}
override def prettyName: String = "split"
@@ -275,7 +275,7 @@ case class RegExpReplace(subject: Expression, regexp: Expression, rep: Expressio
m.appendReplacement(${termResult}, ${termLastReplacement});
}
m.appendTail(${termResult});
- ${ev.primitive} = UTF8String.fromString(${termResult}.toString());
+ ${ev.value} = UTF8String.fromString(${termResult}.toString());
${ev.isNull} = false;
"""
})
@@ -335,10 +335,10 @@ case class RegExpExtract(subject: Expression, regexp: Expression, idx: Expressio
${termPattern}.matcher($subject.toString());
if (m.find()) {
java.util.regex.MatchResult mr = m.toMatchResult();
- ${ev.primitive} = UTF8String.fromString(mr.group($idx));
+ ${ev.value} = UTF8String.fromString(mr.group($idx));
${ev.isNull} = false;
} else {
- ${ev.primitive} = UTF8String.EMPTY_UTF8;
+ ${ev.value} = UTF8String.EMPTY_UTF8;
${ev.isNull} = false;
}"""
})
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/sets.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/sets.scala
index 5b0fe8dfe2..d124d29d53 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/sets.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/sets.scala
@@ -67,7 +67,7 @@ case class NewSet(elementType: DataType) extends LeafExpression with CodegenFall
case IntegerType | LongType =>
ev.isNull = "false"
s"""
- ${ctx.javaType(dataType)} ${ev.primitive} = new ${ctx.javaType(dataType)}();
+ ${ctx.javaType(dataType)} ${ev.value} = new ${ctx.javaType(dataType)}();
"""
case _ => super.genCode(ctx, ev)
}
@@ -116,10 +116,10 @@ case class AddItemToSet(item: Expression, set: Expression)
val htype = ctx.javaType(dataType)
ev.isNull = "false"
- ev.primitive = setEval.primitive
+ ev.value = setEval.value
itemEval.code + setEval.code + s"""
if (!${itemEval.isNull} && !${setEval.isNull}) {
- (($htype)${setEval.primitive}).add(${itemEval.primitive});
+ (($htype)${setEval.value}).add(${itemEval.value});
}
"""
case _ => super.genCode(ctx, ev)
@@ -167,10 +167,10 @@ case class CombineSets(left: Expression, right: Expression)
val htype = ctx.javaType(dataType)
ev.isNull = leftEval.isNull
- ev.primitive = leftEval.primitive
+ ev.value = leftEval.value
leftEval.code + rightEval.code + s"""
if (!${leftEval.isNull} && !${rightEval.isNull}) {
- ${leftEval.primitive}.union((${htype})${rightEval.primitive});
+ ${leftEval.value}.union((${htype})${rightEval.value});
}
"""
case _ => super.genCode(ctx, ev)
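
The set expressions avoid declaring a fresh result variable altogether: AddItemToSet and CombineSets alias the parent's value slot to the child set's value, so the generated Java mutates that set in place. A sketch of the aliasing, with simplified string parameters standing in for the children's generated fragments:

    object AddItemToSetSketch {
      // Returns the generated code together with the aliased value slot.
      def addItemCode(
          itemCode: String, itemIsNull: String, itemValue: String,
          setCode: String, setIsNull: String, setValue: String,
          setJavaType: String): (String, String) = {
        val evValue = setValue  // alias: the result is the (mutated) input set
        val code = itemCode + setCode + s"""
          if (!$itemIsNull && !$setIsNull) {
            (($setJavaType)$setValue).add($itemValue);
          }
        """
        (code, evValue)
      }
    }
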
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringExpressions.scala
index 4ab27c044f..abc5c94589 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringExpressions.scala
@@ -50,12 +50,12 @@ case class Concat(children: Seq[Expression]) extends Expression with ImplicitCas
override protected def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
val evals = children.map(_.gen(ctx))
val inputs = evals.map { eval =>
- s"${eval.isNull} ? null : ${eval.primitive}"
+ s"${eval.isNull} ? null : ${eval.value}"
}.mkString(", ")
evals.map(_.code).mkString("\n") + s"""
boolean ${ev.isNull} = false;
- UTF8String ${ev.primitive} = UTF8String.concat($inputs);
- if (${ev.primitive} == null) {
+ UTF8String ${ev.value} = UTF8String.concat($inputs);
+ if (${ev.value} == null) {
${ev.isNull} = true;
}
"""
@@ -104,12 +104,12 @@ case class ConcatWs(children: Seq[Expression])
val evals = children.map(_.gen(ctx))
val inputs = evals.map { eval =>
- s"${eval.isNull} ? (UTF8String) null : ${eval.primitive}"
+ s"${eval.isNull} ? (UTF8String) null : ${eval.value}"
}.mkString(", ")
evals.map(_.code).mkString("\n") + s"""
- UTF8String ${ev.primitive} = UTF8String.concatWs($inputs);
- boolean ${ev.isNull} = ${ev.primitive} == null;
+ UTF8String ${ev.value} = UTF8String.concatWs($inputs);
+ boolean ${ev.isNull} = ${ev.value} == null;
"""
} else {
val array = ctx.freshName("array")
@@ -121,19 +121,19 @@ case class ConcatWs(children: Seq[Expression])
child.dataType match {
case StringType =>
("", // we count all the StringType arguments num at once below.
- s"$array[$idxInVararg ++] = ${eval.isNull} ? (UTF8String) null : ${eval.primitive};")
+ s"$array[$idxInVararg ++] = ${eval.isNull} ? (UTF8String) null : ${eval.value};")
case _: ArrayType =>
val size = ctx.freshName("n")
(s"""
if (!${eval.isNull}) {
- $varargNum += ${eval.primitive}.numElements();
+ $varargNum += ${eval.value}.numElements();
}
""",
s"""
if (!${eval.isNull}) {
- final int $size = ${eval.primitive}.numElements();
+ final int $size = ${eval.value}.numElements();
for (int j = 0; j < $size; j ++) {
- $array[$idxInVararg ++] = ${ctx.getValue(eval.primitive, StringType, "j")};
+ $array[$idxInVararg ++] = ${ctx.getValue(eval.value, StringType, "j")};
}
}
""")
@@ -147,8 +147,8 @@ case class ConcatWs(children: Seq[Expression])
${varargCount.mkString("\n")}
UTF8String[] $array = new UTF8String[$varargNum];
${varargBuild.mkString("\n")}
- UTF8String ${ev.primitive} = UTF8String.concatWs(${evals.head.primitive}, $array);
- boolean ${ev.isNull} = ${ev.primitive} == null;
+ UTF8String ${ev.value} = UTF8String.concatWs(${evals.head.value}, $array);
+ boolean ${ev.isNull} = ${ev.value} == null;
"""
}
}
@@ -308,7 +308,7 @@ case class StringTranslate(srcExpr: Expression, matchingExpr: Expression, replac
${termDict} = org.apache.spark.sql.catalyst.expressions.StringTranslate
.buildDict(${termLastMatching}, ${termLastReplace});
}
- ${ev.primitive} = ${src}.translate(${termDict});
+ ${ev.value} = ${src}.translate(${termDict});
"""
})
}
@@ -334,7 +334,7 @@ case class FindInSet(left: Expression, right: Expression) extends BinaryExpressi
override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
nullSafeCodeGen(ctx, ev, (word, set) =>
- s"${ev.primitive} = $set.findInSet($word);"
+ s"${ev.value} = $set.findInSet($word);"
)
}
@@ -481,7 +481,7 @@ case class StringLocate(substr: Expression, str: Expression, start: Expression)
val strGen = str.gen(ctx)
val startGen = start.gen(ctx)
s"""
- int ${ev.primitive} = 0;
+ int ${ev.value} = 0;
boolean ${ev.isNull} = false;
${startGen.code}
if (!${startGen.isNull}) {
@@ -489,8 +489,8 @@ case class StringLocate(substr: Expression, str: Expression, start: Expression)
if (!${substrGen.isNull}) {
${strGen.code}
if (!${strGen.isNull}) {
- ${ev.primitive} = ${strGen.primitive}.indexOf(${substrGen.primitive},
- ${startGen.primitive}) + 1;
+ ${ev.value} = ${strGen.value}.indexOf(${substrGen.value},
+ ${startGen.value}) + 1;
} else {
${ev.isNull} = true;
}
@@ -586,9 +586,9 @@ case class FormatString(children: Expression*) extends Expression with ImplicitC
if (ctx.boxedType(v._1) != ctx.javaType(v._1)) {
// Java primitives get boxed in order to allow null values.
s"(${v._2.isNull}) ? (${ctx.boxedType(v._1)}) null : " +
- s"new ${ctx.boxedType(v._1)}(${v._2.primitive})"
+ s"new ${ctx.boxedType(v._1)}(${v._2.value})"
} else {
- s"(${v._2.isNull}) ? null : ${v._2.primitive}"
+ s"(${v._2.isNull}) ? null : ${v._2.value}"
}
s + "," + nullSafeString
})
@@ -600,13 +600,13 @@ case class FormatString(children: Expression*) extends Expression with ImplicitC
s"""
${pattern.code}
boolean ${ev.isNull} = ${pattern.isNull};
- ${ctx.javaType(dataType)} ${ev.primitive} = ${ctx.defaultValue(dataType)};
+ ${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};
if (!${ev.isNull}) {
${argListCode.mkString}
$stringBuffer $sb = new $stringBuffer();
$formatter $form = new $formatter($sb, ${classOf[Locale].getName}.US);
- $form.format(${pattern.primitive}.toString() $argListString);
- ${ev.primitive} = UTF8String.fromString($sb.toString());
+ $form.format(${pattern.value}.toString() $argListString);
+ ${ev.value} = UTF8String.fromString($sb.toString());
}
"""
}
@@ -682,7 +682,7 @@ case class StringSpace(child: Expression)
override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
nullSafeCodeGen(ctx, ev, (length) =>
- s"""${ev.primitive} = UTF8String.blankString(($length < 0) ? 0 : $length);""")
+ s"""${ev.value} = UTF8String.blankString(($length < 0) ? 0 : $length);""")
}
override def prettyName: String = "space"
@@ -760,7 +760,7 @@ case class Levenshtein(left: Expression, right: Expression) extends BinaryExpres
override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
nullSafeCodeGen(ctx, ev, (left, right) =>
- s"${ev.primitive} = $left.levenshteinDistance($right);")
+ s"${ev.value} = $left.levenshteinDistance($right);")
}
}
@@ -803,9 +803,9 @@ case class Ascii(child: Expression) extends UnaryExpression with ImplicitCastInp
s"""
byte[] $bytes = $child.getBytes();
if ($bytes.length > 0) {
- ${ev.primitive} = (int) $bytes[0];
+ ${ev.value} = (int) $bytes[0];
} else {
- ${ev.primitive} = 0;
+ ${ev.value} = 0;
}
"""})
}
@@ -827,7 +827,7 @@ case class Base64(child: Expression) extends UnaryExpression with ImplicitCastIn
override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
nullSafeCodeGen(ctx, ev, (child) => {
- s"""${ev.primitive} = UTF8String.fromBytes(
+ s"""${ev.value} = UTF8String.fromBytes(
org.apache.commons.codec.binary.Base64.encodeBase64($child));
"""})
}
@@ -848,7 +848,7 @@ case class UnBase64(child: Expression) extends UnaryExpression with ImplicitCast
override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
nullSafeCodeGen(ctx, ev, (child) => {
s"""
- ${ev.primitive} = org.apache.commons.codec.binary.Base64.decodeBase64($child.toString());
+ ${ev.value} = org.apache.commons.codec.binary.Base64.decodeBase64($child.toString());
"""})
}
}
@@ -875,7 +875,7 @@ case class Decode(bin: Expression, charset: Expression)
nullSafeCodeGen(ctx, ev, (bytes, charset) =>
s"""
try {
- ${ev.primitive} = UTF8String.fromString(new String($bytes, $charset.toString()));
+ ${ev.value} = UTF8String.fromString(new String($bytes, $charset.toString()));
} catch (java.io.UnsupportedEncodingException e) {
org.apache.spark.unsafe.Platform.throwException(e);
}
@@ -905,7 +905,7 @@ case class Encode(value: Expression, charset: Expression)
nullSafeCodeGen(ctx, ev, (string, charset) =>
s"""
try {
- ${ev.primitive} = $string.toString().getBytes($charset.toString());
+ ${ev.value} = $string.toString().getBytes($charset.toString());
} catch (java.io.UnsupportedEncodingException e) {
org.apache.spark.unsafe.Platform.throwException(e);
}""")
@@ -1014,9 +1014,9 @@ case class FormatNumber(x: Expression, d: Expression)
$lastDValue = $d;
$numberFormat.applyPattern($dFormat.toPattern());
}
- ${ev.primitive} = UTF8String.fromString($numberFormat.format(${typeHelper(num)}));
+ ${ev.value} = UTF8String.fromString($numberFormat.format(${typeHelper(num)}));
} else {
- ${ev.primitive} = null;
+ ${ev.value} = null;
${ev.isNull} = true;
}
"""