aboutsummaryrefslogtreecommitdiff
path: root/sql/catalyst
diff options
context:
space:
mode:
authorReynold Xin <rxin@databricks.com>2015-11-12 08:14:08 -0800
committerReynold Xin <rxin@databricks.com>2015-11-12 08:14:08 -0800
commit30e743364313d4b81c99de8f9a7170f5bca2771c (patch)
tree1fd2818eb2e84012db214b97715f9e02b1e30b5b /sql/catalyst
parent14cf753704ea60f358cb870b018cbcf73654f198 (diff)
downloadspark-30e743364313d4b81c99de8f9a7170f5bca2771c.tar.gz
spark-30e743364313d4b81c99de8f9a7170f5bca2771c.tar.bz2
spark-30e743364313d4b81c99de8f9a7170f5bca2771c.zip
[SPARK-11673][SQL] Remove the normal Project physical operator (and keep TungstenProject)
Also makes full outer join able to produce UnsafeRows. Author: Reynold Xin <rxin@databricks.com> Closes #9643 from rxin/SPARK-11673.
Diffstat (limited to 'sql/catalyst')
-rw-r--r-- sql/catalyst/src/main/java/org/apache/spark/sql/execution/UnsafeExternalRowSorter.java | 7
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/Encoder.scala | 8
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala | 6
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/EquivalentExpressions.scala | 2
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Projection.scala | 10
5 files changed, 8 insertions, 25 deletions
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/execution/UnsafeExternalRowSorter.java b/sql/catalyst/src/main/java/org/apache/spark/sql/execution/UnsafeExternalRowSorter.java
index f7063d1e5c..3986d6e18f 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/execution/UnsafeExternalRowSorter.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/execution/UnsafeExternalRowSorter.java
@@ -170,13 +170,6 @@ final class UnsafeExternalRowSorter {
return sort();
}
- /**
- * Return true if UnsafeExternalRowSorter can sort rows with the given schema, false otherwise.
- */
- public static boolean supportsSchema(StructType schema) {
- return UnsafeProjection.canSupport(schema);
- }
-
private static final class RowComparator extends RecordComparator {
private final Ordering<InternalRow> ordering;
private final int numFields;
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/Encoder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/Encoder.scala
index 1ff7340557..6134f9e036 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/Encoder.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/Encoder.scala
@@ -17,13 +17,13 @@
package org.apache.spark.sql
+import scala.reflect.ClassTag
+
import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.types.{ObjectType, StructField, StructType}
import org.apache.spark.util.Utils
-import scala.reflect.ClassTag
-
/**
* Used to convert a JVM object of type `T` to and from the internal Spark SQL representation.
*
@@ -123,9 +123,9 @@ object Encoders {
new ExpressionEncoder[Any](
schema,
- false,
+ flat = false,
extractExpressions,
constructExpression,
- ClassTag.apply(cls))
+ ClassTag(cls))
}
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala
index 0b8a8abd02..6d822261b0 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala
@@ -153,18 +153,18 @@ trait ScalaReflection {
*/
def constructorFor[T : TypeTag]: Expression = constructorFor(typeOf[T], None)
- protected def constructorFor(
+ private def constructorFor(
tpe: `Type`,
path: Option[Expression]): Expression = ScalaReflectionLock.synchronized {
/** Returns the current path with a sub-field extracted. */
- def addToPath(part: String) =
+ def addToPath(part: String): Expression =
path
.map(p => UnresolvedExtractValue(p, expressions.Literal(part)))
.getOrElse(UnresolvedAttribute(part))
/** Returns the current path with a field at ordinal extracted. */
- def addToPathOrdinal(ordinal: Int, dataType: DataType) =
+ def addToPathOrdinal(ordinal: Int, dataType: DataType): Expression =
path
.map(p => GetStructField(p, StructField(s"_$ordinal", dataType), ordinal))
.getOrElse(BoundReference(ordinal, dataType, false))
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/EquivalentExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/EquivalentExpressions.scala
index f83df494ba..f7162e420d 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/EquivalentExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/EquivalentExpressions.scala
@@ -77,7 +77,7 @@ class EquivalentExpressions {
* an empty collection if there are none.
*/
def getEquivalentExprs(e: Expression): Seq[Expression] = {
- equivalenceMap.get(Expr(e)).getOrElse(mutable.MutableList())
+ equivalenceMap.getOrElse(Expr(e), mutable.MutableList())
}
/**
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Projection.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Projection.scala
index 9f0b7821ae..053e612f3e 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Projection.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Projection.scala
@@ -102,16 +102,6 @@ abstract class UnsafeProjection extends Projection {
object UnsafeProjection {
- /*
- * Returns whether UnsafeProjection can support given StructType, Array[DataType] or
- * Seq[Expression].
- */
- def canSupport(schema: StructType): Boolean = canSupport(schema.fields.map(_.dataType))
- def canSupport(exprs: Seq[Expression]): Boolean = canSupport(exprs.map(_.dataType).toArray)
- private def canSupport(types: Array[DataType]): Boolean = {
- types.forall(GenerateUnsafeProjection.canSupport)
- }
-
/**
* Returns an UnsafeProjection for given StructType.
*/