author     Dongjoon Hyun <dongjoon@apache.org>    2016-07-03 16:59:40 +0800
committer  Wenchen Fan <wenchen@databricks.com>   2016-07-03 16:59:40 +0800
commit     54b27c1797fcd32b3f3e9d44e1a149ae396a61e6 (patch)
tree       9d00060aa80e539659bbb8d202bfcddf628c885c /sql/catalyst/src/main
parent     ea990f96930066c36055734d4f17eaf8e496eb3f (diff)
[SPARK-16278][SPARK-16279][SQL] Implement map_keys/map_values SQL functions
## What changes were proposed in this pull request?

This PR adds the `map_keys` and `map_values` SQL functions in order to remove the Hive fallback.

## How was this patch tested?

Pass the Jenkins tests, including the new test cases.

Author: Dongjoon Hyun <dongjoon@apache.org>

Closes #13967 from dongjoon-hyun/SPARK-16278.
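As a quick usage sketch (not part of the patch itself), once this commit is in place the two functions resolve through `FunctionRegistry` instead of the Hive fallback and can be called from Spark SQL. The object name, app name, and local master below are illustrative assumptions; the expected results follow the `@ExpressionDescription` examples added in `collectionOperations.scala`:

```scala
// Usage sketch only: assumes a local SparkSession; names are illustrative.
import org.apache.spark.sql.SparkSession

object MapKeysValuesExample {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("map_keys-map_values-example")
      .getOrCreate()

    // map(1, 'a', 2, 'b') builds a map column; map_keys/map_values return its keys
    // and values as arrays, with no ordering guarantee.
    spark.sql(
      "SELECT map_keys(map(1, 'a', 2, 'b')) AS ks, map_values(map(1, 'a', 2, 'b')) AS vs")
      .show()
    // Expected, per the @ExpressionDescription examples in the diff below:
    //   ks = [1, 2], vs = [a, b]

    spark.stop()
  }
}
```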
Diffstat (limited to 'sql/catalyst/src/main')
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala        2
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala  48
2 files changed, 50 insertions, 0 deletions
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
index 26b0c30db4..e7f335f4fb 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
@@ -171,6 +171,8 @@ object FunctionRegistry {
     expression[IsNotNull]("isnotnull"),
     expression[Least]("least"),
     expression[CreateMap]("map"),
+    expression[MapKeys]("map_keys"),
+    expression[MapValues]("map_values"),
     expression[CreateNamedStruct]("named_struct"),
     expression[NaNvl]("nanvl"),
     expression[NullIf]("nullif"),
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala
index c71cb73d65..2e8ea1107c 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala
@@ -44,6 +44,54 @@ case class Size(child: Expression) extends UnaryExpression with ExpectsInputType
 }
 
 /**
+ * Returns an unordered array containing the keys of the map.
+ */
+@ExpressionDescription(
+  usage = "_FUNC_(map) - Returns an unordered array containing the keys of the map.",
+  extended = " > SELECT _FUNC_(map(1, 'a', 2, 'b'));\n [1,2]")
+case class MapKeys(child: Expression)
+  extends UnaryExpression with ExpectsInputTypes {
+
+  override def inputTypes: Seq[AbstractDataType] = Seq(MapType)
+
+  override def dataType: DataType = ArrayType(child.dataType.asInstanceOf[MapType].keyType)
+
+  override def nullSafeEval(map: Any): Any = {
+    map.asInstanceOf[MapData].keyArray()
+  }
+
+  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
+    nullSafeCodeGen(ctx, ev, c => s"${ev.value} = ($c).keyArray();")
+  }
+
+  override def prettyName: String = "map_keys"
+}
+
+/**
+ * Returns an unordered array containing the values of the map.
+ */
+@ExpressionDescription(
+  usage = "_FUNC_(map) - Returns an unordered array containing the values of the map.",
+  extended = " > SELECT _FUNC_(map(1, 'a', 2, 'b'));\n [\"a\",\"b\"]")
+case class MapValues(child: Expression)
+  extends UnaryExpression with ExpectsInputTypes {
+
+  override def inputTypes: Seq[AbstractDataType] = Seq(MapType)
+
+  override def dataType: DataType = ArrayType(child.dataType.asInstanceOf[MapType].valueType)
+
+  override def nullSafeEval(map: Any): Any = {
+    map.asInstanceOf[MapData].valueArray()
+  }
+
+  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
+    nullSafeCodeGen(ctx, ev, c => s"${ev.value} = ($c).valueArray();")
+  }
+
+  override def prettyName: String = "map_values"
+}
+
+/**
  * Sorts the input array in ascending / descending order according to the natural ordering of
  * the array elements and returns it.
  */
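Beyond the SQL surface, the new Catalyst expressions can also be constructed and evaluated directly. The snippet below is a hedged sketch in the spirit of Catalyst expression unit tests; it is not the test code added by this PR, and the literal map and its type annotation are illustrative assumptions:

```scala
// Sketch only: evaluates the new expressions eagerly, the way Catalyst expression
// suites typically do; not the exact tests added in this commit.
import org.apache.spark.sql.catalyst.expressions.{Literal, MapKeys, MapValues}
import org.apache.spark.sql.catalyst.util.ArrayData
import org.apache.spark.sql.types.{IntegerType, MapType, StringType}

val m = Literal.create(Map(1 -> "a", 2 -> "b"), MapType(IntegerType, StringType))

// MapKeys/MapValues are null-safe UnaryExpressions: a null map yields null;
// otherwise they return the key/value arrays of the underlying MapData.
val keys = MapKeys(m).eval().asInstanceOf[ArrayData]     // elements 1, 2
val values = MapValues(m).eval().asInstanceOf[ArrayData] // elements "a", "b" (as UTF8String)
```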