author     Reynold Xin <rxin@databricks.com>  2015-11-11 12:47:02 -0800
committer  Reynold Xin <rxin@databricks.com>  2015-11-11 12:47:02 -0800
commit     df97df2b39194f60051f78cce23f0ba6cfe4b1df (patch)
tree       372c82c02e3bd4e4a5895adfb954534ed42a5ae7 /sql/hive
parent     27029bc8f6246514bd0947500c94cf38dc8616c3 (diff)
download   spark-df97df2b39194f60051f78cce23f0ba6cfe4b1df.tar.gz
           spark-df97df2b39194f60051f78cce23f0ba6cfe4b1df.tar.bz2
           spark-df97df2b39194f60051f78cce23f0ba6cfe4b1df.zip
[SPARK-11644][SQL] Remove the option to turn off unsafe and codegen.
Author: Reynold Xin <rxin@databricks.com>

Closes #9618 from rxin/SPARK-11644.
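With the SQLConf.UNSAFE_ENABLED and SQLConf.CODEGEN_ENABLED switches gone, the variant test suites below no longer need to flip a flag around the shared tests. Condensed from the diff that follows, the before/after shape of the change is roughly this (a sketch, not the complete patch; it relies on the suite's existing sqlContext and SQLConf imports):

// Before (removed below): each variant suite saved, flipped, and restored the flag.
class TungstenAggregationQuerySuite extends AggregationQuerySuite {
  var originalUnsafeEnabled: Boolean = _

  override def beforeAll(): Unit = {
    originalUnsafeEnabled = sqlContext.conf.unsafeEnabled
    sqlContext.setConf(SQLConf.UNSAFE_ENABLED.key, "true")
    super.beforeAll()
  }

  override def afterAll(): Unit = {
    super.afterAll()
    sqlContext.setConf(SQLConf.UNSAFE_ENABLED.key, originalUnsafeEnabled.toString)
  }
}

// After: unsafe and codegen are always on, so the variant collapses to a plain alias
// of the shared suite, and SortBasedAggregationQuerySuite is dropped entirely.
class TungstenAggregationQuerySuite extends AggregationQuerySuite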
Diffstat (limited to 'sql/hive')
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/AggregationQuerySuite.scala | 44
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveExplainSuite.scala      |  3
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDFSuite.scala          | 72
3 files changed, 34 insertions, 85 deletions
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/AggregationQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/AggregationQuerySuite.scala
index 22d2aefd69..61e3e913c2 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/AggregationQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/AggregationQuerySuite.scala
@@ -808,54 +808,12 @@ abstract class AggregationQuerySuite extends QueryTest with SQLTestUtils with Te
}
}
-class SortBasedAggregationQuerySuite extends AggregationQuerySuite {
- var originalUnsafeEnabled: Boolean = _
+class TungstenAggregationQuerySuite extends AggregationQuerySuite
- override def beforeAll(): Unit = {
- originalUnsafeEnabled = sqlContext.conf.unsafeEnabled
- sqlContext.setConf(SQLConf.UNSAFE_ENABLED.key, "false")
- super.beforeAll()
- }
-
- override def afterAll(): Unit = {
- super.afterAll()
- sqlContext.setConf(SQLConf.UNSAFE_ENABLED.key, originalUnsafeEnabled.toString)
- }
-}
-
-class TungstenAggregationQuerySuite extends AggregationQuerySuite {
-
- var originalUnsafeEnabled: Boolean = _
-
- override def beforeAll(): Unit = {
- originalUnsafeEnabled = sqlContext.conf.unsafeEnabled
- sqlContext.setConf(SQLConf.UNSAFE_ENABLED.key, "true")
- super.beforeAll()
- }
-
- override def afterAll(): Unit = {
- super.afterAll()
- sqlContext.setConf(SQLConf.UNSAFE_ENABLED.key, originalUnsafeEnabled.toString)
- }
-}
class TungstenAggregationQueryWithControlledFallbackSuite extends AggregationQuerySuite {
- var originalUnsafeEnabled: Boolean = _
-
- override def beforeAll(): Unit = {
- originalUnsafeEnabled = sqlContext.conf.unsafeEnabled
- sqlContext.setConf(SQLConf.UNSAFE_ENABLED.key, "true")
- super.beforeAll()
- }
-
- override def afterAll(): Unit = {
- super.afterAll()
- sqlContext.setConf(SQLConf.UNSAFE_ENABLED.key, originalUnsafeEnabled.toString)
- sqlContext.conf.unsetConf("spark.sql.TungstenAggregate.testFallbackStartsAt")
- }
-
override protected def checkAnswer(actual: => DataFrame, expectedAnswer: Seq[Row]): Unit = {
(0 to 2).foreach { fallbackStartsAt =>
sqlContext.setConf(
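The hunk above cuts off inside the retained checkAnswer override. For context, here is a sketch of how such an override can continue; the loop and the "spark.sql.TungstenAggregate.testFallbackStartsAt" key come from the diff, while the QueryTest.checkAnswer call and failure handling are assumptions, not part of this patch:

// Re-run each query several times, forcing the Tungsten aggregation to fall back
// to the sort-based path at different thresholds (sketch, not the patch itself).
override protected def checkAnswer(actual: => DataFrame, expectedAnswer: Seq[Row]): Unit = {
  (0 to 2).foreach { fallbackStartsAt =>
    sqlContext.setConf(
      "spark.sql.TungstenAggregate.testFallbackStartsAt",
      fallbackStartsAt.toString)
    // Assumed helper: QueryTest.checkAnswer returns an error message when rows differ.
    QueryTest.checkAnswer(actual, expectedAnswer).foreach { errorMessage =>
      fail(s"Failed with fallbackStartsAt = $fallbackStartsAt:\n$errorMessage")
    }
  }
}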
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveExplainSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveExplainSuite.scala
index 94162da4ea..a7b7ad0093 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveExplainSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveExplainSuite.scala
@@ -37,8 +37,7 @@ class HiveExplainSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
"== Parsed Logical Plan ==",
"== Analyzed Logical Plan ==",
"== Optimized Logical Plan ==",
- "== Physical Plan ==",
- "Code Generation")
+ "== Physical Plan ==")
}
test("explain create table command") {
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDFSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDFSuite.scala
index 5f9a447759..5ab477efc4 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDFSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDFSuite.scala
@@ -28,11 +28,11 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectIn
import org.apache.hadoop.hive.serde2.objectinspector.{ObjectInspector, ObjectInspectorFactory}
import org.apache.hadoop.hive.serde2.{AbstractSerDe, SerDeStats}
import org.apache.hadoop.io.Writable
-import org.apache.spark.sql.{AnalysisException, QueryTest, Row, SQLConf}
+import org.apache.spark.sql.{AnalysisException, QueryTest, Row}
import org.apache.spark.sql.hive.test.TestHiveSingleton
-
import org.apache.spark.util.Utils
+
case class Fields(f1: Int, f2: Int, f3: Int, f4: Int, f5: Int)
// Case classes for the custom UDF's.
@@ -92,44 +92,36 @@ class HiveUDFSuite extends QueryTest with TestHiveSingleton {
}
test("Max/Min on named_struct") {
- def testOrderInStruct(): Unit = {
- checkAnswer(sql(
- """
- |SELECT max(named_struct(
- | "key", key,
- | "value", value)).value FROM src
- """.stripMargin), Seq(Row("val_498")))
- checkAnswer(sql(
- """
- |SELECT min(named_struct(
- | "key", key,
- | "value", value)).value FROM src
- """.stripMargin), Seq(Row("val_0")))
-
- // nested struct cases
- checkAnswer(sql(
- """
- |SELECT max(named_struct(
- | "key", named_struct(
- "key", key,
- "value", value),
- | "value", value)).value FROM src
- """.stripMargin), Seq(Row("val_498")))
- checkAnswer(sql(
- """
- |SELECT min(named_struct(
- | "key", named_struct(
- "key", key,
- "value", value),
- | "value", value)).value FROM src
- """.stripMargin), Seq(Row("val_0")))
- }
- val codegenDefault = hiveContext.getConf(SQLConf.CODEGEN_ENABLED)
- hiveContext.setConf(SQLConf.CODEGEN_ENABLED, true)
- testOrderInStruct()
- hiveContext.setConf(SQLConf.CODEGEN_ENABLED, false)
- testOrderInStruct()
- hiveContext.setConf(SQLConf.CODEGEN_ENABLED, codegenDefault)
+ checkAnswer(sql(
+ """
+ |SELECT max(named_struct(
+ | "key", key,
+ | "value", value)).value FROM src
+ """.stripMargin), Seq(Row("val_498")))
+ checkAnswer(sql(
+ """
+ |SELECT min(named_struct(
+ | "key", key,
+ | "value", value)).value FROM src
+ """.stripMargin), Seq(Row("val_0")))
+
+ // nested struct cases
+ checkAnswer(sql(
+ """
+ |SELECT max(named_struct(
+ | "key", named_struct(
+ "key", key,
+ "value", value),
+ | "value", value)).value FROM src
+ """.stripMargin), Seq(Row("val_498")))
+ checkAnswer(sql(
+ """
+ |SELECT min(named_struct(
+ | "key", named_struct(
+ "key", key,
+ "value", value),
+ | "value", value)).value FROM src
+ """.stripMargin), Seq(Row("val_0")))
}
test("SPARK-6409 UDAF Average test") {