aboutsummaryrefslogtreecommitdiff
path: root/sql
diff options
context:
space:
mode:
authorYin Huai <yhuai@databricks.com>2015-06-25 06:52:03 -0700
committerYin Huai <yhuai@databricks.com>2015-06-25 06:52:03 -0700
commitf9b397f54d1c491680d70aba210bb8211fd249c1 (patch)
tree0d3232294068e0134b63d22095a516cc0efb5cb5 /sql
parente988adb58f02d06065837f3d79eee220f6558def (diff)
downloadspark-f9b397f54d1c491680d70aba210bb8211fd249c1.tar.gz
spark-f9b397f54d1c491680d70aba210bb8211fd249c1.tar.bz2
spark-f9b397f54d1c491680d70aba210bb8211fd249c1.zip
[SPARK-8567] [SQL] Add logs to record the progress of HiveSparkSubmitSuite.
Author: Yin Huai <yhuai@databricks.com>. Closes #7009 from yhuai/SPARK-8567 and squashes the following commits: 62fb1f9 [Yin Huai] Add sc.stop(). b22cf7d [Yin Huai] Add logs.
Diffstat (limited to 'sql')
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala | 9
1 file changed, 9 insertions(+), 0 deletions(-)
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
index b875e52b98..a38ed23b5c 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
@@ -115,6 +115,7 @@ object SparkSubmitClassLoaderTest extends Logging {
val sc = new SparkContext(conf)
val hiveContext = new TestHiveContext(sc)
val df = hiveContext.createDataFrame((1 to 100).map(i => (i, i))).toDF("i", "j")
+ logInfo("Testing load classes at the driver side.")
// First, we load classes at driver side.
try {
Class.forName(args(0), true, Thread.currentThread().getContextClassLoader)
@@ -124,6 +125,7 @@ object SparkSubmitClassLoaderTest extends Logging {
throw new Exception("Could not load user class from jar:\n", t)
}
// Second, we load classes at the executor side.
+ logInfo("Testing load classes at the executor side.")
val result = df.mapPartitions { x =>
var exception: String = null
try {
@@ -141,6 +143,7 @@ object SparkSubmitClassLoaderTest extends Logging {
}
// Load a Hive UDF from the jar.
+ logInfo("Registering temporary Hive UDF provided in a jar.")
hiveContext.sql(
"""
|CREATE TEMPORARY FUNCTION example_max
@@ -150,18 +153,23 @@ object SparkSubmitClassLoaderTest extends Logging {
hiveContext.createDataFrame((1 to 10).map(i => (i, s"str$i"))).toDF("key", "val")
source.registerTempTable("sourceTable")
// Load a Hive SerDe from the jar.
+ logInfo("Creating a Hive table with a SerDe provided in a jar.")
hiveContext.sql(
"""
|CREATE TABLE t1(key int, val string)
|ROW FORMAT SERDE 'org.apache.hive.hcatalog.data.JsonSerDe'
""".stripMargin)
// Actually use the loaded UDF and SerDe.
+ logInfo("Writing data into the table.")
hiveContext.sql(
"INSERT INTO TABLE t1 SELECT example_max(key) as key, val FROM sourceTable GROUP BY val")
+ logInfo("Running a simple query on the table.")
val count = hiveContext.table("t1").orderBy("key", "val").count()
if (count != 10) {
throw new Exception(s"table t1 should have 10 rows instead of $count rows")
}
+ logInfo("Test finishes.")
+ sc.stop()
}
}
@@ -199,5 +207,6 @@ object SparkSQLConfTest extends Logging {
val hiveContext = new TestHiveContext(sc)
// Run a simple command to make sure all lazy vals in hiveContext get instantiated.
hiveContext.tables().collect()
+ sc.stop()
}
}