From c0b47bada3c9f0e9e0f14ab41ffb91012a357211 Mon Sep 17 00:00:00 2001
From: Cheng Hao
Date: Fri, 1 Aug 2014 11:42:05 -0700
Subject: [SPARK-2767] [SQL] SparkSQL CLI doesn't output error message if query failed.

Author: Cheng Hao

Closes #1686 from chenghao-intel/spark_sql_cli and squashes the following commits:

eb664cc [Cheng Hao] Output detailed failure message in console
93b0382 [Cheng Hao] Fix Bug of no output in cli if exception thrown internally
---
 .../org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala | 4 +++-
 .../org/apache/spark/sql/hive/thriftserver/SparkSQLDriver.scala    | 3 +--
 .../src/main/scala/org/apache/spark/sql/hive/HiveContext.scala     | 7 ++++---
 3 files changed, 8 insertions(+), 6 deletions(-)

diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala
index 27268ecb92..cb17d7ce58 100755
--- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala
+++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala
@@ -288,8 +288,10 @@ private[hive] class SparkSQLCLIDriver extends CliDriver with Logging {
             out.println(cmd)
           }
 
-          ret = driver.run(cmd).getResponseCode
+          val rc = driver.run(cmd)
+          ret = rc.getResponseCode
           if (ret != 0) {
+            console.printError(rc.getErrorMessage())
             driver.close()
             return ret
           }
diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLDriver.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLDriver.scala
index 5202aa9903..a56b19a4bc 100644
--- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLDriver.scala
+++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLDriver.scala
@@ -53,10 +53,9 @@ private[hive] class SparkSQLDriver(val context: HiveContext = SparkSQLEnv.hiveCo
   }
 
   override def run(command: String): CommandProcessorResponse = {
-    val execution = context.executePlan(context.hql(command).logicalPlan)
-
     // TODO unify the error code
     try {
+      val execution = context.executePlan(context.hql(command).logicalPlan)
       hiveResponse = execution.stringResult()
       tableSchema = getResultSetSchema(execution)
       new CommandProcessorResponse(0)
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
index 27b444daba..7e3b8727be 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
@@ -131,12 +131,13 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) {
   @transient protected[hive] lazy val sessionState = {
     val ss = new SessionState(hiveconf)
     set(hiveconf.getAllProperties) // Have SQLConf pick up the initial set of HiveConf.
+
+    ss.err = new PrintStream(outputBuffer, true, "UTF-8")
+    ss.out = new PrintStream(outputBuffer, true, "UTF-8")
+
     ss
   }
 
-  sessionState.err = new PrintStream(outputBuffer, true, "UTF-8")
-  sessionState.out = new PrintStream(outputBuffer, true, "UTF-8")
-
   override def set(key: String, value: String): Unit = {
     super.set(key, value)
     runSqlHive(s"SET $key=$value")
--
cgit v1.2.3
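
Editor's note: the first hunk above changes the CLI to keep the whole CommandProcessorResponse (not just its response code) and to print getErrorMessage() to the console when the code is non-zero. The following is a minimal, self-contained Scala sketch of that pattern only; the Response case class and console object below are hypothetical stand-ins, not Hive's actual CommandProcessorResponse or SessionState console API.

// Sketch of the error-reporting flow introduced by the patch, under the
// assumptions stated in the note above (Response and console are stand-ins).
object CliErrorReportingSketch {
  // Hypothetical stand-in for a command processor response: code + error message.
  final case class Response(responseCode: Int, errorMessage: String = "")

  // Hypothetical stand-in for the CLI console used for error output.
  object console {
    def printError(msg: String): Unit = System.err.println(msg)
  }

  // Run a command, surface the error message when the response code is non-zero,
  // and return the response code to the caller (mirroring processCmd's behavior).
  def runAndReport(run: String => Response)(cmd: String): Int = {
    val rc = run(cmd)
    if (rc.responseCode != 0) {
      console.printError(rc.errorMessage)
    }
    rc.responseCode
  }

  def main(args: Array[String]): Unit = {
    // Simulate a failing query so the error message is printed instead of being swallowed.
    val failingRun = (cmd: String) => Response(1, s"FAILED: could not execute: $cmd")
    runAndReport(failingRun)("SELECT * FROM missing_table")
  }
}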