author    Dilip Biswal <dbiswal@us.ibm.com>    2015-10-29 18:29:50 +0100
committer Michael Armbrust <michael@databricks.com>    2015-10-29 18:29:50 +0100
commit    8185f038c13c72e1bea7b0921b84125b7a352139 (patch)
tree      3f476d7a339db0d25f3132f0d51205b899d9b7ff /sql/hive-thriftserver
parent    f7a51deebad1b4c3b970a051f25d286110b94438 (diff)
[SPARK-11188][SQL] Elide stacktraces in bin/spark-sql for AnalysisExceptions
Only print the error message to the console for Analysis Exceptions in sql-shell.

Author: Dilip Biswal <dbiswal@us.ibm.com>

Closes #9194 from dilipbiswal/spark-11188.
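The gist of the change: when a query fails analysis (a user-level mistake such as selecting from a missing table), the CLI now prints just the exception message instead of a full stack trace, while all other failures keep the trace. A minimal standalone sketch of that dispatch, using a local stand-in for org.apache.spark.sql.AnalysisException rather than the real class:

object ErrorReportingSketch {
  // Stand-in for org.apache.spark.sql.AnalysisException, for illustration only.
  class AnalysisException(message: String) extends Exception(message)

  def report(e: Throwable, err: java.io.PrintStream = System.err): Unit = e match {
    case ae: AnalysisException =>
      // User-facing analysis error: a single line, no stack trace.
      err.println(s"Error in query: ${ae.getMessage}")
    case other =>
      // Anything unexpected still gets the full trace.
      other.printStackTrace(err)
  }

  def main(args: Array[String]): Unit = {
    report(new AnalysisException("Table not found: nonexistent_table"))
    report(new IllegalStateException("unexpected failure"))
  }
}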
Diffstat (limited to 'sql/hive-thriftserver')
-rw-r--r--  sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala  10
-rw-r--r--  sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLDriver.scala     11
-rw-r--r--  sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala           12
3 files changed, 27 insertions(+), 6 deletions(-)
diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala
index b5073961a1..62e912c69a 100644
--- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala
+++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala
@@ -20,6 +20,8 @@ package org.apache.spark.sql.hive.thriftserver
import java.io._
import java.util.{ArrayList => JArrayList, Locale}
+import org.apache.spark.sql.AnalysisException
+
import scala.collection.JavaConverters._
import jline.console.ConsoleReader
@@ -298,6 +300,7 @@ private[hive] class SparkSQLCLIDriver extends CliDriver with Logging {
driver.init()
val out = sessionState.out
+ val err = sessionState.err
val start: Long = System.currentTimeMillis()
if (sessionState.getIsVerbose) {
out.println(cmd)
@@ -308,7 +311,12 @@ private[hive] class SparkSQLCLIDriver extends CliDriver with Logging {
ret = rc.getResponseCode
if (ret != 0) {
- console.printError(rc.getErrorMessage())
+ // For analysis exception, only the error is printed out to the console.
+ rc.getException() match {
+ case e : AnalysisException =>
+ err.println(s"""Error in query: ${e.getMessage}""")
+ case _ => err.println(rc.getErrorMessage())
+ }
driver.close()
return ret
}
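The match on rc.getException() above only works if the response object carries the original Throwable, not just its rendered stack trace; that is what the SparkSQLDriver change below provides by passing the exception into the CommandProcessorResponse. A rough sketch of that carrier pattern, with hypothetical names (ResponseSketch, run) in place of Hive's API:

object ResponseCarrierSketch {
  // Hypothetical stand-in for Hive's CommandProcessorResponse: it keeps the
  // Throwable itself so callers can match on its concrete type.
  final case class ResponseSketch(code: Int, errorMessage: String, cause: Throwable = null)

  def run(body: => Unit): ResponseSketch =
    try { body; ResponseSketch(0, "") }
    catch {
      case t: Throwable => ResponseSketch(1, t.getMessage, t)
    }

  def main(args: Array[String]): Unit = {
    val rc = run(sys.error("boom"))
    // Caller-side dispatch, analogous to rc.getException() in the hunk above.
    Option(rc.cause).foreach {
      case e: RuntimeException => println(s"Error in query: ${e.getMessage}")
      case other               => other.printStackTrace()
    }
  }
}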
diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLDriver.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLDriver.scala
index 2619286afc..f1ec723852 100644
--- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLDriver.scala
+++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLDriver.scala
@@ -18,6 +18,8 @@
package org.apache.spark.sql.hive.thriftserver
import java.util.{Arrays, ArrayList => JArrayList, List => JList}
+import org.apache.log4j.LogManager
+import org.apache.spark.sql.AnalysisException
import scala.collection.JavaConverters._
@@ -63,9 +65,12 @@ private[hive] class SparkSQLDriver(
tableSchema = getResultSetSchema(execution)
new CommandProcessorResponse(0)
} catch {
- case cause: Throwable =>
- logError(s"Failed in [$command]", cause)
- new CommandProcessorResponse(1, ExceptionUtils.getStackTrace(cause), null)
+ case ae: AnalysisException =>
+ logDebug(s"Failed in [$command]", ae)
+ new CommandProcessorResponse(1, ExceptionUtils.getStackTrace(ae), null, ae)
+ case cause: Throwable =>
+ logError(s"Failed in [$command]", cause)
+ new CommandProcessorResponse(1, ExceptionUtils.getStackTrace(cause), null, cause)
}
}
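Besides attaching the exception, the new catch arm also demotes the log level: an analysis error is an expected user mistake, so it is logged at debug, while anything else stays at error. A standalone sketch of that split using java.util.logging instead of Spark's Logging trait (AnalysisException is again a local stand-in):

object LogLevelSketch {
  import java.util.logging.{Level, Logger}

  class AnalysisException(message: String) extends Exception(message) // stand-in
  private val log = Logger.getLogger("SparkSQLDriverSketch")

  def execute(command: String)(body: => Unit): Int =
    try { body; 0 }
    catch {
      case ae: AnalysisException =>
        // Expected user error: keep the trace out of the log unless debugging.
        log.log(Level.FINE, s"Failed in [$command]", ae)
        1
      case cause: Throwable =>
        // Unexpected failure: still worth an error-level entry with the trace.
        log.log(Level.SEVERE, s"Failed in [$command]", cause)
        1
    }

  def main(args: Array[String]): Unit = {
    execute("select * from nonexistent_table") { throw new AnalysisException("Table not found") }
    execute("select 1") { throw new IllegalStateException("unexpected") }
  }
}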
diff --git a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala
index 76d1591a23..3fa5c8528b 100644
--- a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala
+++ b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala
@@ -58,7 +58,7 @@ class CliSuite extends SparkFunSuite with BeforeAndAfter with Logging {
* @param timeout maximum time for the commands to complete
* @param extraArgs any extra arguments
* @param errorResponses a sequence of strings whose presence in the stdout of the forked process
- * is taken as an immediate error condition. That is: if a line beginning
+ * is taken as an immediate error condition. That is: if a line containing
* with one of these strings is found, fail the test immediately.
* The default value is `Seq("Error:")`
*
@@ -104,7 +104,7 @@ class CliSuite extends SparkFunSuite with BeforeAndAfter with Logging {
}
} else {
errorResponses.foreach { r =>
- if (line.startsWith(r)) {
+ if (line.contains(r)) {
foundAllExpectedAnswers.tryFailure(
new RuntimeException(s"Failed with error line '$line'"))
}
@@ -219,4 +219,12 @@ class CliSuite extends SparkFunSuite with BeforeAndAfter with Logging {
-> "OK"
)
}
+
+ test("SPARK-11188 Analysis error reporting") {
+ runCliWithin(timeout = 2.minute,
+ errorResponses = Seq("AnalysisException"))(
+ "select * from nonexistent_table;"
+ -> "Error in query: Table not found: nonexistent_table;"
+ )
+ }
}
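The startsWith-to-contains switch in the suite exists because the string being watched for here ("AnalysisException") shows up mid-line, e.g. inside a logged stack trace after a timestamp and logger prefix, so a prefix match would never trip. A small illustration with a made-up console line:

object ErrorLineMatchSketch {
  def main(args: Array[String]): Unit = {
    val errorResponses = Seq("AnalysisException")
    // Hypothetical output line, for illustration only.
    val line = "15/10/29 18:29:50 ERROR SparkSQLDriver: org.apache.spark.sql.AnalysisException: Table not found"

    println(errorResponses.exists(r => line.startsWith(r))) // false: the old check misses it
    println(errorResponses.exists(r => line.contains(r)))   // true: the new check catches it
  }
}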