author     Hossein <hossein@databricks.com>                    2015-07-30 16:16:17 -0700
committer  Shivaram Venkataraman <shivaram@cs.berkeley.edu>    2015-07-30 16:16:17 -0700
commit     157840d1b14502a4f25cff53633c927998c6ada1 (patch)
tree       3f5ffc894eeef52675a094bd44cc60e1b408a6f6
parent     e7905a9395c1a002f50bab29e16a729e14d4ed6f (diff)
[SPARK-8742] [SPARKR] Improve SparkR error messages for DataFrame API
This patch improves SparkR error message reporting, especially with the DataFrame API. When there is a user error (e.g., a malformed SQL query), the message of the cause is sent back through the RPC and the R client reads it and returns it to the user.

cc shivaram

Author: Hossein <hossein@databricks.com>

Closes #7742 from falaki/SPARK-8742 and squashes the following commits:

4f643c9 [Hossein] Not logging exceptions in RBackendHandler
4a8005c [Hossein] Returning stack trace of causing exception from RBackendHandler
5cf17f0 [Hossein] Adding unit test for error messages from SQLContext
2af75d5 [Hossein] Reading error message in case of failure and stopping with that message
f479c99 [Hossein] Writing exception cause message in JVM
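For illustration, a minimal sketch of the user-visible effect of this change, assuming a SparkR 1.x session with a `sqlContext` as used elsewhere in this patch (the session setup and the failing query are illustrative, not part of the commit):

    # Minimal sketch: catch a failing DataFrame call and inspect the message
    # that, with this patch, carries the JVM-side cause.
    library(SparkR)

    sc <- sparkR.init(master = "local")
    sqlContext <- sparkRSQL.init(sc)

    err <- tryCatch(
      sql(sqlContext, "select * from blah"),  # table does not exist
      error = function(e) e
    )

    # Before the patch this was a generic stopifnot() failure; now the
    # condition message contains the cause, e.g. "Table Not Found: blah".
    cat(conditionMessage(err), "\n")

    sparkR.stop()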
-rw-r--r--  R/pkg/R/backend.R                                                   4
-rw-r--r--  R/pkg/inst/tests/test_sparkSQL.R                                    5
-rw-r--r--  core/src/main/scala/org/apache/spark/api/r/RBackendHandler.scala   10
3 files changed, 16 insertions(+), 3 deletions(-)
diff --git a/R/pkg/R/backend.R b/R/pkg/R/backend.R
index 2fb6fae55f..49162838b8 100644
--- a/R/pkg/R/backend.R
+++ b/R/pkg/R/backend.R
@@ -110,6 +110,8 @@ invokeJava <- function(isStatic, objId, methodName, ...) {
   # TODO: check the status code to output error information
   returnStatus <- readInt(conn)
-  stopifnot(returnStatus == 0)
+  if (returnStatus != 0) {
+    stop(readString(conn))
+  }
   readObject(conn)
 }
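To make the new client-side error path concrete, here is a small, self-contained sketch of what invokeJava() now does with the backend reply: read a status integer and, if it is non-zero, read a string payload and raise it with stop(). The readInt()/readString() helpers below are simplified stand-ins for SparkR's internal SerDe functions; 4-byte big-endian integers and length-prefixed strings are assumptions made for this sketch, not a verbatim copy of the protocol.

    # Simplified stand-ins for SparkR's internal SerDe helpers (assumption:
    # 4-byte big-endian integers, length-prefixed strings).
    readInt <- function(con) {
      readBin(con, integer(), n = 1L, endian = "big")
    }
    readString <- function(con) {
      len <- readInt(con)
      rawToChar(readBin(con, raw(), n = len))
    }

    # Simulate a backend reply: status -1 followed by an error string.
    msg <- "Table Not Found: blah"
    payload <- c(
      writeBin(-1L, raw(), endian = "big"),
      writeBin(nchar(msg, type = "bytes"), raw(), endian = "big"),
      charToRaw(msg)
    )
    conn <- rawConnection(payload, open = "rb")

    # The new error path: a non-zero status means the next field is the
    # error message written by the JVM backend.
    result <- tryCatch({
      returnStatus <- readInt(conn)
      if (returnStatus != 0) {
        stop(readString(conn))
      }
      # on success, invokeJava() would go on to readObject(conn)
    }, error = function(e) conditionMessage(e))
    close(conn)

    cat(result, "\n")  # prints "Table Not Found: blah"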
diff --git a/R/pkg/inst/tests/test_sparkSQL.R b/R/pkg/inst/tests/test_sparkSQL.R
index d5db97248c..61c8a7ec7d 100644
--- a/R/pkg/inst/tests/test_sparkSQL.R
+++ b/R/pkg/inst/tests/test_sparkSQL.R
@@ -1002,6 +1002,11 @@ test_that("crosstab() on a DataFrame", {
   expect_identical(expected, ordered)
 })
 
+test_that("SQL error message is returned from JVM", {
+  retError <- tryCatch(sql(sqlContext, "select * from blah"), error = function(e) e)
+  expect_equal(grepl("Table Not Found: blah", retError), TRUE)
+})
+
 unlink(parquetPath)
 unlink(jsonPath)
 unlink(jsonPathNa)
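A side note on the assertion style, not part of the patch: grepl() is applied to the condition object itself and relies on its coercion to character. An equivalent check that targets only the message text can use conditionMessage(); a hedged sketch, where the stop() call stands in for the real sql() failure:

    library(testthat)

    # Simulated failure standing in for sql(sqlContext, "select * from blah").
    retError <- tryCatch(stop("Table Not Found: blah"), error = function(e) e)

    # Check only the error text rather than the whole condition object.
    expect_true(grepl("Table Not Found: blah", conditionMessage(retError)))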
diff --git a/core/src/main/scala/org/apache/spark/api/r/RBackendHandler.scala b/core/src/main/scala/org/apache/spark/api/r/RBackendHandler.scala
index a5de10fe89..14dac4ed28 100644
--- a/core/src/main/scala/org/apache/spark/api/r/RBackendHandler.scala
+++ b/core/src/main/scala/org/apache/spark/api/r/RBackendHandler.scala
@@ -69,8 +69,11 @@ private[r] class RBackendHandler(server: RBackend)
             case e: Exception =>
               logError(s"Removing $objId failed", e)
               writeInt(dos, -1)
+              writeString(dos, s"Removing $objId failed: ${e.getMessage}")
           }
-        case _ => dos.writeInt(-1)
+        case _ =>
+          dos.writeInt(-1)
+          writeString(dos, s"Error: unknown method $methodName")
       }
     } else {
       handleMethodCall(isStatic, objId, methodName, numArgs, dis, dos)
@@ -146,8 +149,11 @@ private[r] class RBackendHandler(server: RBackend)
       }
     } catch {
       case e: Exception =>
-        logError(s"$methodName on $objId failed", e)
+        logError(s"$methodName on $objId failed")
         writeInt(dos, -1)
+        // Writing the error message of the cause for the exception. This will be returned
+        // to user in the R process.
+        writeString(dos, Utils.exceptionString(e.getCause))
     }
   }
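On the R side, the string written here arrives verbatim as the error message, so a failed method call now surfaces the cause's message followed by its stack trace. A small sketch of handling that multi-line message on the R side; the JVM-style text and stack frame below are invented for illustration only:

    # Hypothetical example of the multi-line string Utils.exceptionString()
    # produces for the cause; the frame below is made up for this sketch.
    jvmMessage <- paste(
      "java.lang.RuntimeException: Table Not Found: blah",
      "\tat example.SomeClass.someMethod(SomeClass.scala:42)",
      sep = "\n"
    )

    err <- tryCatch(stop(jvmMessage), error = function(e) e)

    # First line is the cause's message; the remainder is the stack trace.
    msgLines <- strsplit(conditionMessage(err), "\n", fixed = TRUE)[[1]]
    cat("cause:", msgLines[1], "\n")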