author     Cheng Lian <lian.cs.zju@gmail.com>        2014-08-26 17:33:40 -0700
committer  Michael Armbrust <michael@databricks.com> 2014-08-26 17:33:40 -0700
commit     faeb9c0e1440f4af888be0dfc5de7b57efc92b00 (patch)
tree       f9e4efd06835dc80bd0bf8f6ecdc8b4af0dc866d
parent     2ffd3290fe30c23df8da1efe153b84c23eb2e1cd (diff)
[SPARK-2964] [SQL] Remove duplicated code from spark-sql and start-thriftserver.sh
Author: Cheng Lian <lian.cs.zju@gmail.com>
Author: Kousuke Saruta <sarutak@oss.nttdata.co.jp>

Closes #1886 from sarutak/SPARK-2964 and squashes the following commits:

8ef8751 [Kousuke Saruta] Merge branch 'master' of git://git.apache.org/spark into SPARK-2964
26e7c95 [Kousuke Saruta] Revert "Shorten timeout to more reasonable value"
ffb68fa [Kousuke Saruta] Modified spark-sql and start-thriftserver.sh to use bin/utils.sh
8c6f658 [Kousuke Saruta] Merge branch 'spark-3026' of https://github.com/liancheng/spark into SPARK-2964
81b43a8 [Cheng Lian] Shorten timeout to more reasonable value
a89e66d [Cheng Lian] Fixed command line options quotation in scripts
9c894d3 [Cheng Lian] Fixed bin/spark-sql -S option typo
be4736b [Cheng Lian] Report better error message when running JDBC/CLI without hive-thriftserver profile enabled
-rwxr-xr-x  bin/spark-sql                                                  | 55
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala | 14
-rwxr-xr-x  sbin/start-thriftserver.sh                                     | 39
3 files changed, 39 insertions(+), 69 deletions(-)
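
Note: both scripts below delete their hand-rolled option loops in favor of a shared gatherSparkSubmitOpts helper sourced from bin/utils.sh. That file is not part of this diff, so the following is only a hypothetical sketch of the contract the scripts rely on: flags that spark-submit itself understands are collected into SUBMISSION_OPTS, and everything else falls through to APPLICATION_OPTS for the wrapped application. The flag names in the case arms are illustrative, not the definitive list.

# Hypothetical sketch of the splitting contract provided by bin/utils.sh.
function gatherSparkSubmitOpts() {
  SUBMISSION_OPTS=()
  APPLICATION_OPTS=()
  while (($#)); do
    case "$1" in
      --master | --deploy-mode | --name | --jars | --conf | \
      --properties-file | --driver-memory | --executor-memory)
        # A spark-submit flag that takes a value: keep flag and value together.
        if [[ $# -lt 2 ]]; then
          "$SUBMIT_USAGE_FUNCTION"  # usage function supplied by the caller
          exit 1
        fi
        SUBMISSION_OPTS+=("$1" "$2"); shift 2
        ;;
      -v | --verbose)
        # A boolean flag recognized by spark-submit.
        SUBMISSION_OPTS+=("$1"); shift
        ;;
      *)
        # Everything else goes to the application (CLI or Thrift server).
        APPLICATION_OPTS+=("$1"); shift
        ;;
    esac
  done
}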
diff --git a/bin/spark-sql b/bin/spark-sql
index 564f1f4190..2a3cb31f58 100755
--- a/bin/spark-sql
+++ b/bin/spark-sql
@@ -24,6 +24,7 @@
set -o posix
CLASS="org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver"
+CLASS_NOT_FOUND_EXIT_STATUS=1
# Figure out where Spark is installed
FWDIR="$(cd `dirname $0`/..; pwd)"
@@ -43,52 +44,22 @@ function usage {
$FWDIR/bin/spark-class $CLASS --help 2>&1 | grep -v "$pattern" 1>&2
}
-function ensure_arg_number {
- arg_number=$1
- at_least=$2
-
- if [[ $arg_number -lt $at_least ]]; then
- usage
- exit 1
- fi
-}
-
-if [[ "$@" = --help ]] || [[ "$@" = -h ]]; then
+if [[ "$@" = *--help ]] || [[ "$@" = *-h ]]; then
usage
exit 0
fi
-CLI_ARGS=()
-SUBMISSION_ARGS=()
-
-while (($#)); do
- case $1 in
- -d | --define | --database | -f | -h | --hiveconf | --hivevar | -i | -p)
- ensure_arg_number $# 2
- CLI_ARGS+=("$1"); shift
- CLI_ARGS+=("$1"); shift
- ;;
+source $FWDIR/bin/utils.sh
+SUBMIT_USAGE_FUNCTION=usage
+gatherSparkSubmitOpts "$@"
- -e)
- ensure_arg_number $# 2
- CLI_ARGS+=("$1"); shift
- CLI_ARGS+=("$1"); shift
- ;;
+"$FWDIR"/bin/spark-submit --class $CLASS "${SUBMISSION_OPTS[@]}" spark-internal "${APPLICATION_OPTS[@]}"
+exit_status=$?
- -s | --silent)
- CLI_ARGS+=("$1"); shift
- ;;
-
- -v | --verbose)
- # Both SparkSubmit and SparkSQLCLIDriver recognizes -v | --verbose
- CLI_ARGS+=("$1")
- SUBMISSION_ARGS+=("$1"); shift
- ;;
-
- *)
- SUBMISSION_ARGS+=("$1"); shift
- ;;
- esac
-done
+if [[ exit_status -eq CLASS_NOT_FOUND_EXIT_STATUS ]]; then
+ echo
+ echo "Failed to load Spark SQL CLI main class $CLASS."
+ echo "You need to build Spark with -Phive."
+fi
-exec "$FWDIR"/bin/spark-submit --class $CLASS "${SUBMISSION_ARGS[@]}" spark-internal "${CLI_ARGS[@]}"
+exit $exit_status
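
The dropped exec is what makes the new error message possible: exec replaces the shell process with spark-submit, so control never returns and the script can never inspect the exit status. Running spark-submit as a child process lets the wrapper translate the dedicated status into a build hint before propagating it. A minimal standalone illustration of the pattern (the child command is hypothetical):

#!/usr/bin/env bash
# Hypothetical demo of why the script above stops using exec.
CLASS_NOT_FOUND_EXIT_STATUS=1

# exec some-command "$@"   # exec replaces this shell; nothing below would run

some-command "$@"          # hypothetical child, run as a child process instead
exit_status=$?

if [[ $exit_status -eq $CLASS_NOT_FOUND_EXIT_STATUS ]]; then
  echo "Child signalled a missing main class; check your build profile." 1>&2
fi
exit $exit_status          # still propagate the child's original status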
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
index f8cdbc3c39..550ee72538 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
@@ -54,6 +54,8 @@ object SparkSubmit {
private val SPARK_SHELL = "spark-shell"
private val PYSPARK_SHELL = "pyspark-shell"
+ private val CLASS_NOT_FOUND_EXIT_STATUS = 1
+
// Exposed for testing
private[spark] var exitFn: () => Unit = () => System.exit(-1)
private[spark] var printStream: PrintStream = System.err
@@ -311,8 +313,18 @@ object SparkSubmit {
System.setProperty(key, value)
}
- val mainClass = Class.forName(childMainClass, true, loader)
+ var mainClass: Class[_] = null
+
+ try {
+ mainClass = Class.forName(childMainClass, true, loader)
+ } catch {
+ case e: ClassNotFoundException =>
+ e.printStackTrace(printStream)
+ System.exit(CLASS_NOT_FOUND_EXIT_STATUS)
+ }
+
val mainMethod = mainClass.getMethod("main", new Array[String](0).getClass)
+
try {
mainMethod.invoke(null, childArgs.toArray)
} catch {
diff --git a/sbin/start-thriftserver.sh b/sbin/start-thriftserver.sh
index 2c4452473c..c519a77df4 100755
--- a/sbin/start-thriftserver.sh
+++ b/sbin/start-thriftserver.sh
@@ -27,6 +27,7 @@ set -o posix
FWDIR="$(cd `dirname $0`/..; pwd)"
CLASS="org.apache.spark.sql.hive.thriftserver.HiveThriftServer2"
+CLASS_NOT_FOUND_EXIT_STATUS=1
function usage {
echo "Usage: ./sbin/start-thriftserver [options] [thrift server options]"
@@ -43,36 +44,22 @@ function usage {
$FWDIR/bin/spark-class $CLASS --help 2>&1 | grep -v "$pattern" 1>&2
}
-function ensure_arg_number {
- arg_number=$1
- at_least=$2
-
- if [[ $arg_number -lt $at_least ]]; then
- usage
- exit 1
- fi
-}
-
-if [[ "$@" = --help ]] || [[ "$@" = -h ]]; then
+if [[ "$@" = *--help ]] || [[ "$@" = *-h ]]; then
usage
exit 0
fi
-THRIFT_SERVER_ARGS=()
-SUBMISSION_ARGS=()
+source $FWDIR/bin/utils.sh
+SUBMIT_USAGE_FUNCTION=usage
+gatherSparkSubmitOpts "$@"
-while (($#)); do
- case $1 in
- --hiveconf)
- ensure_arg_number $# 2
- THRIFT_SERVER_ARGS+=("$1"); shift
- THRIFT_SERVER_ARGS+=("$1"); shift
- ;;
+"$FWDIR"/bin/spark-submit --class $CLASS "${SUBMISSION_OPTS[@]}" spark-internal "${APPLICATION_OPTS[@]}"
+exit_status=$?
- *)
- SUBMISSION_ARGS+=("$1"); shift
- ;;
- esac
-done
+if [[ exit_status -eq CLASS_NOT_FOUND_EXIT_STATUS ]]; then
+ echo
+ echo "Failed to load Hive Thrift server main class $CLASS."
+ echo "You need to build Spark with -Phive."
+fi
-exec "$FWDIR"/bin/spark-submit --class $CLASS "${SUBMISSION_ARGS[@]}" spark-internal "${THRIFT_SERVER_ARGS[@]}"
+exit $exit_status
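
With option gathering unified, spark-submit flags and application flags can be mixed freely on either script's command line. A hypothetical invocation (all values illustrative):

# --master and --driver-memory are gathered into SUBMISSION_OPTS for
# spark-submit; --hiveconf and its value fall through to HiveThriftServer2
# via APPLICATION_OPTS.
./sbin/start-thriftserver.sh \
  --master spark://master-host:7077 \
  --driver-memory 2g \
  --hiveconf hive.server2.thrift.port=10001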