about summary refs log tree commit diff
path: root/bin/spark-sql
diff options
context:
space:
mode:
authorCheng Lian <lian.cs.zju@gmail.com>2014-08-26 17:33:40 -0700
committerMichael Armbrust <michael@databricks.com>2014-08-26 17:33:57 -0700
commitc0e1f99f57b540cbcfd880662a778477c70fc94d (patch)
tree816ef44a1afcb93219b11a9f7d0fca661fe6e1f6 /bin/spark-sql
parenta308a1624fd5783422149e641aa748fec9bf4c63 (diff)
download: spark-c0e1f99f57b540cbcfd880662a778477c70fc94d.tar.gz
spark-c0e1f99f57b540cbcfd880662a778477c70fc94d.tar.bz2
spark-c0e1f99f57b540cbcfd880662a778477c70fc94d.zip
[SPARK-2964] [SQL] Remove duplicated code from spark-sql and start-thriftserver.sh
Author: Cheng Lian <lian.cs.zju@gmail.com> Author: Kousuke Saruta <sarutak@oss.nttdata.co.jp> Closes #1886 from sarutak/SPARK-2964 and squashes the following commits: 8ef8751 [Kousuke Saruta] Merge branch 'master' of git://git.apache.org/spark into SPARK-2964 26e7c95 [Kousuke Saruta] Revert "Shorten timeout to more reasonable value" ffb68fa [Kousuke Saruta] Modified spark-sql and start-thriftserver.sh to use bin/utils.sh 8c6f658 [Kousuke Saruta] Merge branch 'spark-3026' of https://github.com/liancheng/spark into SPARK-2964 81b43a8 [Cheng Lian] Shorten timeout to more reasonable value a89e66d [Cheng Lian] Fixed command line options quotation in scripts 9c894d3 [Cheng Lian] Fixed bin/spark-sql -S option typo be4736b [Cheng Lian] Report better error message when running JDBC/CLI without hive-thriftserver profile enabled (cherry picked from commit faeb9c0e1440f4af888be0dfc5de7b57efc92b00) Signed-off-by: Michael Armbrust <michael@databricks.com>
Diffstat (limited to 'bin/spark-sql')
-rwxr-xr-x  bin/spark-sql  55
1 files changed, 13 insertions, 42 deletions
diff --git a/bin/spark-sql b/bin/spark-sql
index 564f1f4190..2a3cb31f58 100755
--- a/bin/spark-sql
+++ b/bin/spark-sql
@@ -24,6 +24,7 @@
set -o posix
CLASS="org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver"
+CLASS_NOT_FOUND_EXIT_STATUS=1
# Figure out where Spark is installed
FWDIR="$(cd `dirname $0`/..; pwd)"
@@ -43,52 +44,22 @@ function usage {
$FWDIR/bin/spark-class $CLASS --help 2>&1 | grep -v "$pattern" 1>&2
}
-function ensure_arg_number {
- arg_number=$1
- at_least=$2
-
- if [[ $arg_number -lt $at_least ]]; then
- usage
- exit 1
- fi
-}
-
-if [[ "$@" = --help ]] || [[ "$@" = -h ]]; then
+if [[ "$@" = *--help ]] || [[ "$@" = *-h ]]; then
usage
exit 0
fi
-CLI_ARGS=()
-SUBMISSION_ARGS=()
-
-while (($#)); do
- case $1 in
- -d | --define | --database | -f | -h | --hiveconf | --hivevar | -i | -p)
- ensure_arg_number $# 2
- CLI_ARGS+=("$1"); shift
- CLI_ARGS+=("$1"); shift
- ;;
+source $FWDIR/bin/utils.sh
+SUBMIT_USAGE_FUNCTION=usage
+gatherSparkSubmitOpts "$@"
- -e)
- ensure_arg_number $# 2
- CLI_ARGS+=("$1"); shift
- CLI_ARGS+=("$1"); shift
- ;;
+"$FWDIR"/bin/spark-submit --class $CLASS "${SUBMISSION_OPTS[@]}" spark-internal "${APPLICATION_OPTS[@]}"
+exit_status=$?
- -s | --silent)
- CLI_ARGS+=("$1"); shift
- ;;
-
- -v | --verbose)
- # Both SparkSubmit and SparkSQLCLIDriver recognizes -v | --verbose
- CLI_ARGS+=("$1")
- SUBMISSION_ARGS+=("$1"); shift
- ;;
-
- *)
- SUBMISSION_ARGS+=("$1"); shift
- ;;
- esac
-done
+if [[ exit_status -eq CLASS_NOT_FOUND_EXIT_STATUS ]]; then
+ echo
+ echo "Failed to load Spark SQL CLI main class $CLASS."
+ echo "You need to build Spark with -Phive."
+fi
-exec "$FWDIR"/bin/spark-submit --class $CLASS "${SUBMISSION_ARGS[@]}" spark-internal "${CLI_ARGS[@]}"
+exit $exit_status