-rwxr-xr-x  bin/spark-sql                                                              | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala              | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/network/nio/Connection.scala          | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/network/nio/ConnectionManager.scala   | 4
-rwxr-xr-x  sbin/start-thriftserver.sh                                                 | 2
5 files changed, 6 insertions, 6 deletions
diff --git a/bin/spark-sql b/bin/spark-sql
index ae096530ca..9d66140b6a 100755
--- a/bin/spark-sql
+++ b/bin/spark-sql
@@ -24,7 +24,7 @@
set -o posix
CLASS="org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver"
-CLASS_NOT_FOUND_EXIT_STATUS=1
+CLASS_NOT_FOUND_EXIT_STATUS=101
# Figure out where Spark is installed
FWDIR="$(cd "`dirname "$0"`"/..; pwd)"
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
index 5ed3575816..5d15af1326 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
@@ -54,7 +54,7 @@ object SparkSubmit {
private val SPARK_SHELL = "spark-shell"
private val PYSPARK_SHELL = "pyspark-shell"
- private val CLASS_NOT_FOUND_EXIT_STATUS = 1
+ private val CLASS_NOT_FOUND_EXIT_STATUS = 101
// Exposed for testing
private[spark] var exitFn: () => Unit = () => System.exit(-1)
diff --git a/core/src/main/scala/org/apache/spark/network/nio/Connection.scala b/core/src/main/scala/org/apache/spark/network/nio/Connection.scala
index 74074a8dcb..18172d359c 100644
--- a/core/src/main/scala/org/apache/spark/network/nio/Connection.scala
+++ b/core/src/main/scala/org/apache/spark/network/nio/Connection.scala
@@ -460,7 +460,7 @@ private[spark] class ReceivingConnection(
if (currId != null) currId else super.getRemoteConnectionManagerId()
}
- // The reciever's remote address is the local socket on remote side : which is NOT
+ // The receiver's remote address is the local socket on remote side : which is NOT
// the connection manager id of the receiver.
// We infer that from the messages we receive on the receiver socket.
private def processConnectionManagerId(header: MessageChunkHeader) {
diff --git a/core/src/main/scala/org/apache/spark/network/nio/ConnectionManager.scala b/core/src/main/scala/org/apache/spark/network/nio/ConnectionManager.scala
index 09d3ea3065..5aa7e94943 100644
--- a/core/src/main/scala/org/apache/spark/network/nio/ConnectionManager.scala
+++ b/core/src/main/scala/org/apache/spark/network/nio/ConnectionManager.scala
@@ -501,7 +501,7 @@ private[nio] class ConnectionManager(
def changeConnectionKeyInterest(connection: Connection, ops: Int) {
keyInterestChangeRequests += ((connection.key, ops))
- // so that registerations happen !
+ // so that registrations happen !
wakeupSelector()
}
@@ -832,7 +832,7 @@ private[nio] class ConnectionManager(
}
/**
- * Send a message and block until an acknowldgment is received or an error occurs.
+ * Send a message and block until an acknowledgment is received or an error occurs.
* @param connectionManagerId the message's destination
* @param message the message being sent
* @return a Future that either returns the acknowledgment message or captures an exception.
diff --git a/sbin/start-thriftserver.sh b/sbin/start-thriftserver.sh
index 4ce40fe750..ba953e763f 100755
--- a/sbin/start-thriftserver.sh
+++ b/sbin/start-thriftserver.sh
@@ -27,7 +27,7 @@ set -o posix
FWDIR="$(cd "`dirname "$0"`"/..; pwd)"
CLASS="org.apache.spark.sql.hive.thriftserver.HiveThriftServer2"
-CLASS_NOT_FOUND_EXIT_STATUS=1
+CLASS_NOT_FOUND_EXIT_STATUS=101
function usage {
echo "Usage: ./sbin/start-thriftserver [options] [thrift server options]"