author     Patrick Wendell <pwendell@gmail.com>  2014-07-27 18:46:58 -0700
committer  Patrick Wendell <pwendell@gmail.com>  2014-07-27 18:46:58 -0700
commit     e5bbce9a60eb99c059315edbf18a1a923d93d9d5 (patch)
tree       f54fe681dd875770c4c6c15bd97f7f7a2f76ddec /core
parent     81fcdd22c8ef52889ed51b3ec5c2747708505fc2 (diff)
Revert "[SPARK-2410][SQL] Merging Hive Thrift/JDBC server"
This reverts commit f6ff2a61d00d12481bfb211ae13d6992daacdcc2.
Diffstat (limited to 'core')
-rw-r--r--  core/pom.xml                                                            |  2
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala          | 14
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala |  5
3 files changed, 5 insertions(+), 16 deletions(-)
diff --git a/core/pom.xml b/core/pom.xml
index a24743495b..1054cec4d7 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -28,7 +28,7 @@
   <groupId>org.apache.spark</groupId>
   <artifactId>spark-core_2.10</artifactId>
   <properties>
-    <sbt.project.name>core</sbt.project.name>
+     <sbt.project.name>core</sbt.project.name>
   </properties>
   <packaging>jar</packaging>
   <name>Spark Project Core</name>
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
index c9cec33eba..3b5642b6ca 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
@@ -46,10 +46,6 @@ object SparkSubmit {
   private val CLUSTER = 2
   private val ALL_DEPLOY_MODES = CLIENT | CLUSTER
 
-  // A special jar name that indicates the class being run is inside of Spark itself, and therefore
-  // no user jar is needed.
-  private val SPARK_INTERNAL = "spark-internal"
-
   // Special primary resource names that represent shells rather than application jars.
   private val SPARK_SHELL = "spark-shell"
   private val PYSPARK_SHELL = "pyspark-shell"
@@ -261,9 +257,7 @@ object SparkSubmit {
     // In yarn-cluster mode, use yarn.Client as a wrapper around the user class
     if (clusterManager == YARN && deployMode == CLUSTER) {
       childMainClass = "org.apache.spark.deploy.yarn.Client"
-      if (args.primaryResource != SPARK_INTERNAL) {
-        childArgs += ("--jar", args.primaryResource)
-      }
+      childArgs += ("--jar", args.primaryResource)
       childArgs += ("--class", args.mainClass)
       if (args.childArgs != null) {
         args.childArgs.foreach { arg => childArgs += ("--arg", arg) }
@@ -338,7 +332,7 @@ object SparkSubmit {
    * Return whether the given primary resource represents a user jar.
    */
   private def isUserJar(primaryResource: String): Boolean = {
-    !isShell(primaryResource) && !isPython(primaryResource) && !isInternal(primaryResource)
+    !isShell(primaryResource) && !isPython(primaryResource)
   }
 
   /**
@@ -355,10 +349,6 @@ object SparkSubmit {
     primaryResource.endsWith(".py") || primaryResource == PYSPARK_SHELL
   }
 
-  private[spark] def isInternal(primaryResource: String): Boolean = {
-    primaryResource == SPARK_INTERNAL
-  }
-
   /**
    * Merge a sequence of comma-separated file lists, some of which may be null to indicate
    * no files, into a single comma-separated string.
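
For context, a minimal sketch of how the resource classifiers read once this revert lands. The constants and the isUserJar/isPython bodies are taken from the diff above; the enclosing object and the isShell body are simplifications, not the full SparkSubmit source:

object ResourceClassifier {
  // Special primary resource names that represent shells rather than application jars.
  private val SPARK_SHELL = "spark-shell"
  private val PYSPARK_SHELL = "pyspark-shell"

  // Shells are named by sentinel strings instead of jar paths.
  def isShell(primaryResource: String): Boolean =
    primaryResource == SPARK_SHELL || primaryResource == PYSPARK_SHELL

  // Python apps are .py files or the PySpark shell sentinel.
  def isPython(primaryResource: String): Boolean =
    primaryResource.endsWith(".py") || primaryResource == PYSPARK_SHELL

  // With isInternal removed, anything that is neither a shell nor a Python
  // resource counts as a user jar again, which is why yarn-cluster mode
  // above now passes --jar unconditionally.
  def isUserJar(primaryResource: String): Boolean =
    !isShell(primaryResource) && !isPython(primaryResource)
}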
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
index 01d0ae541a..3ab67a43a3 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
@@ -204,9 +204,8 @@ private[spark] class SparkSubmitArguments(args: Seq[String]) {
 
   /** Fill in values by parsing user options. */
   private def parseOpts(opts: Seq[String]): Unit = {
-    var inSparkOpts = true
-
     // Delineates parsing of Spark options from parsing of user options.
+    var inSparkOpts = true
     parse(opts)
 
     def parse(opts: Seq[String]): Unit = opts match {
@@ -319,7 +318,7 @@ private[spark] class SparkSubmitArguments(args: Seq[String]) {
           SparkSubmit.printErrorAndExit(errMessage)
         case v =>
           primaryResource =
-            if (!SparkSubmit.isShell(v) && !SparkSubmit.isInternal(v)) {
+            if (!SparkSubmit.isShell(v)) {
              Utils.resolveURI(v).toString
            } else {
              v
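
This second hunk tightens the same check on the argument-parsing side. A minimal sketch of the post-revert branch, where resolveURI is a simplified stand-in for org.apache.spark.util.Utils.resolveURI and normalize is a hypothetical wrapper around the inline match logic shown above:

import java.io.File
import java.net.URI

object PrimaryResource {
  // Simplified stand-in for Utils.resolveURI: keep an explicit scheme,
  // otherwise treat the string as a local file path.
  private def resolveURI(path: String): URI = {
    val uri = new URI(path)
    if (uri.getScheme != null) uri else new File(path).getAbsoluteFile.toURI
  }

  private def isShell(v: String): Boolean =
    v == "spark-shell" || v == "pyspark-shell"

  // Post-revert logic: only shell sentinels skip URI resolution; the old
  // "spark-internal" sentinel is no longer special-cased here.
  def normalize(v: String): String =
    if (!isShell(v)) resolveURI(v).toString else v
}

For example, PrimaryResource.normalize("my-app.jar") yields a file: URI for the absolute path, while PrimaryResource.normalize("spark-shell") is returned as-is.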