aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorSean Owen <sowen@cloudera.com>2014-11-27 09:03:17 -0800
committerAaron Davidson <aaron@databricks.com>2014-11-27 09:03:17 -0800
commit5d7fe178b303918faa0893cd36963158b420309f (patch)
treeb3f3dbd8a97d46854cce75d485a5b332f7c5762b
parentc86e9bc4fdd103111280a37144a518479bb9cf0e (diff)
downloadspark-5d7fe178b303918faa0893cd36963158b420309f.tar.gz
spark-5d7fe178b303918faa0893cd36963158b420309f.tar.bz2
spark-5d7fe178b303918faa0893cd36963158b420309f.zip
SPARK-4170 [CORE] Closure problems when running Scala app that "extends App"
Warn against subclassing scala.App, and remove one instance of this in examples. Author: Sean Owen <sowen@cloudera.com>. Closes #3497 from srowen/SPARK-4170 and squashes the following commits: 4a6131f [Sean Owen] Restore multiline string formatting; a8ca895 [Sean Owen] Warn against subclassing scala.App, and remove one instance of this in examples
-rw-r--r--core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala5
-rw-r--r--docs/quick-start.md3
-rw-r--r--examples/src/main/scala/org/apache/spark/examples/mllib/LinearRegression.scala70
3 files changed, 44 insertions, 34 deletions
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
index 8a62519bd2..00f291823e 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
@@ -345,6 +345,11 @@ object SparkSubmit {
System.exit(CLASS_NOT_FOUND_EXIT_STATUS)
}
+ // SPARK-4170
+ if (classOf[scala.App].isAssignableFrom(mainClass)) {
+ printWarning("Subclasses of scala.App may not work correctly. Use a main() method instead.")
+ }
+
val mainMethod = mainClass.getMethod("main", new Array[String](0).getClass)
if (!Modifier.isStatic(mainMethod.getModifiers)) {
throw new IllegalStateException("The main method in the given main class must be static")
diff --git a/docs/quick-start.md b/docs/quick-start.md
index 6236de0e1f..bf643bb70e 100644
--- a/docs/quick-start.md
+++ b/docs/quick-start.md
@@ -244,6 +244,9 @@ object SimpleApp {
}
{% endhighlight %}
+Note that applications should define a `main()` method instead of extending `scala.App`.
+Subclasses of `scala.App` may not work correctly.
+
This program just counts the number of lines containing 'a' and the number containing 'b' in the
Spark README. Note that you'll need to replace YOUR_SPARK_HOME with the location where Spark is
installed. Unlike the earlier examples with the Spark shell, which initializes its own SparkContext,
diff --git a/examples/src/main/scala/org/apache/spark/examples/mllib/LinearRegression.scala b/examples/src/main/scala/org/apache/spark/examples/mllib/LinearRegression.scala
index 6815b1c052..6a456ba7ec 100644
--- a/examples/src/main/scala/org/apache/spark/examples/mllib/LinearRegression.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/mllib/LinearRegression.scala
@@ -33,7 +33,7 @@ import org.apache.spark.mllib.optimization.{SimpleUpdater, SquaredL2Updater, L1U
* A synthetic dataset can be found at `data/mllib/sample_linear_regression_data.txt`.
* If you use it as a template to create your own app, please use `spark-submit` to submit your app.
*/
-object LinearRegression extends App {
+object LinearRegression {
object RegType extends Enumeration {
type RegType = Value
@@ -49,40 +49,42 @@ object LinearRegression extends App {
regType: RegType = L2,
regParam: Double = 0.01) extends AbstractParams[Params]
- val defaultParams = Params()
-
- val parser = new OptionParser[Params]("LinearRegression") {
- head("LinearRegression: an example app for linear regression.")
- opt[Int]("numIterations")
- .text("number of iterations")
- .action((x, c) => c.copy(numIterations = x))
- opt[Double]("stepSize")
- .text(s"initial step size, default: ${defaultParams.stepSize}")
- .action((x, c) => c.copy(stepSize = x))
- opt[String]("regType")
- .text(s"regularization type (${RegType.values.mkString(",")}), " +
- s"default: ${defaultParams.regType}")
- .action((x, c) => c.copy(regType = RegType.withName(x)))
- opt[Double]("regParam")
- .text(s"regularization parameter, default: ${defaultParams.regParam}")
- arg[String]("<input>")
- .required()
- .text("input paths to labeled examples in LIBSVM format")
- .action((x, c) => c.copy(input = x))
- note(
- """
- |For example, the following command runs this app on a synthetic dataset:
- |
- | bin/spark-submit --class org.apache.spark.examples.mllib.LinearRegression \
- | examples/target/scala-*/spark-examples-*.jar \
- | data/mllib/sample_linear_regression_data.txt
- """.stripMargin)
- }
+ def main(args: Array[String]) {
+ val defaultParams = Params()
+
+ val parser = new OptionParser[Params]("LinearRegression") {
+ head("LinearRegression: an example app for linear regression.")
+ opt[Int]("numIterations")
+ .text("number of iterations")
+ .action((x, c) => c.copy(numIterations = x))
+ opt[Double]("stepSize")
+ .text(s"initial step size, default: ${defaultParams.stepSize}")
+ .action((x, c) => c.copy(stepSize = x))
+ opt[String]("regType")
+ .text(s"regularization type (${RegType.values.mkString(",")}), " +
+ s"default: ${defaultParams.regType}")
+ .action((x, c) => c.copy(regType = RegType.withName(x)))
+ opt[Double]("regParam")
+ .text(s"regularization parameter, default: ${defaultParams.regParam}")
+ arg[String]("<input>")
+ .required()
+ .text("input paths to labeled examples in LIBSVM format")
+ .action((x, c) => c.copy(input = x))
+ note(
+ """
+ |For example, the following command runs this app on a synthetic dataset:
+ |
+ | bin/spark-submit --class org.apache.spark.examples.mllib.LinearRegression \
+ | examples/target/scala-*/spark-examples-*.jar \
+ | data/mllib/sample_linear_regression_data.txt
+ """.stripMargin)
+ }
- parser.parse(args, defaultParams).map { params =>
- run(params)
- } getOrElse {
- sys.exit(1)
+ parser.parse(args, defaultParams).map { params =>
+ run(params)
+ } getOrElse {
+ sys.exit(1)
+ }
}
def run(params: Params) {