diff options
author | Dongjoon Hyun <dongjoon@apache.org> | 2016-03-28 12:04:21 +0100 |
---|---|---|
committer | Sean Owen <sowen@cloudera.com> | 2016-03-28 12:04:21 +0100 |
commit | b66aa900619a86b7acbb7c3f96abc96ea2faa53c (patch) | |
tree | edd23acfb7a0ec84af0a322c8bd1b02a9fe139c4 /repl/scala-2.11/src/main | |
parent | 7b841540180e8d1403d6c95b02e93f129267b34f (diff) | |
download | spark-b66aa900619a86b7acbb7c3f96abc96ea2faa53c.tar.gz spark-b66aa900619a86b7acbb7c3f96abc96ea2faa53c.tar.bz2 spark-b66aa900619a86b7acbb7c3f96abc96ea2faa53c.zip |
[SPARK-14102][CORE] Block `reset` command in SparkShell
## What changes were proposed in this pull request?
Spark Shell provides an easy way to use Spark in a Scala environment. This PR adds the `reset` command to the blocked-command list, and also cleans up the surrounding code according to the Scala coding style.
```scala
scala> sc
res0: org.apache.spark.SparkContext = org.apache.spark.SparkContext@718fad24
scala> :reset
scala> sc
<console>:11: error: not found: value sc
sc
^
```
If we block `reset`, Spark Shell works like the following.
```scala
scala> :reset
reset: no such command. Type :help for help.
scala> :re
re is ambiguous: did you mean :replay or :require?
```
## How was this patch tested?
Manual. Run `bin/spark-shell` and type `:reset`.
Author: Dongjoon Hyun <dongjoon@apache.org>
Closes #11920 from dongjoon-hyun/SPARK-14102.
Diffstat (limited to 'repl/scala-2.11/src/main')
-rw-r--r-- | repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala | 17 |
1 files changed, 7 insertions, 10 deletions
diff --git a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala index 7ed6d3b1f9..db09d6ace1 100644 --- a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala +++ b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala @@ -19,12 +19,11 @@ package org.apache.spark.repl import java.io.BufferedReader -import Predef.{println => _, _} -import scala.util.Properties.{javaVersion, versionString, javaVmName} - -import scala.tools.nsc.interpreter.{JPrintWriter, ILoop} +import scala.Predef.{println => _, _} import scala.tools.nsc.Settings +import scala.tools.nsc.interpreter.{ILoop, JPrintWriter} import scala.tools.nsc.util.stringFromStream +import scala.util.Properties.{javaVersion, javaVmName, versionString} /** * A Spark-specific interactive shell. @@ -75,11 +74,9 @@ class SparkILoop(in0: Option[BufferedReader], out: JPrintWriter) echo("Type :help for more information.") } - import LoopCommand.{ cmd, nullary } - - private val blockedCommands = Set("implicits", "javap", "power", "type", "kind") + private val blockedCommands = Set("implicits", "javap", "power", "type", "kind", "reset") - /** Standard commands **/ + /** Standard commands */ lazy val sparkStandardCommands: List[SparkILoop.this.LoopCommand] = standardCommands.filter(cmd => !blockedCommands(cmd.name)) @@ -112,9 +109,9 @@ object SparkILoop { val output = new JPrintWriter(new OutputStreamWriter(ostream), true) val repl = new SparkILoop(input, output) - if (sets.classpath.isDefault) + if (sets.classpath.isDefault) { sets.classpath.value = sys.props("java.class.path") - + } repl process sets } } |