 -rw-r--r--  README.md                                                      |    2
 -rw-r--r--  core/src/main/scala/spark/ClosureCleaner.scala                 |    3
 -rw-r--r--  core/src/main/scala/spark/PairRDDFunctions.scala               |    3
 -rw-r--r--  core/src/main/scala/spark/RDD.scala                            |    2
 -rw-r--r--  project/DepJar.scala                                           |  108
 -rw-r--r--  project/SparkBuild.scala                                       |   53
 -rw-r--r--  project/build.properties                                       |    9
 -rw-r--r--  project/build/SparkProject.scala                               |  107
 -rw-r--r--  project/plugins/SparkProjectPlugins.scala                      |   11
 -rw-r--r--  project/plugins/build.sbt                                      |   13
 -rw-r--r--  project/plugins/project/SparkPluginBuild.scala                 |    7
 -rw-r--r--  repl/lib/jline.jar                                             |  bin 87543 -> 0 bytes
 -rw-r--r--  repl/lib/scala-jline.jar                                       |  bin 0 -> 158463 bytes
 -rw-r--r--  repl/src/main/scala/spark/repl/Main.scala                      |    8
 -rw-r--r--  repl/src/main/scala/spark/repl/SparkCompletionOutput.scala     |   92
 -rw-r--r--  repl/src/main/scala/spark/repl/SparkHelper.scala               |    5
 -rw-r--r--  repl/src/main/scala/spark/repl/SparkILoop.scala                |  998
 -rw-r--r--  repl/src/main/scala/spark/repl/SparkIMain.scala                | 1160
 -rw-r--r--  repl/src/main/scala/spark/repl/SparkISettings.scala            |   63
 -rw-r--r--  repl/src/main/scala/spark/repl/SparkImports.scala              |  214
 -rw-r--r--  repl/src/main/scala/spark/repl/SparkInteractiveReader.scala    |   60
 -rw-r--r--  repl/src/main/scala/spark/repl/SparkInterpreter.scala          | 1395
 -rw-r--r--  repl/src/main/scala/spark/repl/SparkInterpreterLoop.scala      |  662
 -rw-r--r--  repl/src/main/scala/spark/repl/SparkInterpreterSettings.scala  |  112
 -rw-r--r--  repl/src/main/scala/spark/repl/SparkJLineCompletion.scala (renamed from repl/src/main/scala/spark/repl/SparkCompletion.scala) | 266
 -rw-r--r--  repl/src/main/scala/spark/repl/SparkJLineReader.scala          |   81
 -rw-r--r--  repl/src/main/scala/spark/repl/SparkMemberHandlers.scala       |  207
 -rw-r--r--  repl/src/main/scala/spark/repl/SparkSimpleReader.scala         |   33
 -rw-r--r--  repl/src/test/scala/spark/repl/ReplSuite.scala                 |    2
 -rwxr-xr-x  run                                                            |   17
 -rw-r--r--  sbt/sbt-launch-0.10.1.jar                                      |  bin 0 -> 937683 bytes
 -rw-r--r--  sbt/sbt-launch-0.7.5.jar                                       |  bin 948813 -> 0 bytes
 -rwxr-xr-x  spark-shell                                                    |    2
 33 files changed, 3058 insertions(+), 2637 deletions(-)
diff --git a/README.md b/README.md
index 5f60b31025..b4fd39578a 100644
--- a/README.md
+++ b/README.md
@@ -18,7 +18,7 @@ Experimental support for Scala 2.9 is available in the `scala-2.9` branch.
The project is built using Simple Build Tool (SBT), which is packaged with it.
To build Spark and its example programs, run:
- sbt/sbt update compile
+ sbt/sbt compile
To run Spark, you will need to have Scala's bin in your `PATH`, or you
will need to set the `SCALA_HOME` environment variable to point to where
diff --git a/core/src/main/scala/spark/ClosureCleaner.scala b/core/src/main/scala/spark/ClosureCleaner.scala
index c21e49e3ae..9e12ff8722 100644
--- a/core/src/main/scala/spark/ClosureCleaner.scala
+++ b/core/src/main/scala/spark/ClosureCleaner.scala
@@ -64,6 +64,7 @@ object ClosureCleaner extends Logging {
accessedFields(cls) = Set[String]()
for (cls <- func.getClass :: innerClasses)
getClassReader(cls).accept(new FieldAccessFinder(accessedFields), 0)
+ //logInfo("accessedFields: " + accessedFields)
val isInterpNull = {
try {
@@ -134,6 +135,8 @@ class FieldAccessFinder(output: Map[Class[_], Set[String]]) extends EmptyVisitor
override def visitMethodInsn(op: Int, owner: String, name: String,
desc: String) {
+    // Check for calls to a getter method for a variable in an interpreter wrapper object.
+ // This means that the corresponding field will be accessed, so we should save it.
if (op == INVOKEVIRTUAL && owner.endsWith("$iwC") && !name.endsWith("$outer"))
for (cl <- output.keys if cl.getName == owner.replace('/', '.'))
output(cl) += name
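For context, a hedged sketch (illustrative, not from the patch) of what the finder above matches: the Scala 2.9 repl compiles each line into nested wrapper classes whose names end in `$iwC`, and a closure that reads a captured variable does so through a generated getter on those wrappers:

    // Illustrative only. A repl session like
    //   val x = 10
    //   rdd.map(i => i + x)
    // compiles the closure's read of x into roughly
    //   INVOKEVIRTUAL $line1/$read$$iwC$$iwC.x ()I
    // so recording INVOKEVIRTUAL calls whose owner ends in "$iwC" (and skipping
    // the synthetic "$outer" accessors) captures exactly the wrapper fields
    // that the closure actually needs.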
diff --git a/core/src/main/scala/spark/PairRDDFunctions.scala b/core/src/main/scala/spark/PairRDDFunctions.scala
index 260c31cb5e..71936eda02 100644
--- a/core/src/main/scala/spark/PairRDDFunctions.scala
+++ b/core/src/main/scala/spark/PairRDDFunctions.scala
@@ -219,7 +219,8 @@ class PairRDDFunctions[K: ClassManifest, V: ClassManifest](self: RDD[(K, V)]) ex
conf: JobConf = new JobConf) {
conf.setOutputKeyClass(keyClass)
conf.setOutputValueClass(valueClass)
- conf.setOutputFormat(outputFormatClass)
+ // conf.setOutputFormat(outputFormatClass) // Doesn't work in Scala 2.9 due to what may be a generics bug
+ conf.set("mapred.output.format.class", outputFormatClass.getName)
conf.setOutputCommitter(classOf[FileOutputCommitter])
FileOutputFormat.setOutputPath(conf, HadoopWriter.createPathFromString(path, conf))
saveAsHadoopDataset(conf)
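For reference, a minimal sketch (assuming Hadoop 0.20's `JobConf` and `TextOutputFormat`; not part of the patch) showing that the string-keyed workaround stores the same configuration entry that `setOutputFormat` writes internally:

    import org.apache.hadoop.mapred.{ JobConf, TextOutputFormat }

    val conf = new JobConf()
    // "mapred.output.format.class" is the key JobConf.setOutputFormat uses.
    conf.set("mapred.output.format.class", classOf[TextOutputFormat[_, _]].getName)
    assert(conf.getOutputFormat.isInstanceOf[TextOutputFormat[_, _]])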
diff --git a/core/src/main/scala/spark/RDD.scala b/core/src/main/scala/spark/RDD.scala
index 3aba2c4ff1..a0c4e29771 100644
--- a/core/src/main/scala/spark/RDD.scala
+++ b/core/src/main/scala/spark/RDD.scala
@@ -207,7 +207,7 @@ class FlatMappedRDD[U: ClassManifest, T: ClassManifest](
extends RDD[U](prev.context) {
override def splits = prev.splits
override val dependencies = List(new OneToOneDependency(prev))
- override def compute(split: Split) = prev.iterator(split).toStream.flatMap(f).iterator
+ override def compute(split: Split) = prev.iterator(split).flatMap(f)
}
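The motivation for the one-line change above, sketched below: `toStream` memoizes every element the iterator yields, which can pin an entire partition's worth of data in memory, while `Iterator.flatMap` stays lazy and retains nothing (illustrative snippet, not from the patch):

    val viaStream = Iterator(1, 2, 3).toStream.flatMap(x => Seq(x, x)).iterator // Stream retains elements
    val direct    = Iterator(1, 2, 3).flatMap(x => Seq(x, x))                   // nothing retained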
class FilteredRDD[T: ClassManifest](
diff --git a/project/DepJar.scala b/project/DepJar.scala
new file mode 100644
index 0000000000..1d54005690
--- /dev/null
+++ b/project/DepJar.scala
@@ -0,0 +1,108 @@
+import sbt._
+import Keys._
+import java.io.PrintWriter
+import scala.collection.mutable
+import scala.io.Source
+import Project.Initialize
+
+/*
+ * This is based on the AssemblyPlugin. For now it was easier to copy and modify it than to
+ * wait for the changes required to customise it to do what we want. We may revisit this in
+ * the future.
+ */
+object DepJarPlugin extends Plugin {
+ val DepJar = config("dep-jar") extend(Runtime)
+ val depJar = TaskKey[File]("dep-jar", "Builds a single-file jar of all dependencies.")
+
+ val jarName = SettingKey[String]("jar-name")
+ val outputPath = SettingKey[File]("output-path")
+ val excludedFiles = SettingKey[Seq[File] => Seq[File]]("excluded-files")
+ val conflictingFiles = SettingKey[Seq[File] => Seq[File]]("conflicting-files")
+
+ private def assemblyTask: Initialize[Task[File]] =
+ (test, packageOptions, cacheDirectory, outputPath,
+ fullClasspath, excludedFiles, conflictingFiles, streams) map {
+ (test, options, cacheDir, jarPath, cp, exclude, conflicting, s) =>
+ IO.withTemporaryDirectory { tempDir =>
+ val srcs = assemblyPaths(tempDir, cp, exclude, conflicting, s.log)
+ val config = new Package.Configuration(srcs, jarPath, options)
+ Package(config, cacheDir, s.log)
+ jarPath
+ }
+ }
+
+ private def assemblyPackageOptionsTask: Initialize[Task[Seq[PackageOption]]] =
+ (packageOptions in Compile, mainClass in DepJar) map { (os, mainClass) =>
+ mainClass map { s =>
+        os find { o => o.isInstanceOf[Package.MainClass] } map { _ => os } getOrElse { Package.MainClass(s) +: os }
+      } getOrElse { os }
+ }
+
+ private def assemblyExcludedFiles(base: Seq[File]): Seq[File] = {
+ ((base / "scala" ** "*") +++ // exclude scala library
+ (base / "spark" ** "*") +++ // exclude Spark classes
+ ((base / "META-INF" ** "*") --- // generally ignore the hell out of META-INF
+ (base / "META-INF" / "services" ** "*") --- // include all service providers
+ (base / "META-INF" / "maven" ** "*"))).get // include all Maven POMs and such
+ }
+
+ private def assemblyPaths(tempDir: File, classpath: Classpath,
+ exclude: Seq[File] => Seq[File], conflicting: Seq[File] => Seq[File], log: Logger) = {
+ import sbt.classpath.ClasspathUtilities
+
+ val (libs, directories) = classpath.map(_.data).partition(ClasspathUtilities.isArchive)
+ val services = mutable.Map[String, mutable.ArrayBuffer[String]]()
+ for(jar <- libs) {
+ val jarName = jar.asFile.getName
+ log.info("Including %s".format(jarName))
+ IO.unzip(jar, tempDir)
+ IO.delete(conflicting(Seq(tempDir)))
+ val servicesDir = tempDir / "META-INF" / "services"
+ if (servicesDir.asFile.exists) {
+ for (service <- (servicesDir ** "*").get) {
+ val serviceFile = service.asFile
+ if (serviceFile.exists && serviceFile.isFile) {
+ val entries = services.getOrElseUpdate(serviceFile.getName, new mutable.ArrayBuffer[String]())
+ for (provider <- Source.fromFile(serviceFile).getLines) {
+ if (!entries.contains(provider)) {
+ entries += provider
+ }
+ }
+ }
+ }
+ }
+ }
+
+ for ((service, providers) <- services) {
+ log.debug("Merging providers for %s".format(service))
+ val serviceFile = (tempDir / "META-INF" / "services" / service).asFile
+ val writer = new PrintWriter(serviceFile)
+ for (provider <- providers.map { _.trim }.filter { !_.isEmpty }) {
+ log.debug("- %s".format(provider))
+ writer.println(provider)
+ }
+ writer.close()
+ }
+
+ val base = tempDir +: directories
+ val descendants = ((base ** (-DirectoryFilter)) --- exclude(base)).get
+ descendants x relativeTo(base)
+ }
+
+ lazy val depJarSettings = inConfig(DepJar)(Seq(
+ depJar <<= packageBin.identity,
+ packageBin <<= assemblyTask,
+ jarName <<= (name, version) { (name, version) => name + "-dep-" + version + ".jar" },
+ outputPath <<= (target, jarName) { (t, s) => t / s },
+ test <<= (test in Test).identity,
+ mainClass <<= (mainClass in Runtime).identity,
+ fullClasspath <<= (fullClasspath in Runtime).identity,
+ packageOptions <<= assemblyPackageOptionsTask,
+ excludedFiles := assemblyExcludedFiles _,
+ conflictingFiles := assemblyExcludedFiles _
+ )) ++
+ Seq(
+ depJar <<= (depJar in DepJar).identity
+ )
+}
\ No newline at end of file
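A hypothetical usage sketch: any module that mixes `depJarSettings` into its settings (as `SparkBuild.scala` below does for core, repl, and bagel) gains a `dep-jar` task that produces `<name>-dep-<version>.jar` under `target/`:

    import sbt._
    import Keys._

    // Hypothetical module definition, shown only to illustrate the wiring.
    lazy val core = Project("core", file("core"),
      settings = Defaults.defaultSettings ++ DepJarPlugin.depJarSettings)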
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
new file mode 100644
index 0000000000..23d13a8179
--- /dev/null
+++ b/project/SparkBuild.scala
@@ -0,0 +1,53 @@
+import sbt._
+import Keys._
+
+object SparkBuild extends Build {
+
+ lazy val root = Project("root", file("."), settings = sharedSettings) aggregate(core, repl, examples, bagel)
+
+ lazy val core = Project("core", file("core"), settings = coreSettings)
+
+ lazy val repl = Project("repl", file("repl"), settings = replSettings) dependsOn (core)
+
+ lazy val examples = Project("examples", file("examples"), settings = examplesSettings) dependsOn (core)
+
+ lazy val bagel = Project("bagel", file("bagel"), settings = bagelSettings) dependsOn (core)
+
+ def sharedSettings = Defaults.defaultSettings ++ Seq(
+ organization := "org.spark-project",
+ version := "0.4-SNAPSHOT",
+ scalaVersion := "2.9.0-1",
+ scalacOptions := Seq(/*"-deprecation",*/ "-unchecked"), // TODO Enable -deprecation and fix all warnings
+ unmanagedJars in Compile <<= baseDirectory map { base => (base ** "*.jar").classpath },
+ retrieveManaged := true,
+ transitiveClassifiers in Scope.GlobalScope := Seq("sources"),
+ testListeners <<= target.map(t => Seq(new eu.henkelmann.sbt.JUnitXmlTestsListener(t.getAbsolutePath))),
+ libraryDependencies ++= Seq(
+ "org.eclipse.jetty" % "jetty-server" % "7.4.2.v20110526",
+ "org.scalatest" % "scalatest_2.9.0" % "1.6.1" % "test",
+ "org.scala-tools.testing" % "scalacheck_2.9.0-1" % "1.9" % "test"
+ )
+ )
+
+ val slf4jVersion = "1.6.1"
+
+ def coreSettings = sharedSettings ++ Seq(libraryDependencies ++= Seq(
+ "com.google.guava" % "guava" % "r09",
+ "log4j" % "log4j" % "1.2.16",
+ "org.slf4j" % "slf4j-api" % slf4jVersion,
+ "org.slf4j" % "slf4j-log4j12" % slf4jVersion,
+ "com.ning" % "compress-lzf" % "0.7.0",
+ "org.apache.hadoop" % "hadoop-core" % "0.20.2",
+ "asm" % "asm-all" % "3.3.1",
+ "com.google.protobuf" % "protobuf-java" % "2.3.0",
+ "de.javakaffee" % "kryo-serializers" % "0.9"
+ )) ++ DepJarPlugin.depJarSettings
+
+ def replSettings = sharedSettings ++
+ Seq(libraryDependencies <+= scalaVersion("org.scala-lang" % "scala-compiler" % _)) ++
+ DepJarPlugin.depJarSettings
+
+ def examplesSettings = sharedSettings ++ Seq(libraryDependencies += "colt" % "colt" % "1.2.0")
+
+ def bagelSettings = sharedSettings ++ DepJarPlugin.depJarSettings
+}
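A hedged sketch of how this build would grow: a new module (name hypothetical) follows the same pattern, declaring a `Project` that depends on `core` and being added to `root`'s aggregate:

    // Hypothetical addition, mirroring the existing modules above.
    lazy val extras = Project("extras", file("extras"), settings = sharedSettings) dependsOn (core)
    // root would then aggregate it: aggregate(core, repl, examples, bagel, extras)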
diff --git a/project/build.properties b/project/build.properties
index 1b5f8c4ea9..f47a3009ec 100644
--- a/project/build.properties
+++ b/project/build.properties
@@ -1,8 +1 @@
-#Project properties
-#Sat Nov 13 21:57:32 PST 2010
-project.organization=org.spark-project
-project.name=spark
-sbt.version=0.7.7
-project.version=0.4-SNAPSHOT
-build.scala.versions=2.8.1
-project.initialize=false
+sbt.version=0.10.1
diff --git a/project/build/SparkProject.scala b/project/build/SparkProject.scala
deleted file mode 100644
index 2354f47a76..0000000000
--- a/project/build/SparkProject.scala
+++ /dev/null
@@ -1,107 +0,0 @@
-import sbt._
-import sbt.Process._
-
-import assembly._
-
-import de.element34.sbteclipsify._
-
-
-class SparkProject(info: ProjectInfo) extends ParentProject(info) with IdeaProject {
-
- lazy val core = project("core", "Spark Core", new CoreProject(_))
-
- lazy val repl = project("repl", "Spark REPL", new ReplProject(_), core)
-
- lazy val examples = project("examples", "Spark Examples", new ExamplesProject(_), core)
-
- lazy val bagel = project("bagel", "Bagel", new BagelProject(_), core)
-
- trait BaseProject extends BasicScalaProject with ScalaPaths with BasicPackagePaths with Eclipsify with IdeaProject {
- override def compileOptions = super.compileOptions ++ Seq(Unchecked)
-
- lazy val jettyServer = "org.eclipse.jetty" % "jetty-server" % "7.4.2.v20110526"
-
- override def packageDocsJar = defaultJarPath("-javadoc.jar")
- override def packageSrcJar= defaultJarPath("-sources.jar")
- lazy val sourceArtifact = Artifact.sources(artifactID)
- lazy val docsArtifact = Artifact.javadoc(artifactID)
- override def packageToPublishActions = super.packageToPublishActions ++ Seq(packageDocs, packageSrc)
- }
-
- class CoreProject(info: ProjectInfo) extends DefaultProject(info) with BaseProject with DepJar with XmlTestReport {
- val guava = "com.google.guava" % "guava" % "r09"
- val log4j = "log4j" % "log4j" % "1.2.16"
- val slf4jVersion = "1.6.1"
- val slf4jApi = "org.slf4j" % "slf4j-api" % slf4jVersion
- val slf4jLog4j = "org.slf4j" % "slf4j-log4j12" % slf4jVersion
- val compressLzf = "com.ning" % "compress-lzf" % "0.7.0"
- val hadoop = "org.apache.hadoop" % "hadoop-core" % "0.20.2"
- val asm = "asm" % "asm-all" % "3.3.1"
- val scalaTest = "org.scalatest" % "scalatest" % "1.3" % "test"
- val scalaCheck = "org.scala-tools.testing" %% "scalacheck" % "1.7" % "test"
- val protobuf = "com.google.protobuf" % "protobuf-java" % "2.3.0"
- val kryoSerializers = "de.javakaffee" % "kryo-serializers" % "0.9"
- }
-
- class ReplProject(info: ProjectInfo) extends DefaultProject(info) with BaseProject with DepJar with XmlTestReport
-
- class ExamplesProject(info: ProjectInfo) extends DefaultProject(info) with BaseProject {
- val colt = "colt" % "colt" % "1.2.0"
- }
-
- class BagelProject(info: ProjectInfo) extends DefaultProject(info) with BaseProject with DepJar with XmlTestReport
-
- override def managedStyle = ManagedStyle.Maven
-}
-
-
-// Project mixin for an XML-based ScalaTest report. Unfortunately
-// there is currently no way to call this directly from SBT without
-// executing a subprocess.
-trait XmlTestReport extends BasicScalaProject {
- def testReportDir = outputPath / "test-report"
-
- lazy val testReport = task {
- log.info("Creating " + testReportDir + "...")
- if (!testReportDir.exists) {
- testReportDir.asFile.mkdirs()
- }
- log.info("Executing org.scalatest.tools.Runner...")
- val command = ("scala -classpath " + testClasspath.absString +
- " org.scalatest.tools.Runner -o " +
- " -u " + testReportDir.absolutePath +
- " -p " + (outputPath / "test-classes").absolutePath)
- Process(command, path("."), "JAVA_OPTS" -> "-Xmx500m") !
-
- None
- }.dependsOn(compile, testCompile).describedAs("Generate XML test report.")
-}
-
-
-// Project mixin for creating a JAR with a project's dependencies. This is based
-// on the AssemblyBuilder plugin, but because this plugin attempts to package Scala
-// and our project too, we leave that out using our own exclude filter (depJarExclude).
-trait DepJar extends AssemblyBuilder {
- def depJarExclude(base: PathFinder) = {
- (base / "scala" ** "*") +++ // exclude scala library
- (base / "spark" ** "*") +++ // exclude Spark classes
- ((base / "META-INF" ** "*") --- // generally ignore the hell out of META-INF
- (base / "META-INF" / "services" ** "*") --- // include all service providers
- (base / "META-INF" / "maven" ** "*")) // include all Maven POMs and such
- }
-
- def depJarTempDir = outputPath / "dep-classes"
-
- def depJarOutputPath =
- outputPath / (name.toLowerCase.replace(" ", "-") + "-dep-" + version.toString + ".jar")
-
- lazy val depJar = {
- packageTask(
- Path.lazyPathFinder(assemblyPaths(depJarTempDir,
- assemblyClasspath,
- assemblyExtraJars,
- depJarExclude)),
- depJarOutputPath,
- packageOptions)
- }.dependsOn(compile).describedAs("Bundle project's dependencies into a JAR.")
-}
diff --git a/project/plugins/SparkProjectPlugins.scala b/project/plugins/SparkProjectPlugins.scala
deleted file mode 100644
index 565f160829..0000000000
--- a/project/plugins/SparkProjectPlugins.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-import sbt._
-
-class SparkProjectPlugins(info: ProjectInfo) extends PluginDefinition(info) {
- val eclipse = "de.element34" % "sbt-eclipsify" % "0.7.0"
-
- val sbtIdeaRepo = "sbt-idea-repo" at "http://mpeltonen.github.com/maven/"
- val sbtIdea = "com.github.mpeltonen" % "sbt-idea-plugin" % "0.4.0"
-
- val codaRepo = "Coda Hale's Repository" at "http://repo.codahale.com/"
- val assemblySBT = "com.codahale" % "assembly-sbt" % "0.1.1"
-}
diff --git a/project/plugins/build.sbt b/project/plugins/build.sbt
new file mode 100644
index 0000000000..91c6cb4df1
--- /dev/null
+++ b/project/plugins/build.sbt
@@ -0,0 +1,13 @@
+resolvers += {
+ val typesafeRepoUrl = new java.net.URL("http://repo.typesafe.com/typesafe/releases")
+ val pattern = Patterns(false, "[organisation]/[module]/[sbtversion]/[revision]/[type]s/[module](-[classifier])-[revision].[ext]")
+ Resolver.url("Typesafe Repository", typesafeRepoUrl)(pattern)
+}
+
+resolvers += "sbt-idea-repo" at "http://mpeltonen.github.com/maven/"
+
+libraryDependencies += "com.github.mpeltonen" %% "sbt-idea" % "0.10.0"
+
+libraryDependencies <<= (libraryDependencies, sbtVersion) { (deps, version) =>
+ deps :+ ("com.typesafe.sbteclipse" %% "sbteclipse" % "1.2" extra("sbtversion" -> version))
+}
diff --git a/project/plugins/project/SparkPluginBuild.scala b/project/plugins/project/SparkPluginBuild.scala
new file mode 100644
index 0000000000..999611982a
--- /dev/null
+++ b/project/plugins/project/SparkPluginBuild.scala
@@ -0,0 +1,7 @@
+import sbt._
+
+object SparkPluginDef extends Build {
+ lazy val root = Project("plugins", file(".")) dependsOn(junitXmlListener)
+ /* This is not published in a Maven repository, so we get it from GitHub directly */
+ lazy val junitXmlListener = uri("git://github.com/ijuma/junit_xml_listener.git#fe434773255b451a38e8d889536ebc260f4225ce")
+}
\ No newline at end of file
diff --git a/repl/lib/jline.jar b/repl/lib/jline.jar
deleted file mode 100644
index 6ed67faab6..0000000000
--- a/repl/lib/jline.jar
+++ /dev/null
Binary files differ
diff --git a/repl/lib/scala-jline.jar b/repl/lib/scala-jline.jar
new file mode 100644
index 0000000000..2f18c95cdd
--- /dev/null
+++ b/repl/lib/scala-jline.jar
Binary files differ
diff --git a/repl/src/main/scala/spark/repl/Main.scala b/repl/src/main/scala/spark/repl/Main.scala
index f00df5aa58..b4a2bb05f9 100644
--- a/repl/src/main/scala/spark/repl/Main.scala
+++ b/repl/src/main/scala/spark/repl/Main.scala
@@ -3,14 +3,14 @@ package spark.repl
import scala.collection.mutable.Set
object Main {
- private var _interp: SparkInterpreterLoop = null
+ private var _interp: SparkILoop = null
def interp = _interp
- private[repl] def interp_=(i: SparkInterpreterLoop) { _interp = i }
+ private[repl] def interp_=(i: SparkILoop) { _interp = i }
def main(args: Array[String]) {
- _interp = new SparkInterpreterLoop
- _interp.main(args)
+ _interp = new SparkILoop
+ _interp.process(args)
}
}
diff --git a/repl/src/main/scala/spark/repl/SparkCompletionOutput.scala b/repl/src/main/scala/spark/repl/SparkCompletionOutput.scala
deleted file mode 100644
index 5ac46e3412..0000000000
--- a/repl/src/main/scala/spark/repl/SparkCompletionOutput.scala
+++ /dev/null
@@ -1,92 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2010 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package spark.repl
-
-import scala.tools.nsc
-import scala.tools.nsc._
-import scala.tools.nsc.interpreter
-import scala.tools.nsc.interpreter._
-
-/** This has a lot of duplication with other methods in Symbols and Types,
- * but repl completion utility is very sensitive to precise output. Best
- * thing would be to abstract an interface for how such things are printed,
- * as is also in progress with error messages.
- */
-trait SparkCompletionOutput {
- self: SparkCompletion =>
-
- import global._
- import definitions.{ NothingClass, AnyClass, isTupleType, isFunctionType, isRepeatedParamType }
-
- /** Reducing fully qualified noise for some common packages.
- */
- val typeTransforms = List(
- "java.lang." -> "",
- "scala.collection.immutable." -> "immutable.",
- "scala.collection.mutable." -> "mutable.",
- "scala.collection.generic." -> "generic."
- )
-
- def quietString(tp: String): String =
- typeTransforms.foldLeft(tp) {
- case (str, (prefix, replacement)) =>
- if (str startsWith prefix) replacement + (str stripPrefix prefix)
- else str
- }
-
- class MethodSymbolOutput(method: Symbol) {
- val pkg = method.ownerChain find (_.isPackageClass) map (_.fullName) getOrElse ""
-
- def relativize(str: String): String = quietString(str stripPrefix (pkg + "."))
- def relativize(tp: Type): String = relativize(tp.normalize.toString)
- def relativize(sym: Symbol): String = relativize(sym.info)
-
- def braceList(tparams: List[String]) = if (tparams.isEmpty) "" else (tparams map relativize).mkString("[", ", ", "]")
- def parenList(params: List[Any]) = params.mkString("(", ", ", ")")
-
- def methodTypeToString(mt: MethodType) =
- (mt.paramss map paramsString mkString "") + ": " + relativize(mt.finalResultType)
-
- def typeToString(tp: Type): String = relativize(
- tp match {
- case x if isFunctionType(x) => functionString(x)
- case x if isTupleType(x) => tupleString(x)
- case x if isRepeatedParamType(x) => typeToString(x.typeArgs.head) + "*"
- case mt @ MethodType(_, _) => methodTypeToString(mt)
- case x => x.toString
- }
- )
-
- def tupleString(tp: Type) = parenList(tp.normalize.typeArgs map relativize)
- def functionString(tp: Type) = tp.normalize.typeArgs match {
- case List(t, r) => t + " => " + r
- case xs => parenList(xs.init) + " => " + xs.last
- }
-
- def tparamsString(tparams: List[Symbol]) = braceList(tparams map (_.defString))
- def paramsString(params: List[Symbol]) = {
- def paramNameString(sym: Symbol) = if (sym.isSynthetic) "" else sym.nameString + ": "
- def paramString(sym: Symbol) = paramNameString(sym) + typeToString(sym.info.normalize)
-
- val isImplicit = params.nonEmpty && params.head.isImplicit
- val strs = (params map paramString) match {
- case x :: xs if isImplicit => ("implicit " + x) :: xs
- case xs => xs
- }
- parenList(strs)
- }
-
- def methodString() =
- method.keyString + " " + method.nameString + (method.info.normalize match {
- case PolyType(Nil, resType) => ": " + typeToString(resType) // nullary method
- case PolyType(tparams, resType) => tparamsString(tparams) + typeToString(resType)
- case mt @ MethodType(_, _) => methodTypeToString(mt)
- case x =>
- DBG("methodString(): %s / %s".format(x.getClass, x))
- x.toString
- })
- }
-}
diff --git a/repl/src/main/scala/spark/repl/SparkHelper.scala b/repl/src/main/scala/spark/repl/SparkHelper.scala
new file mode 100644
index 0000000000..d8fb7191b4
--- /dev/null
+++ b/repl/src/main/scala/spark/repl/SparkHelper.scala
@@ -0,0 +1,5 @@
+package scala.tools.nsc
+
+object SparkHelper {
+ def explicitParentLoader(settings: Settings) = settings.explicitParentLoader
+}
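Worth noting: the object above is declared in package `scala.tools.nsc` even though the file lives in the repl source tree, presumably because `Settings.explicitParentLoader` is access-restricted to that package. A hypothetical caller in `spark.repl` can then reach it indirectly, mirroring `SparkILoop.parentClassLoader` below:

    // Hypothetical caller, for illustration only.
    def parentLoaderOf(settings: scala.tools.nsc.Settings): ClassLoader =
      scala.tools.nsc.SparkHelper.explicitParentLoader(settings)
        .getOrElse(getClass.getClassLoader)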
diff --git a/repl/src/main/scala/spark/repl/SparkILoop.scala b/repl/src/main/scala/spark/repl/SparkILoop.scala
new file mode 100644
index 0000000000..490efa69bc
--- /dev/null
+++ b/repl/src/main/scala/spark/repl/SparkILoop.scala
@@ -0,0 +1,998 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2011 LAMP/EPFL
+ * @author Alexander Spoon
+ */
+
+package spark.repl
+
+import scala.tools.nsc._
+import scala.tools.nsc.interpreter._
+
+import Predef.{ println => _, _ }
+import java.io.{ BufferedReader, FileReader, PrintWriter }
+import scala.sys.process.Process
+import session._
+import scala.tools.nsc.interpreter.{ Results => IR }
+import scala.tools.util.{ SignalManager, Signallable, Javap }
+import scala.annotation.tailrec
+import scala.util.control.Exception.{ ignoring }
+import scala.collection.mutable.ListBuffer
+import scala.concurrent.ops
+import util.{ ClassPath, Exceptional, stringFromWriter, stringFromStream }
+import interpreter._
+import io.{ File, Sources }
+
+import spark.Logging
+import spark.SparkContext
+
+/** The Scala interactive shell. It provides a read-eval-print loop
+ * around the Interpreter class.
+ * After instantiation, clients should call the main() method.
+ *
+ * If no in0 is specified, then input will come from the console, and
+ * the class will attempt to provide input editing features such as
+ * input history.
+ *
+ * @author Moez A. Abdel-Gawad
+ * @author Lex Spoon
+ * @version 1.2
+ */
+class SparkILoop(in0: Option[BufferedReader], val out: PrintWriter, val master: Option[String])
+ extends AnyRef
+ with LoopCommands
+ with Logging
+{
+ def this(in0: BufferedReader, out: PrintWriter, master: String) = this(Some(in0), out, Some(master))
+ def this(in0: BufferedReader, out: PrintWriter) = this(Some(in0), out, None)
+ def this() = this(None, new PrintWriter(Console.out, true), None)
+
+ var in: InteractiveReader = _ // the input stream from which commands come
+ var settings: Settings = _
+ var intp: SparkIMain = _
+
+ /*
+ lazy val power = {
+ val g = intp.global
+ Power[g.type](this, g)
+ }
+ */
+
+ // TODO
+ // object opt extends AestheticSettings
+ //
+ @deprecated("Use `intp` instead.", "2.9.0")
+ def interpreter = intp
+
+ @deprecated("Use `intp` instead.", "2.9.0")
+ def interpreter_= (i: SparkIMain): Unit = intp = i
+
+ def history = in.history
+
+ /** The context class loader at the time this object was created */
+ protected val originalClassLoader = Thread.currentThread.getContextClassLoader
+
+ // Install a signal handler so we can be prodded.
+ private val signallable =
+ /*if (isReplDebug) Signallable("Dump repl state.")(dumpCommand())
+ else*/ null
+
+ // classpath entries added via :cp
+ var addedClasspath: String = ""
+
+ /** A reverse list of commands to replay if the user requests a :replay */
+ var replayCommandStack: List[String] = Nil
+
+ /** A list of commands to replay if the user requests a :replay */
+ def replayCommands = replayCommandStack.reverse
+
+ /** Record a command for replay should the user request a :replay */
+ def addReplay(cmd: String) = replayCommandStack ::= cmd
+
+ /** Try to install sigint handler: ignore failure. Signal handler
+ * will interrupt current line execution if any is in progress.
+ *
+ * Attempting to protect the repl from accidental exit, we only honor
+ * a single ctrl-C if the current buffer is empty: otherwise we look
+ * for a second one within a short time.
+ */
+ private def installSigIntHandler() {
+ def onExit() {
+ Console.println("") // avoiding "shell prompt in middle of line" syndrome
+ sys.exit(1)
+ }
+ ignoring(classOf[Exception]) {
+ SignalManager("INT") = {
+ if (intp == null)
+ onExit()
+ else if (intp.lineManager.running)
+ intp.lineManager.cancel()
+ else if (in.currentLine != "") {
+ // non-empty buffer, so make them hit ctrl-C a second time
+ SignalManager("INT") = onExit()
+ io.timer(5)(installSigIntHandler()) // and restore original handler if they don't
+ }
+ else onExit()
+ }
+ }
+ }
+
+ /** Close the interpreter and set the var to null. */
+ def closeInterpreter() {
+ if (intp ne null) {
+ intp.close
+ intp = null
+ Thread.currentThread.setContextClassLoader(originalClassLoader)
+ }
+ }
+
+ class SparkILoopInterpreter extends SparkIMain(settings, out) {
+ override lazy val formatting = new Formatting {
+ def prompt = SparkILoop.this.prompt
+ }
+ override protected def createLineManager() = new Line.Manager {
+ override def onRunaway(line: Line[_]): Unit = {
+ val template = """
+ |// She's gone rogue, captain! Have to take her out!
+ |// Calling Thread.stop on runaway %s with offending code:
+ |// scala> %s""".stripMargin
+
+ echo(template.format(line.thread, line.code))
+ // XXX no way to suppress the deprecation warning
+ line.thread.stop()
+ in.redrawLine()
+ }
+ }
+ override protected def parentClassLoader = {
+ SparkHelper.explicitParentLoader(settings).getOrElse( classOf[SparkILoop].getClassLoader )
+ }
+ }
+
+ /** Create a new interpreter. */
+ def createInterpreter() {
+ if (addedClasspath != "")
+ settings.classpath append addedClasspath
+
+ intp = new SparkILoopInterpreter
+ intp.setContextClassLoader()
+ installSigIntHandler()
+ }
+
+ /** print a friendly help message */
+ def helpCommand(line: String): Result = {
+ if (line == "") helpSummary()
+ else uniqueCommand(line) match {
+ case Some(lc) => echo("\n" + lc.longHelp)
+ case _ => ambiguousError(line)
+ }
+ }
+ private def helpSummary() = {
+ val usageWidth = commands map (_.usageMsg.length) max
+ val formatStr = "%-" + usageWidth + "s %s %s"
+
+ echo("All commands can be abbreviated, e.g. :he instead of :help.")
+ echo("Those marked with a * have more detailed help, e.g. :help imports.\n")
+
+ commands foreach { cmd =>
+ val star = if (cmd.hasLongHelp) "*" else " "
+ echo(formatStr.format(cmd.usageMsg, star, cmd.help))
+ }
+ }
+ private def ambiguousError(cmd: String): Result = {
+ matchingCommands(cmd) match {
+ case Nil => echo(cmd + ": no such command. Type :help for help.")
+ case xs => echo(cmd + " is ambiguous: did you mean " + xs.map(":" + _.name).mkString(" or ") + "?")
+ }
+ Result(true, None)
+ }
+ private def matchingCommands(cmd: String) = commands filter (_.name startsWith cmd)
+ private def uniqueCommand(cmd: String): Option[LoopCommand] = {
+ // this lets us add commands willy-nilly and only requires enough command to disambiguate
+ matchingCommands(cmd) match {
+ case List(x) => Some(x)
+ // exact match OK even if otherwise appears ambiguous
+ case xs => xs find (_.name == cmd)
+ }
+ }
+
+ /** Print a welcome message */
+ def printWelcome() {
+ echo("""Welcome to
+ ____ __
+ / __/__ ___ _____/ /__
+ _\ \/ _ \/ _ `/ __/ '_/
+ /___/ .__/\_,_/_/ /_/\_\ version 0.4
+ /_/
+""")
+ import Properties._
+ val welcomeMsg = "Using Scala %s (%s, Java %s)".format(
+ versionString, javaVmName, javaVersion)
+ echo(welcomeMsg)
+ }
+
+ /** Show the history */
+ lazy val historyCommand = new LoopCommand("history", "show the history (optional num is commands to show)") {
+ override def usage = "[num]"
+ def defaultLines = 20
+
+ def apply(line: String): Result = {
+ if (history eq NoHistory)
+ return "No history available."
+
+ val xs = words(line)
+ val current = history.index
+ val count = try xs.head.toInt catch { case _: Exception => defaultLines }
+ val lines = history.asStrings takeRight count
+ val offset = current - lines.size + 1
+
+ for ((line, index) <- lines.zipWithIndex)
+ echo("%3d %s".format(index + offset, line))
+ }
+ }
+
+ private def echo(msg: String) = {
+ out println msg
+ out.flush()
+ }
+ private def echoNoNL(msg: String) = {
+ out print msg
+ out.flush()
+ }
+
+ /** Search the history */
+ def searchHistory(_cmdline: String) {
+ val cmdline = _cmdline.toLowerCase
+ val offset = history.index - history.size + 1
+
+ for ((line, index) <- history.asStrings.zipWithIndex ; if line.toLowerCase contains cmdline)
+ echo("%d %s".format(index + offset, line))
+ }
+
+ private var currentPrompt = Properties.shellPromptString
+ def setPrompt(prompt: String) = currentPrompt = prompt
+ /** Prompt to print when awaiting input */
+ def prompt = currentPrompt
+
+ import LoopCommand.{ cmd, nullary }
+
+ /** Standard commands **/
+ lazy val standardCommands = List(
+ cmd("cp", "<path>", "add a jar or directory to the classpath", addClasspath),
+ cmd("help", "[command]", "print this summary or command-specific help", helpCommand),
+ historyCommand,
+ cmd("h?", "<string>", "search the history", searchHistory),
+ cmd("imports", "[name name ...]", "show import history, identifying sources of names", importsCommand),
+ cmd("implicits", "[-v]", "show the implicits in scope", implicitsCommand),
+ cmd("javap", "<path|class>", "disassemble a file or class name", javapCommand),
+ nullary("keybindings", "show how ctrl-[A-Z] and other keys are bound", keybindingsCommand),
+ cmd("load", "<path>", "load and interpret a Scala file", loadCommand),
+ nullary("paste", "enter paste mode: all input up to ctrl-D compiled together", pasteCommand),
+ //nullary("power", "enable power user mode", powerCmd),
+ nullary("quit", "exit the interpreter", () => Result(false, None)),
+ nullary("replay", "reset execution and replay all previous commands", replay),
+ shCommand,
+ nullary("silent", "disable/enable automatic printing of results", verbosity),
+ cmd("type", "<expr>", "display the type of an expression without evaluating it", typeCommand)
+ )
+
+ /** Power user commands */
+ lazy val powerCommands: List[LoopCommand] = List(
+ //nullary("dump", "displays a view of the interpreter's internal state", dumpCommand),
+ //cmd("phase", "<phase>", "set the implicit phase for power commands", phaseCommand),
+ cmd("wrap", "<method>", "name of method to wrap around each repl line", wrapCommand) withLongHelp ("""
+ |:wrap
+ |:wrap clear
+ |:wrap <method>
+ |
+ |Installs a wrapper around each line entered into the repl.
+ |Currently it must be the simple name of an existing method
+ |with the specific signature shown in the following example.
+ |
+ |def timed[T](body: => T): T = {
+ | val start = System.nanoTime
+ | try body
+ | finally println((System.nanoTime - start) + " nanos elapsed.")
+ |}
+ |:wrap timed
+ |
+ |If given no argument, :wrap names the wrapper installed.
+ |An argument of clear will remove the wrapper if any is active.
+ |Note that wrappers do not compose (a new one replaces the old
+ |one) and also that the :phase command uses the same machinery,
+ |so setting :wrap will clear any :phase setting.
+ """.stripMargin.trim)
+ )
+
+ /*
+ private def dumpCommand(): Result = {
+ echo("" + power)
+ history.asStrings takeRight 30 foreach echo
+ in.redrawLine()
+ }
+ */
+
+ private val typeTransforms = List(
+ "scala.collection.immutable." -> "immutable.",
+ "scala.collection.mutable." -> "mutable.",
+ "scala.collection.generic." -> "generic.",
+ "java.lang." -> "jl.",
+ "scala.runtime." -> "runtime."
+ )
+
+ private def importsCommand(line: String): Result = {
+ val tokens = words(line)
+ val handlers = intp.languageWildcardHandlers ++ intp.importHandlers
+ val isVerbose = tokens contains "-v"
+
+ handlers.filterNot(_.importedSymbols.isEmpty).zipWithIndex foreach {
+ case (handler, idx) =>
+ val (types, terms) = handler.importedSymbols partition (_.name.isTypeName)
+ val imps = handler.implicitSymbols
+ val found = tokens filter (handler importsSymbolNamed _)
+ val typeMsg = if (types.isEmpty) "" else types.size + " types"
+ val termMsg = if (terms.isEmpty) "" else terms.size + " terms"
+ val implicitMsg = if (imps.isEmpty) "" else imps.size + " are implicit"
+ val foundMsg = if (found.isEmpty) "" else found.mkString(" // imports: ", ", ", "")
+ val statsMsg = List(typeMsg, termMsg, implicitMsg) filterNot (_ == "") mkString ("(", ", ", ")")
+
+ intp.reporter.printMessage("%2d) %-30s %s%s".format(
+ idx + 1,
+ handler.importString,
+ statsMsg,
+ foundMsg
+ ))
+ }
+ }
+
+ private def implicitsCommand(line: String): Result = {
+ val intp = SparkILoop.this.intp
+ import intp._
+ import global.Symbol
+
+ def p(x: Any) = intp.reporter.printMessage("" + x)
+
+ // If an argument is given, only show a source with that
+ // in its name somewhere.
+ val args = line split "\\s+"
+ val filtered = intp.implicitSymbolsBySource filter {
+ case (source, syms) =>
+ (args contains "-v") || {
+ if (line == "") (source.fullName.toString != "scala.Predef")
+ else (args exists (source.name.toString contains _))
+ }
+ }
+
+ if (filtered.isEmpty)
+ return "No implicits have been imported other than those in Predef."
+
+ filtered foreach {
+ case (source, syms) =>
+ p("/* " + syms.size + " implicit members imported from " + source.fullName + " */")
+
+ // This groups the members by where the symbol is defined
+ val byOwner = syms groupBy (_.owner)
+ val sortedOwners = byOwner.toList sortBy { case (owner, _) => intp.afterTyper(source.info.baseClasses indexOf owner) }
+
+ sortedOwners foreach {
+ case (owner, members) =>
+ // Within each owner, we cluster results based on the final result type
+ // if there are more than a couple, and sort each cluster based on name.
+ // This is really just trying to make the 100 or so implicits imported
+ // by default into something readable.
+ val memberGroups: List[List[Symbol]] = {
+ val groups = members groupBy (_.tpe.finalResultType) toList
+ val (big, small) = groups partition (_._2.size > 3)
+ val xss = (
+ (big sortBy (_._1.toString) map (_._2)) :+
+ (small flatMap (_._2))
+ )
+
+ xss map (xs => xs sortBy (_.name.toString))
+ }
+
+ val ownerMessage = if (owner == source) " defined in " else " inherited from "
+ p(" /* " + members.size + ownerMessage + owner.fullName + " */")
+
+ memberGroups foreach { group =>
+ group foreach (s => p(" " + intp.symbolDefString(s)))
+ p("")
+ }
+ }
+ p("")
+ }
+ }
+
+ protected def newJavap() = new Javap(intp.classLoader, new SparkIMain.ReplStrippingWriter(intp)) {
+ override def tryClass(path: String): Array[Byte] = {
+ // Look for Foo first, then Foo$, but if Foo$ is given explicitly,
+ // we have to drop the $ to find object Foo, then tack it back onto
+ // the end of the flattened name.
+ def className = intp flatName path
+ def moduleName = (intp flatName path.stripSuffix("$")) + "$"
+
+ val bytes = super.tryClass(className)
+ if (bytes.nonEmpty) bytes
+ else super.tryClass(moduleName)
+ }
+ }
+ private lazy val javap =
+ try newJavap()
+ catch { case _: Exception => null }
+
+ private def typeCommand(line: String): Result = {
+ intp.typeOfExpression(line) match {
+ case Some(tp) => tp.toString
+ case _ => "Failed to determine type."
+ }
+ }
+
+ private def javapCommand(line: String): Result = {
+ if (javap == null)
+ return ":javap unavailable on this platform."
+ if (line == "")
+ return ":javap [-lcsvp] [path1 path2 ...]"
+
+ javap(words(line)) foreach { res =>
+ if (res.isError) return "Failed: " + res.value
+ else res.show()
+ }
+ }
+ private def keybindingsCommand(): Result = {
+ if (in.keyBindings.isEmpty) "Key bindings unavailable."
+ else {
+ echo("Reading jline properties for default key bindings.")
+ echo("Accuracy not guaranteed: treat this as a guideline only.\n")
+ in.keyBindings foreach (x => echo ("" + x))
+ }
+ }
+ private def wrapCommand(line: String): Result = {
+ def failMsg = "Argument to :wrap must be the name of a method with signature [T](=> T): T"
+ val intp = SparkILoop.this.intp
+ val g: intp.global.type = intp.global
+ import g._
+
+ words(line) match {
+ case Nil =>
+ intp.executionWrapper match {
+ case "" => "No execution wrapper is set."
+ case s => "Current execution wrapper: " + s
+ }
+ case "clear" :: Nil =>
+ intp.executionWrapper match {
+ case "" => "No execution wrapper is set."
+ case s => intp.clearExecutionWrapper() ; "Cleared execution wrapper."
+ }
+ case wrapper :: Nil =>
+ intp.typeOfExpression(wrapper) match {
+ case Some(PolyType(List(targ), MethodType(List(arg), restpe))) =>
+ intp setExecutionWrapper intp.pathToTerm(wrapper)
+ "Set wrapper to '" + wrapper + "'"
+ case Some(x) =>
+ failMsg + "\nFound: " + x
+ case _ =>
+ failMsg + "\nFound: <unknown>"
+ }
+ case _ => failMsg
+ }
+ }
+
+ private def pathToPhaseWrapper = intp.pathToTerm("$r") + ".phased.atCurrent"
+ /*
+ private def phaseCommand(name: String): Result = {
+ // This line crashes us in TreeGen:
+ //
+ // if (intp.power.phased set name) "..."
+ //
+ // Exception in thread "main" java.lang.AssertionError: assertion failed: ._7.type
+ // at scala.Predef$.assert(Predef.scala:99)
+ // at scala.tools.nsc.ast.TreeGen.mkAttributedQualifier(TreeGen.scala:69)
+ // at scala.tools.nsc.ast.TreeGen.mkAttributedQualifier(TreeGen.scala:44)
+ // at scala.tools.nsc.ast.TreeGen.mkAttributedRef(TreeGen.scala:101)
+ // at scala.tools.nsc.ast.TreeGen.mkAttributedStableRef(TreeGen.scala:143)
+ //
+ // But it works like so, type annotated.
+ val phased: Phased = power.phased
+ import phased.NoPhaseName
+
+ if (name == "clear") {
+ phased.set(NoPhaseName)
+ intp.clearExecutionWrapper()
+ "Cleared active phase."
+ }
+ else if (name == "") phased.get match {
+ case NoPhaseName => "Usage: :phase <expr> (e.g. typer, erasure.next, erasure+3)"
+ case ph => "Active phase is '%s'. (To clear, :phase clear)".format(phased.get)
+ }
+ else {
+ val what = phased.parse(name)
+ if (what.isEmpty || !phased.set(what))
+ "'" + name + "' does not appear to represent a valid phase."
+ else {
+ intp.setExecutionWrapper(pathToPhaseWrapper)
+ val activeMessage =
+ if (what.toString.length == name.length) "" + what
+ else "%s (%s)".format(what, name)
+
+ "Active phase is now: " + activeMessage
+ }
+ }
+ }
+ */
+
+ /** Available commands */
+ def commands: List[LoopCommand] = standardCommands /* ++ (
+ if (isReplPower) powerCommands else Nil
+ )*/
+
+ val replayQuestionMessage =
+ """|The repl compiler has crashed spectacularly. Shall I replay your
+ |session? I can re-run all lines except the last one.
+ |[y/n]
+ """.trim.stripMargin
+
+ private val crashRecovery: PartialFunction[Throwable, Unit] = {
+ case ex: Throwable =>
+ if (settings.YrichExes.value) {
+ val sources = implicitly[Sources]
+ echo("\n" + ex.getMessage)
+ echo(
+ if (isReplDebug) "[searching " + sources.path + " for exception contexts...]"
+ else "[searching for exception contexts...]"
+ )
+ echo(Exceptional(ex).force().context())
+ }
+ else {
+ echo(util.stackTraceString(ex))
+ }
+ ex match {
+ case _: NoSuchMethodError | _: NoClassDefFoundError =>
+ echo("Unrecoverable error.")
+ throw ex
+ case _ =>
+ def fn(): Boolean = in.readYesOrNo(replayQuestionMessage, { echo("\nYou must enter y or n.") ; fn() })
+ if (fn()) replay()
+ else echo("\nAbandoning crashed session.")
+ }
+ }
+
+ /** The main read-eval-print loop for the repl. It calls
+ * command() for each line of input, and stops when
+ * command() returns false.
+ */
+ def loop() {
+ def readOneLine() = {
+ out.flush()
+ in readLine prompt
+ }
+ // return false if repl should exit
+ def processLine(line: String): Boolean =
+ if (line eq null) false // assume null means EOF
+ else command(line) match {
+ case Result(false, _) => false
+ case Result(_, Some(finalLine)) => addReplay(finalLine) ; true
+ case _ => true
+ }
+
+ while (true) {
+ try if (!processLine(readOneLine)) return
+ catch crashRecovery
+ }
+ }
+
+ /** interpret all lines from a specified file */
+ def interpretAllFrom(file: File) {
+ val oldIn = in
+ val oldReplay = replayCommandStack
+
+ try file applyReader { reader =>
+ in = SimpleReader(reader, out, false)
+ echo("Loading " + file + "...")
+ loop()
+ }
+ finally {
+ in = oldIn
+ replayCommandStack = oldReplay
+ }
+ }
+
+ /** create a new interpreter and replay all commands so far */
+ def replay() {
+ closeInterpreter()
+ createInterpreter()
+ for (cmd <- replayCommands) {
+ echo("Replaying: " + cmd) // flush because maybe cmd will have its own output
+ command(cmd)
+ echo("")
+ }
+ }
+
+ /** fork a shell and run a command */
+ lazy val shCommand = new LoopCommand("sh", "run a shell command (result is implicitly => List[String])") {
+ override def usage = "<command line>"
+ def apply(line: String): Result = line match {
+ case "" => showUsage()
+ case _ =>
+ val toRun = classOf[ProcessResult].getName + "(" + string2codeQuoted(line) + ")"
+ intp interpret toRun
+ ()
+ }
+ }
+
+ def withFile(filename: String)(action: File => Unit) {
+ val f = File(filename)
+
+ if (f.exists) action(f)
+ else echo("That file does not exist")
+ }
+
+ def loadCommand(arg: String) = {
+ var shouldReplay: Option[String] = None
+ withFile(arg)(f => {
+ interpretAllFrom(f)
+ shouldReplay = Some(":load " + arg)
+ })
+ Result(true, shouldReplay)
+ }
+
+ def addClasspath(arg: String): Unit = {
+ val f = File(arg).normalize
+ if (f.exists) {
+ addedClasspath = ClassPath.join(addedClasspath, f.path)
+ val totalClasspath = ClassPath.join(settings.classpath.value, addedClasspath)
+ echo("Added '%s'. Your new classpath is:\n\"%s\"".format(f.path, totalClasspath))
+ replay()
+ }
+ else echo("The path '" + f + "' doesn't seem to exist.")
+ }
+
+ def powerCmd(): Result = {
+ if (isReplPower) "Already in power mode."
+ else enablePowerMode()
+ }
+ def enablePowerMode() = {
+ //replProps.power setValue true
+ //power.unleash()
+ //echo(power.banner)
+ }
+
+ def verbosity() = {
+ val old = intp.printResults
+ intp.printResults = !old
+ echo("Switched " + (if (old) "off" else "on") + " result printing.")
+ }
+
+ /** Run one command submitted by the user. Two values are returned:
+ * (1) whether to keep running, (2) the line to record for replay,
+ * if any. */
+ def command(line: String): Result = {
+ if (line startsWith ":") {
+ val cmd = line.tail takeWhile (x => !x.isWhitespace)
+ uniqueCommand(cmd) match {
+ case Some(lc) => lc(line.tail stripPrefix cmd dropWhile (_.isWhitespace))
+ case _ => ambiguousError(cmd)
+ }
+ }
+ else if (intp.global == null) Result(false, None) // Notice failure to create compiler
+ else Result(true, interpretStartingWith(line))
+ }
+
+ private def readWhile(cond: String => Boolean) = {
+ Iterator continually in.readLine("") takeWhile (x => x != null && cond(x))
+ }
+
+ def pasteCommand(): Result = {
+ echo("// Entering paste mode (ctrl-D to finish)\n")
+ val code = readWhile(_ => true) mkString "\n"
+ echo("\n// Exiting paste mode, now interpreting.\n")
+ intp interpret code
+ ()
+ }
+
+ private object paste extends Pasted {
+ val ContinueString = " | "
+ val PromptString = "scala> "
+
+ def interpret(line: String): Unit = {
+ echo(line.trim)
+ intp interpret line
+ echo("")
+ }
+
+ def transcript(start: String) = {
+ // Printing this message doesn't work very well because it's buried in the
+ // transcript they just pasted. Todo: a short timer goes off when
+ // lines stop coming which tells them to hit ctrl-D.
+ //
+ // echo("// Detected repl transcript paste: ctrl-D to finish.")
+ apply(Iterator(start) ++ readWhile(_.trim != PromptString.trim))
+ }
+ }
+ import paste.{ ContinueString, PromptString }
+
+ /** Interpret expressions starting with the first line.
+ * Read lines until a complete compilation unit is available
+ * or until a syntax error has been seen. If a full unit is
+ * read, go ahead and interpret it. Return the full string
+ * to be recorded for replay, if any.
+ */
+ def interpretStartingWith(code: String): Option[String] = {
+    // signal to the completion object that non-completion input has been received
+ in.completion.resetVerbosity()
+
+ def reallyInterpret = {
+ val reallyResult = intp.interpret(code)
+ (reallyResult, reallyResult match {
+ case IR.Error => None
+ case IR.Success => Some(code)
+ case IR.Incomplete =>
+ if (in.interactive && code.endsWith("\n\n")) {
+ echo("You typed two blank lines. Starting a new command.")
+ None
+ }
+ else in.readLine(ContinueString) match {
+ case null =>
+ // we know compilation is going to fail since we're at EOF and the
+ // parser thinks the input is still incomplete, but since this is
+ // a file being read non-interactively we want to fail. So we send
+ // it straight to the compiler for the nice error message.
+ intp.compileString(code)
+ None
+
+ case line => interpretStartingWith(code + "\n" + line)
+ }
+ })
+ }
+
+ /** Here we place ourselves between the user and the interpreter and examine
+ * the input they are ostensibly submitting. We intervene in several cases:
+ *
+ * 1) If the line starts with "scala> " it is assumed to be an interpreter paste.
+ * 2) If the line starts with "." (but not ".." or "./") it is treated as an invocation
+ * on the previous result.
+ * 3) If the Completion object's execute returns Some(_), we inject that value
+ * and avoid the interpreter, as it's likely not valid scala code.
+ */
+ if (code == "") None
+ else if (!paste.running && code.trim.startsWith(PromptString)) {
+ paste.transcript(code)
+ None
+ }
+ else if (Completion.looksLikeInvocation(code) && intp.mostRecentVar != "") {
+ interpretStartingWith(intp.mostRecentVar + code)
+ }
+ else {
+ def runCompletion = in.completion execute code map (intp bindValue _)
+ /** Due to my accidentally letting file completion execution sneak ahead
+ * of actual parsing this now operates in such a way that the scala
+ * interpretation always wins. However to avoid losing useful file
+ * completion I let it fail and then check the others. So if you
+ * type /tmp it will echo a failure and then give you a Directory object.
+ * It's not pretty: maybe I'll implement the silence bits I need to avoid
+ * echoing the failure.
+ */
+ if (intp isParseable code) {
+ val (code, result) = reallyInterpret
+ //if (power != null && code == IR.Error)
+ // runCompletion
+
+ result
+ }
+ else runCompletion match {
+ case Some(_) => None // completion hit: avoid the latent error
+ case _ => reallyInterpret._2 // trigger the latent error
+ }
+ }
+ }
+
+ // runs :load `file` on any files passed via -i
+ def loadFiles(settings: Settings) = settings match {
+ case settings: GenericRunnerSettings =>
+ for (filename <- settings.loadfiles.value) {
+ val cmd = ":load " + filename
+ command(cmd)
+ addReplay(cmd)
+ echo("")
+ }
+ case _ =>
+ }
+
+ /** Tries to create a JLineReader, falling back to SimpleReader:
+ * unless settings or properties are such that it should start
+ * with SimpleReader.
+ */
+ def chooseReader(settings: Settings): InteractiveReader = {
+ if (settings.Xnojline.value || Properties.isEmacsShell)
+ SimpleReader()
+ else try JLineReader(
+ if (settings.noCompletion.value) NoCompletion
+ else new JLineCompletion(intp)
+ )
+ catch {
+ case ex @ (_: Exception | _: NoClassDefFoundError) =>
+ echo("Failed to created JLineReader: " + ex + "\nFalling back to SimpleReader.")
+ SimpleReader()
+ }
+ }
+
+ def initializeSpark() {
+ intp.beQuietDuring {
+ command("""
+ spark.repl.Main.interp.out.println("Creating SparkContext...");
+ spark.repl.Main.interp.out.flush();
+ @transient val sc = spark.repl.Main.interp.createSparkContext();
+ sc.waitForRegister();
+ spark.repl.Main.interp.out.println("Spark context available as sc.");
+ spark.repl.Main.interp.out.flush();
+ """)
+ command("import spark.SparkContext._");
+ }
+ echo("Type in expressions to have them evaluated.")
+ echo("Type :help for more information.")
+ }
+
+ var sparkContext: SparkContext = null
+
+ def createSparkContext(): SparkContext = {
+ val master = this.master match {
+ case Some(m) => m
+ case None => {
+ val prop = System.getenv("MASTER")
+ if (prop != null) prop else "local"
+ }
+ }
+ sparkContext = new SparkContext(master, "Spark shell")
+ sparkContext
+ }
+
+ def process(settings: Settings): Boolean = {
+ // Ensure logging is initialized before any Spark threads try to use logs
+ // (because SLF4J initialization is not thread safe)
+ initLogging()
+
+ printWelcome()
+ echo("Initializing interpreter...")
+
+ this.settings = settings
+ createInterpreter()
+
+ // sets in to some kind of reader depending on environmental cues
+ in = in0 match {
+ case Some(reader) => SimpleReader(reader, out, true)
+ case None => chooseReader(settings)
+ }
+
+ loadFiles(settings)
+ // it is broken on startup; go ahead and exit
+ if (intp.reporter.hasErrors)
+ return false
+
+ try {
+ // this is about the illusion of snappiness. We call initialize()
+ // which spins off a separate thread, then print the prompt and try
+ // our best to look ready. Ideally the user will spend a
+ // couple seconds saying "wow, it starts so fast!" and by the time
+ // they type a command the compiler is ready to roll.
+ intp.initialize()
+ initializeSpark()
+ if (isReplPower) {
+ echo("Starting in power mode, one moment...\n")
+ enablePowerMode()
+ }
+ loop()
+ }
+ finally closeInterpreter()
+ true
+ }
+
+ /** process command-line arguments and do as they request */
+ def process(args: Array[String]): Boolean = {
+ val command = new CommandLine(args.toList, msg => echo("scala: " + msg))
+ def neededHelp(): String =
+ (if (command.settings.help.value) command.usageMsg + "\n" else "") +
+ (if (command.settings.Xhelp.value) command.xusageMsg + "\n" else "")
+
+ // if they asked for no help and command is valid, we call the real main
+ neededHelp() match {
+ case "" => command.ok && process(command.settings)
+ case help => echoNoNL(help) ; true
+ }
+ }
+
+ @deprecated("Use `process` instead", "2.9.0")
+ def main(args: Array[String]): Unit = {
+ if (isReplDebug)
+ System.out.println(new java.util.Date)
+
+ process(args)
+ }
+ @deprecated("Use `process` instead", "2.9.0")
+ def main(settings: Settings): Unit = process(settings)
+}
+
+object SparkILoop {
+ implicit def loopToInterpreter(repl: SparkILoop): SparkIMain = repl.intp
+ private def echo(msg: String) = Console println msg
+
+  // Designed primarily for use by test code: takes a String with a
+  // bunch of code and prints out a transcript of what it would look
+ // like if you'd just typed it into the repl.
+ def runForTranscript(code: String, settings: Settings): String = {
+ import java.io.{ BufferedReader, StringReader, OutputStreamWriter }
+
+ stringFromStream { ostream =>
+ Console.withOut(ostream) {
+ val output = new PrintWriter(new OutputStreamWriter(ostream), true) {
+ override def write(str: String) = {
+ // completely skip continuation lines
+ if (str forall (ch => ch.isWhitespace || ch == '|')) ()
+ // print a newline on empty scala prompts
+ else if ((str contains '\n') && (str.trim == "scala> ")) super.write("\n")
+ else super.write(str)
+ }
+ }
+ val input = new BufferedReader(new StringReader(code)) {
+ override def readLine(): String = {
+ val s = super.readLine()
+ // helping out by printing the line being interpreted.
+ if (s != null)
+ output.println(s)
+ s
+ }
+ }
+ val repl = new SparkILoop(input, output)
+ if (settings.classpath.isDefault)
+ settings.classpath.value = sys.props("java.class.path")
+
+ repl process settings
+ }
+ }
+ }
+
+ /** Creates an interpreter loop with default settings and feeds
+ * the given code to it as input.
+ */
+ def run(code: String, sets: Settings = new Settings): String = {
+ import java.io.{ BufferedReader, StringReader, OutputStreamWriter }
+
+ stringFromStream { ostream =>
+ Console.withOut(ostream) {
+ val input = new BufferedReader(new StringReader(code))
+ val output = new PrintWriter(new OutputStreamWriter(ostream), true)
+ val repl = new SparkILoop(input, output)
+
+ if (sets.classpath.isDefault)
+ sets.classpath.value = sys.props("java.class.path")
+
+ repl process sets
+ }
+ }
+ }
+ def run(lines: List[String]): String = run(lines map (_ + "\n") mkString)
+
+ // provide the enclosing type T
+ // in order to set up the interpreter's classpath and parent class loader properly
+ def breakIf[T: Manifest](assertion: => Boolean, args: NamedParam*): Unit =
+ if (assertion) break[T](args.toList)
+
+ // start a repl, binding supplied args
+ def break[T: Manifest](args: List[NamedParam]): Unit = {
+ val msg = if (args.isEmpty) "" else " Binding " + args.size + " value%s.".format(
+ if (args.size == 1) "" else "s"
+ )
+ echo("Debug repl starting." + msg)
+ val repl = new SparkILoop {
+ override def prompt = "\ndebug> "
+ }
+ repl.settings = new Settings(echo)
+ repl.settings.embeddedDefaults[T]
+ repl.createInterpreter()
+ repl.in = JLineReader(repl)
+
+ // rebind exit so people don't accidentally call sys.exit by way of predef
+ repl.quietRun("""def exit = println("Type :quit to resume program execution.")""")
+ args foreach (p => repl.bind(p.name, p.tpe, p.value))
+ repl.loop()
+
+ echo("\nDebug repl exiting.")
+ repl.closeInterpreter()
+ }
+}
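A minimal usage sketch (not part of the patch) of the programmatic entry points defined above; `SparkILoop.run` feeds lines to a fresh loop, using default `Settings`, and returns the transcript as a `String`:

    import spark.repl.SparkILoop

    // Hypothetical embedding, e.g. from a test.
    val transcript = SparkILoop.run(List(
      "val xs = 1 to 5",
      "xs.sum"))
    println(transcript)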
diff --git a/repl/src/main/scala/spark/repl/SparkIMain.scala b/repl/src/main/scala/spark/repl/SparkIMain.scala
new file mode 100644
index 0000000000..fb4b9f9b40
--- /dev/null
+++ b/repl/src/main/scala/spark/repl/SparkIMain.scala
@@ -0,0 +1,1160 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2011 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package spark.repl
+
+import scala.tools.nsc._
+import scala.tools.nsc.interpreter._
+
+import Predef.{ println => _, _ }
+import java.io.{ PrintWriter }
+import java.lang.reflect
+import java.net.URL
+import util.{ Set => _, _ }
+import io.{ AbstractFile, PlainFile, VirtualDirectory }
+import reporters.{ ConsoleReporter, Reporter }
+import symtab.{ Flags, Names }
+import scala.tools.nsc.interpreter.{ Results => IR }
+import scala.tools.util.PathResolver
+import scala.tools.nsc.util.{ ScalaClassLoader, Exceptional }
+import ScalaClassLoader.URLClassLoader
+import Exceptional.unwrap
+import scala.collection.{ mutable, immutable }
+import scala.PartialFunction.{ cond, condOpt }
+import scala.util.control.Exception.{ ultimately }
+import scala.reflect.NameTransformer
+import SparkIMain._
+
+import spark.HttpServer
+import spark.Utils
+import spark.SparkEnv
+
+/** An interpreter for Scala code.
+ *
+ * The main public entry points are compile(), interpret(), and bind().
+ * The compile() method loads a complete Scala file. The interpret() method
+ * executes one line of Scala code at the request of the user. The bind()
+ * method binds an object to a variable that can then be used by later
+ * interpreted code.
+ *
+ * The overall approach is based on compiling the requested code and then
+ * using a Java classloader and Java reflection to run the code
+ * and access its results.
+ *
+ * In more detail, a single compiler instance is used
+ * to accumulate all successfully compiled or interpreted Scala code. To
+ * "interpret" a line of code, the compiler generates a fresh object that
+ * includes the line of code and which has public member(s) to export
+ * all variables defined by that code. To extract the result of an
+ * interpreted line to show the user, a second "result object" is created
+ * which imports the variables exported by the above object and then
+ * exports a single member named "$export". To accommodate user expressions
+ * that read from variables or methods defined in previous statements, "import"
+ * statements are used.
+ *
+ * This interpreter shares the strengths and weaknesses of using the
+ * full compiler-to-Java. The main strength is that interpreted code
+ * behaves exactly as does compiled code, including running at full speed.
+ * The main weakness is that redefining classes and methods is not handled
+ * properly, because rebinding at the Java level is technically difficult.
+ *
+ * @author Moez A. Abdel-Gawad
+ * @author Lex Spoon
+ */
+class SparkIMain(val settings: Settings, protected val out: PrintWriter) extends SparkImports {
+ imain =>
+
+ /** construct an interpreter that reports to Console */
+ def this(settings: Settings) = this(settings, new NewLinePrintWriter(new ConsoleWriter, true))
+ def this() = this(new Settings())
+
+ /** whether to print out result lines */
+ var printResults: Boolean = true
+
+ /** whether to print errors */
+ var totalSilence: Boolean = false
+
+ private val RESULT_OBJECT_PREFIX = "RequestResult$"
+
+ lazy val formatting: Formatting = new Formatting {
+ val prompt = Properties.shellPromptString
+ }
+ import formatting._
+
+ val SPARK_DEBUG_REPL: Boolean = (System.getenv("SPARK_DEBUG_REPL") == "1")
+
+ /** Local directory to save .class files to */
+ val outputDir = {
+ val tmp = System.getProperty("java.io.tmpdir")
+ val rootDir = System.getProperty("spark.repl.classdir", tmp)
+ Utils.createTempDir(rootDir)
+ }
+ if (SPARK_DEBUG_REPL) {
+ echo("Output directory: " + outputDir)
+ }
+
+ /** Scala compiler virtual directory for outputDir */
+ val virtualDirectory = new PlainFile(outputDir)
+
+ /** Jetty server that will serve our classes to worker nodes */
+ val classServer = new HttpServer(outputDir)
+
+ // Start the classServer and store its URI in a spark system property
+ // (which will be passed to executors so that they can connect to it)
+ classServer.start()
+ System.setProperty("spark.repl.class.uri", classServer.uri)
+ if (SPARK_DEBUG_REPL) {
+ echo("Class server started, URI = " + classServer.uri)
+ }
+
+ /*
+ // directory to save .class files to
+ val virtualDirectory = new VirtualDirectory("(memory)", None) {
+ private def pp(root: io.AbstractFile, indentLevel: Int) {
+ val spaces = " " * indentLevel
+ out.println(spaces + root.name)
+ if (root.isDirectory)
+ root.toList sortBy (_.name) foreach (x => pp(x, indentLevel + 1))
+ }
+ // print the contents hierarchically
+ def show() = pp(this, 0)
+ }
+ */
+
+ /** reporter */
+ lazy val reporter: ConsoleReporter = new SparkIMain.ReplReporter(this)
+ import reporter.{ printMessage, withoutTruncating }
+
+ // not sure whether we have any motivation to print directly to the console
+ private def echo(msg: String) { Console println msg }
+
+ // protected def defaultImports: List[String] = List("_root_.scala.sys.exit")
+
+ /** We're going to go to some trouble to initialize the compiler asynchronously.
+ * It's critical that nothing call into it until it's been initialized or we will
+ * run into unrecoverable issues, but the perceived repl startup time goes
+ * through the roof if we wait for it. So we initialize it with a future and
+ * use a lazy val to ensure that any attempt to use the compiler object waits
+ * on the future.
+ */
+ private val _compiler: Global = newCompiler(settings, reporter)
+ private var _initializeComplete = false
+ def isInitializeComplete = _initializeComplete
+
+ private def _initialize(): Boolean = {
+ val source = """
+ |class $repl_$init {
+ | List(1) map (_ + 1)
+ |}
+ |""".stripMargin
+
+ val result = try {
+ new _compiler.Run() compileSources List(new BatchSourceFile("<init>", source))
+ if (isReplDebug || settings.debug.value) {
+ // Can't use printMessage here, it deadlocks
+ Console.println("Repl compiler initialized.")
+ }
+ // addImports(defaultImports: _*)
+ true
+ }
+ catch {
+ case x: AbstractMethodError =>
+ printMessage("""
+ |Failed to initialize compiler: abstract method error.
+ |This is most often remedied by a full clean and recompile.
+ |""".stripMargin
+ )
+ x.printStackTrace()
+ false
+ case x: MissingRequirementError => printMessage("""
+ |Failed to initialize compiler: %s not found.
+ |** Note that as of 2.8 scala does not assume use of the java classpath.
+ |** For the old behavior pass -usejavacp to scala, or if using a Settings
+ |** object programmatically, settings.usejavacp.value = true.""".stripMargin.format(x.req)
+ )
+ false
+ }
+
+ try result
+ finally _initializeComplete = result
+ }
+
+ // set up initialization future
+ private var _isInitialized: () => Boolean = null
+ def initialize() = synchronized {
+ if (_isInitialized == null)
+ _isInitialized = scala.concurrent.ops future _initialize()
+ }
+
+ /** The public compiler accessor; it goes through the initialization future. */
+ lazy val global: Global = {
+ initialize()
+
+ // blocks until it is initialized; false means catastrophic failure
+ if (_isInitialized()) _compiler
+ else null
+ }
+ @deprecated("Use `global` for access to the compiler instance.", "2.9.0")
+ lazy val compiler: global.type = global
+
+ import global._
+
+ object naming extends {
+ val global: imain.global.type = imain.global
+ } with Naming {
+ // make sure we don't overwrite their unwisely named res3 etc.
+ override def freshUserVarName(): String = {
+ val name = super.freshUserVarName()
+ if (definedNameMap contains name) freshUserVarName()
+ else name
+ }
+ }
+ import naming._
+
+ // object dossiers extends {
+ // val intp: imain.type = imain
+ // } with Dossiers { }
+ // import dossiers._
+
+ lazy val memberHandlers = new {
+ val intp: imain.type = imain
+ } with SparkMemberHandlers
+ import memberHandlers._
+
+ def atPickler[T](op: => T): T = atPhase(currentRun.picklerPhase)(op)
+ def afterTyper[T](op: => T): T = atPhase(currentRun.typerPhase.next)(op)
+
+ /** Temporarily be quiet */
+ def beQuietDuring[T](operation: => T): T = {
+ val wasPrinting = printResults
+ ultimately(printResults = wasPrinting) {
+ if (isReplDebug) echo(">> beQuietDuring")
+ else printResults = false
+
+ operation
+ }
+ }
+ def beSilentDuring[T](operation: => T): T = {
+ val saved = totalSilence
+ totalSilence = true
+ try operation
+ finally totalSilence = saved
+ }
+
+ def quietRun[T](code: String) = beQuietDuring(interpret(code))
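+
+ // Sketch: suppress result printing for setup lines the user need not see,
+ // e.g. quietRun("import java.util.Date") prints nothing on success.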
+
+ /** whether to bind the lastException variable */
+ private var bindLastException = true
+
+ /** A string representing code to be wrapped around all lines. */
+ private var _executionWrapper: String = ""
+ def executionWrapper = _executionWrapper
+ def setExecutionWrapper(code: String) = _executionWrapper = code
+ def clearExecutionWrapper() = _executionWrapper = ""
+
+ /** Temporarily stop binding lastException */
+ def withoutBindingLastException[T](operation: => T): T = {
+ val wasBinding = bindLastException
+ ultimately(bindLastException = wasBinding) {
+ bindLastException = false
+ operation
+ }
+ }
+
+ protected def createLineManager(): Line.Manager = new Line.Manager
+ lazy val lineManager = createLineManager()
+
+ /** interpreter settings */
+ lazy val isettings = new SparkISettings(this)
+
+ /** Instantiate a compiler. Subclasses can override this to
+ * change the compiler class used by this interpreter. */
+ protected def newCompiler(settings: Settings, reporter: Reporter) = {
+ settings.outputDirs setSingleOutput virtualDirectory
+ settings.exposeEmptyPackage.value = true
+ new Global(settings, reporter)
+ }
+
+ /** the compiler's classpath, as URL's */
+ lazy val compilerClasspath: List[URL] = new PathResolver(settings) asURLs
+
+ /* A single class loader is used for all commands interpreted by this Interpreter.
+ It would also be possible to create a new class loader for each command
+ to interpret. The advantages of the current approach are:
+
+ - Expressions are only evaluated one time. This is especially
+ significant for I/O, e.g. "val x = Console.readLine"
+
+ The main disadvantage is:
+
+ - Objects, classes, and methods cannot be rebound. Instead, definitions
+ shadow the old ones, and old code objects refer to the old
+ definitions.
+ */
+ private var _classLoader: AbstractFileClassLoader = null
+ def resetClassLoader() = _classLoader = makeClassLoader()
+ def classLoader: AbstractFileClassLoader = {
+ if (_classLoader == null)
+ resetClassLoader()
+
+ _classLoader
+ }
+ private def makeClassLoader(): AbstractFileClassLoader = {
+ val parent =
+ if (parentClassLoader == null) ScalaClassLoader fromURLs compilerClasspath
+ else new URLClassLoader(compilerClasspath, parentClassLoader)
+
+ new AbstractFileClassLoader(virtualDirectory, parent) {
+ /** Overridden here to try translating a simple name to the generated
+ * class name if the original attempt fails. This method is used by
+ * getResourceAsStream as well as findClass.
+ */
+ override protected def findAbstractFile(name: String): AbstractFile = {
+ super.findAbstractFile(name) match {
+ // deadlocks on startup if we try to translate names too early
+ case null if isInitializeComplete => generatedName(name) map (x => super.findAbstractFile(x)) orNull
+ case file => file
+ }
+ }
+ }
+ }
+ private def loadByName(s: String): JClass =
+ (classLoader tryToInitializeClass s) getOrElse sys.error("Failed to load expected class: '" + s + "'")
+
+ protected def parentClassLoader: ClassLoader =
+ SparkHelper.explicitParentLoader(settings).getOrElse( this.getClass.getClassLoader() )
+
+ def getInterpreterClassLoader() = classLoader
+
+ // Set the current Java "context" class loader to this interpreter's class loader
+ def setContextClassLoader() = classLoader.setAsContext()
+
+ /** Given a simple repl-defined name, returns the real name of
+ * the class representing it, e.g. for "Bippy" it may return
+ *
+ * $line19.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$Bippy
+ */
+ def generatedName(simpleName: String): Option[String] = {
+ if (simpleName endsWith "$") optFlatName(simpleName.init) map (_ + "$")
+ else optFlatName(simpleName)
+ }
+ def flatName(id: String) = optFlatName(id) getOrElse id
+ def optFlatName(id: String) = requestForIdent(id) map (_ fullFlatName id)
+
+ def allDefinedNames = definedNameMap.keys.toList sortBy (_.toString)
+ def pathToType(id: String): String = pathToName(newTypeName(id))
+ def pathToTerm(id: String): String = pathToName(newTermName(id))
+ def pathToName(name: Name): String = {
+ if (definedNameMap contains name)
+ definedNameMap(name) fullPath name
+ else name.toString
+ }
+
+ /** Most recent tree handled which wasn't wholly synthetic. */
+ private def mostRecentlyHandledTree: Option[Tree] = {
+ prevRequests.reverse foreach { req =>
+ req.handlers.reverse foreach {
+ case x: MemberDefHandler if x.definesValue && !isInternalVarName(x.name) => return Some(x.member)
+ case _ => ()
+ }
+ }
+ None
+ }
+
+ /** Stubs for work in progress. */
+ def handleTypeRedefinition(name: TypeName, old: Request, req: Request) = {
+ for (t1 <- old.simpleNameOfType(name) ; t2 <- req.simpleNameOfType(name)) {
+ DBG("Redefining type '%s'\n %s -> %s".format(name, t1, t2))
+ }
+ }
+
+ def handleTermRedefinition(name: TermName, old: Request, req: Request) = {
+ for (t1 <- old.compilerTypeOf get name ; t2 <- req.compilerTypeOf get name) {
+ // Printing the types here has a tendency to cause assertion errors, like
+ // assertion failed: fatal: <refinement> has owner value x, but a class owner is required
+ // so DBG is by-name now to keep it in the family. (It also traps the assertion error,
+ // but we don't want to unnecessarily risk hosing the compiler's internal state.)
+ DBG("Redefining term '%s'\n %s -> %s".format(name, t1, t2))
+ }
+ }
+ def recordRequest(req: Request) {
+ if (req == null || referencedNameMap == null)
+ return
+
+ prevRequests += req
+ req.referencedNames foreach (x => referencedNameMap(x) = req)
+
+ // warning about serially defining companions. It'd be easy
+ // enough to just redefine them together but that may not always
+ // be what people want so I'm waiting until I can do it better.
+ if (!settings.nowarnings.value) {
+ for {
+ name <- req.definedNames filterNot (x => req.definedNames contains x.companionName)
+ oldReq <- definedNameMap get name.companionName
+ newSym <- req.definedSymbols get name
+ oldSym <- oldReq.definedSymbols get name.companionName
+ } {
+ printMessage("warning: previously defined %s is not a companion to %s.".format(oldSym, newSym))
+ printMessage("Companions must be defined together; you may wish to use :paste mode for this.")
+ }
+ }
+
+ // Updating the defined name map
+ req.definedNames foreach { name =>
+ if (definedNameMap contains name) {
+ if (name.isTypeName) handleTypeRedefinition(name.toTypeName, definedNameMap(name), req)
+ else handleTermRedefinition(name.toTermName, definedNameMap(name), req)
+ }
+ definedNameMap(name) = req
+ }
+ }
+
+ /** Parse a line into a sequence of trees. Returns None if the input is incomplete. */
+ def parse(line: String): Option[List[Tree]] = {
+ var justNeedsMore = false
+ reporter.withIncompleteHandler((pos,msg) => {justNeedsMore = true}) {
+ // simple parse: just parse it, nothing else
+ def simpleParse(code: String): List[Tree] = {
+ reporter.reset()
+ val unit = new CompilationUnit(new BatchSourceFile("<console>", code))
+ val scanner = new syntaxAnalyzer.UnitParser(unit)
+
+ scanner.templateStatSeq(false)._2
+ }
+ val trees = simpleParse(line)
+
+ if (reporter.hasErrors) Some(Nil) // the result did not parse, so stop
+ else if (justNeedsMore) None
+ else Some(trees)
+ }
+ }
+
+ def isParseable(line: String): Boolean = {
+ beSilentDuring {
+ parse(line) match {
+ case Some(xs) => xs.nonEmpty // parses as-is
+ case None => true // incomplete
+ }
+ }
+ }
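+
+ // Sketch of parse's contract on sample inputs (compiler assumed initialized):
+ // parse("val x = 1") // Some(non-empty trees): complete statement
+ // parse("class C {") // None: incomplete, keep reading input
+ // parse("val = 1") // Some(Nil): parse error, stop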
+
+ /** Compile an nsc SourceFile. Returns true if there are
+ * no compilation errors, or false otherwise.
+ */
+ def compileSources(sources: SourceFile*): Boolean = {
+ reporter.reset()
+ new Run() compileSources sources.toList
+ !reporter.hasErrors
+ }
+
+ /** Compile a string. Returns true if there are no
+ * compilation errors, or false otherwise.
+ */
+ def compileString(code: String): Boolean =
+ compileSources(new BatchSourceFile("<script>", code))
+
+ /** Build a request from the user. `trees` is `line` after being parsed.
+ */
+ private def buildRequest(line: String, trees: List[Tree]): Request = new Request(line, trees)
+
+ private def requestFromLine(line: String, synthetic: Boolean): Either[IR.Result, Request] = {
+ val trees = parse(indentCode(line)) match {
+ case None => return Left(IR.Incomplete)
+ case Some(Nil) => return Left(IR.Error) // parse error or empty input
+ case Some(trees) => trees
+ }
+
+ // use synthetic vars to avoid filling up the resXX slots
+ def varName = if (synthetic) freshInternalVarName() else freshUserVarName()
+
+ // Treat a single bare expression specially. This is necessary because it is hard
+ // to modify code at a textual level, and hard to submit an AST to the compiler.
+ if (trees.size == 1) trees.head match {
+ case _:Assign => // we don't want to include assignments
+ case _:TermTree | _:Ident | _:Select => // ... but do want these as valdefs.
+ requestFromLine("val %s =\n%s".format(varName, line), synthetic) match {
+ case Right(req) => return Right(req withOriginalLine line)
+ case x => return x
+ }
+ case _ =>
+ }
+
+ // figure out what kind of request
+ Right(buildRequest(line, trees))
+ }
+
+ /**
+ * Interpret one line of input. All feedback, including parse errors
+ * and evaluation results, is printed via the supplied compiler's
+ * reporter. Values defined are available for future interpreted
+ * strings.
+ *
+ * The return value indicates whether the line was interpreted
+ * successfully, e.g. that there were no parse errors.
+ *
+ * @param line the line of code to interpret
+ * @return the result of interpreting the line
+ */
+ def interpret(line: String): IR.Result = interpret(line, false)
+ def interpret(line: String, synthetic: Boolean): IR.Result = {
+ def loadAndRunReq(req: Request) = {
+ val (result, succeeded) = req.loadAndRun
+ /** To our displeasure, ConsoleReporter offers only printMessage,
+ * which tacks a newline on the end. Since that breaks all the
+ * output checking, we have to take one off to balance.
+ */
+ def show() = {
+ if (result == "") ()
+ else printMessage(result stripSuffix "\n")
+ }
+
+ if (succeeded) {
+ if (printResults)
+ show()
+ // Book-keeping. Have to record synthetic requests too,
+ // as they may have been issued for information, e.g. :type
+ recordRequest(req)
+ IR.Success
+ }
+ else {
+ // don't truncate stack traces
+ withoutTruncating(show())
+ IR.Error
+ }
+ }
+
+ if (global == null) IR.Error
+ else requestFromLine(line, synthetic) match {
+ case Left(result) => result
+ case Right(req) =>
+ // null indicates a disallowed statement type; otherwise compile and
+ // fail if false (implying e.g. a type error)
+ if (req == null || !req.compile) IR.Error
+ else loadAndRunReq(req)
+ }
+ }
+
+ /** Bind a specified name to a specified value. The name may
+ * later be used by expressions passed to interpret.
+ *
+ * @param name the variable name to bind
+ * @param boundType the type of the variable, as a string
+ * @param value the object value to bind to it
+ * @return an indication of whether the binding succeeded
+ */
+ def bind(name: String, boundType: String, value: Any): IR.Result = {
+ val bindRep = new ReadEvalPrint()
+ val run = bindRep.compile("""
+ |object %s {
+ | var value: %s = _
+ | def set(x: Any) = value = x.asInstanceOf[%s]
+ |}
+ """.stripMargin.format(bindRep.evalName, boundType, boundType)
+ )
+ bindRep.callOpt("set", value) match {
+ case Some(_) => interpret("val %s = %s.value".format(name, bindRep.evalPath))
+ case _ => DBG("Set failed in bind(%s, %s, %s)".format(name, boundType, value)) ; IR.Error
+ }
+ }
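+
+ // Sketch ("intp" and "sc" are illustrative names): expose a JVM object to
+ // interpreted code, then refer to it by name:
+ // intp.bind("sc", "spark.SparkContext", sc)
+ // intp.interpret("sc") // the bound value is now in scope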
+ def rebind(p: NamedParam): IR.Result = {
+ val name = p.name
+ val oldType = typeOfTerm(name) getOrElse { return IR.Error }
+ val newType = p.tpe
+ val tempName = freshInternalVarName()
+
+ quietRun("val %s = %s".format(tempName, name))
+ quietRun("val %s = %s.asInstanceOf[%s]".format(name, tempName, newType))
+ }
+ def quietImport(ids: String*): IR.Result = beQuietDuring(addImports(ids: _*))
+ def addImports(ids: String*): IR.Result =
+ if (ids.isEmpty) IR.Success
+ else interpret("import " + ids.mkString(", "))
+
+ def quietBind(p: NamedParam): IR.Result = beQuietDuring(bind(p))
+ def bind(p: NamedParam): IR.Result = bind(p.name, p.tpe, p.value)
+ def bind[T: Manifest](name: String, value: T): IR.Result = bind((name, value))
+ def bindValue(x: Any): IR.Result = bind(freshUserVarName(), TypeStrings.fromValue(x), x)
+
+ /** Reset this interpreter, forgetting all user-specified requests. */
+ def reset() {
+ //virtualDirectory.clear()
+ virtualDirectory.delete()
+ virtualDirectory.create()
+ resetClassLoader()
+ resetAllCreators()
+ prevRequests.clear()
+ }
+
+ /** This instance is no longer needed, so release any resources
+ * it is using. The reporter's output gets flushed.
+ */
+ def close() {
+ reporter.flush()
+ classServer.stop()
+ }
+
+ /** Here is where we:
+ *
+ * 1) Read some source code, and put it in the "read" object.
+ * 2) Evaluate the read object, and put the result in the "eval" object.
+ * 3) Create a String for human consumption, and put it in the "print" object.
+ *
+ * Read! Eval! Print! Some of that is not yet centralized here.
+ */
+ class ReadEvalPrint(lineId: Int) {
+ def this() = this(freshLineId())
+
+ val packageName = "$line" + lineId
+ val readName = "$read"
+ val evalName = "$eval"
+ val printName = "$print"
+ val valueMethod = "$result" // no-args method giving result
+
+ // TODO: split this into a package object and a regular
+ // object so we can get by with much less wrapping.
+ def packageDecl = "package " + packageName
+
+ def pathTo(name: String) = packageName + "." + name
+ def packaged(code: String) = packageDecl + "\n\n" + code
+
+ def readPath = pathTo(readName)
+ def evalPath = pathTo(evalName)
+ def printPath = pathTo(printName)
+
+ def call(name: String, args: Any*): AnyRef =
+ evalMethod(name).invoke(evalClass, args.map(_.asInstanceOf[AnyRef]): _*)
+
+ def callOpt(name: String, args: Any*): Option[AnyRef] =
+ try Some(call(name, args: _*))
+ catch { case ex: Exception =>
+ quietBind("lastException", ex)
+ None
+ }
+
+ lazy val evalClass = loadByName(evalPath)
+ lazy val evalValue = callOpt(valueMethod)
+
+ def compile(source: String): Boolean = compileAndSaveRun("<console>", source)
+ def lineAfterTyper[T](op: => T): T = {
+ assert(lastRun != null, "Internal error: trying to use atPhase, but Run is null." + this)
+ atPhase(lastRun.typerPhase.next)(op)
+ }
+
+ /** The innermost object inside the wrapper, found by
+ * following accessPath into the outer one.
+ */
+ def resolvePathToSymbol(accessPath: String): Symbol = {
+ //val readRoot = definitions.getModule(readPath) // the outermost wrapper
+ // MATEI: changed this to getClass because the root object is no longer a module (Scala singleton object)
+ val readRoot = definitions.getClass(readPath) // the outermost wrapper
+ (accessPath split '.').foldLeft(readRoot) { (sym, name) =>
+ if (name == "") sym else
+ lineAfterTyper(sym.info member newTermName(name))
+ }
+ }
+
+ // def compileAndTypeExpr(expr: String): Option[Typer] = {
+ // class TyperRun extends Run {
+ // override def stopPhase(name: String) = name == "superaccessors"
+ // }
+ // }
+ private var lastRun: Run = _
+ private def evalMethod(name: String) = {
+ val methods = evalClass.getMethods filter (_.getName == name)
+ assert(methods.size == 1, "Internal error - eval object method " + name + " is overloaded: " + methods)
+ methods.head
+ }
+ private def compileAndSaveRun(label: String, code: String) = {
+ showCodeIfDebugging(code)
+ reporter.reset()
+ lastRun = new Run()
+ lastRun.compileSources(List(new BatchSourceFile(label, packaged(code))))
+ !reporter.hasErrors
+ }
+ }
+
+ /** One line of code submitted by the user for interpretation */
+ // private
+ class Request(val line: String, val trees: List[Tree]) {
+ val lineRep = new ReadEvalPrint()
+ import lineRep.lineAfterTyper
+
+ private var _originalLine: String = null
+ def withOriginalLine(s: String): this.type = { _originalLine = s ; this }
+ def originalLine = if (_originalLine == null) line else _originalLine
+
+ /** handlers for each tree in this request */
+ val handlers: List[MemberHandler] = trees map (memberHandlers chooseHandler _)
+
+ /** all (public) names defined by these statements */
+ val definedNames = handlers flatMap (_.definedNames)
+
+ /** list of names used by this expression */
+ val referencedNames: List[Name] = handlers flatMap (_.referencedNames)
+
+ /** def and val names */
+ def termNames = handlers flatMap (_.definesTerm)
+ def typeNames = handlers flatMap (_.definesType)
+
+ /** Code to import bound names from previous lines - accessPath is code to
+ * append to objectName to access anything bound by request.
+ */
+ val ComputedImports(importsPreamble, importsTrailer, accessPath) =
+ importsCode(referencedNames.toSet)
+
+ /** Code to access a variable with the specified name */
+ def fullPath(vname: String) = (
+ //lineRep.readPath + accessPath + ".`%s`".format(vname)
+ lineRep.readPath + ".INSTANCE" + accessPath + ".`%s`".format(vname)
+ )
+ /** Same as fullPath, but after it has been flattened, so:
+ * $line5.$iw.$iw.$iw.Bippy // fullPath
+ * $line5.$iw$$iw$$iw$Bippy // fullFlatName
+ */
+ def fullFlatName(name: String) =
+ lineRep.readPath + accessPath.replace('.', '$') + "$" + name
+
+ /** Code to access a variable with the specified name */
+ def fullPath(vname: Name): String = fullPath(vname.toString)
+
+ /** the line of code to compute */
+ def toCompute = line
+
+ /** generate the source code for the object that computes this request */
+ private object ObjectSourceCode extends CodeAssembler[MemberHandler] {
+ val preamble = """
+ |class %s extends Serializable {
+ | %s%s
+ """.stripMargin.format(lineRep.readName, importsPreamble, indentCode(toCompute))
+ val postamble = importsTrailer + "\n}" + "\n" +
+ "object " + lineRep.readName + " {\n" +
+ " val INSTANCE = new " + lineRep.readName + "();\n" +
+ "}\n"
+ val generate = (m: MemberHandler) => m extraCodeToEvaluate Request.this
+ /*
+ val preamble = """
+ |object %s {
+ | %s%s
+ """.stripMargin.format(lineRep.readName, importsPreamble, indentCode(toCompute))
+ val postamble = importsTrailer + "\n}"
+ val generate = (m: MemberHandler) => m extraCodeToEvaluate Request.this
+ */
+ }
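+
+ // For reference, the read-side source assembled for "val x = 1" has roughly
+ // this shape (import wrappers elided):
+ // class $read extends Serializable {
+ // val x = 1
+ // }
+ // object $read { val INSTANCE = new $read(); }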
+
+ private object ResultObjectSourceCode extends CodeAssembler[MemberHandler] {
+ /** We only want to generate this code when the result
+ * is a value which can be referred to as-is.
+ */
+ val evalResult =
+ if (!handlers.last.definesValue) ""
+ else handlers.last.definesTerm match {
+ case Some(vname) if typeOf contains vname =>
+ """
+ |lazy val $result = {
+ | $export
+ | %s
+ |}""".stripMargin.format(fullPath(vname))
+ case _ => ""
+ }
+ // first line evaluates object to make sure constructor is run
+ // initial "" so later fragments can be appended uniformly with +
+ val preamble = """
+ |object %s {
+ | %s
+ | val $export: String = %s {
+ | %s
+ | (""
+ """.stripMargin.format(
+ lineRep.evalName, evalResult, executionWrapper, lineRep.readName + ".INSTANCE" + accessPath
+ )
+
+ val postamble = """
+ | )
+ | }
+ |}
+ """.stripMargin
+ val generate = (m: MemberHandler) => m resultExtractionCode Request.this
+ }
+
+ // get it
+ def getEvalTyped[T] : Option[T] = getEval map (_.asInstanceOf[T])
+ def getEval: Option[AnyRef] = {
+ // ensure it has been compiled
+ compile
+ // try to load it and call the value method
+ lineRep.evalValue filterNot (_ == null)
+ }
+
+ /** Compile the object file. Returns whether the compilation succeeded.
+ * If all goes well, the "types" map is computed. */
+ lazy val compile: Boolean = {
+ // error counting is wrong, so the interpreter may overlook a failure; reset to be safe
+ reporter.reset()
+
+ // compile the object containing the user's code
+ lineRep.compile(ObjectSourceCode(handlers)) && {
+ // extract and remember types
+ typeOf
+ typesOfDefinedTerms
+
+ // compile the result-extraction object
+ lineRep compile ResultObjectSourceCode(handlers)
+ }
+ }
+
+ lazy val resultSymbol = lineRep.resolvePathToSymbol(accessPath)
+ def applyToResultMember[T](name: Name, f: Symbol => T) = lineAfterTyper(f(resultSymbol.info.nonPrivateDecl(name)))
+
+ /* typeOf lookup with encoding */
+ def lookupTypeOf(name: Name) = {
+ typeOf.getOrElse(name, typeOf(global.encode(name.toString)))
+ }
+ def simpleNameOfType(name: TypeName) = {
+ (compilerTypeOf get name) map (_.typeSymbol.simpleName)
+ }
+
+ private def typeMap[T](f: Type => T): Map[Name, T] = {
+ def toType(name: Name): T = {
+ // the types are all =>T; remove the =>
+ val tp1 = lineAfterTyper(resultSymbol.info.nonPrivateDecl(name).tpe match {
+ case NullaryMethodType(tp) => tp
+ case tp => tp
+ })
+ // normalize non-public types so we don't see protected aliases like Self
+ lineAfterTyper(tp1 match {
+ case TypeRef(_, sym, _) if !sym.isPublic => f(tp1.normalize)
+ case tp => f(tp)
+ })
+ }
+ termNames ++ typeNames map (x => x -> toType(x)) toMap
+ }
+ /** Types of variables defined by this request. */
+ lazy val compilerTypeOf = typeMap[Type](x => x)
+ /** String representations of same. */
+ lazy val typeOf = typeMap[String](_.toString)
+
+ // lazy val definedTypes: Map[Name, Type] = {
+ // typeNames map (x => x -> afterTyper(resultSymbol.info.nonPrivateDecl(x).tpe)) toMap
+ // }
+ lazy val definedSymbols: Map[Name, Symbol] = (
+ termNames.map(x => x -> applyToResultMember(x, x => x)) ++
+ typeNames.map(x => x -> compilerTypeOf.get(x).map(_.typeSymbol).getOrElse(NoSymbol))
+ ).toMap
+
+ lazy val typesOfDefinedTerms: Map[Name, Type] =
+ termNames map (x => x -> applyToResultMember(x, _.tpe)) toMap
+
+ private def bindExceptionally(t: Throwable) = {
+ val ex: Exceptional =
+ if (isettings.showInternalStackTraces) Exceptional(t)
+ else new Exceptional(t) {
+ override def spanFn(frame: JavaStackFrame) = !(frame.className startsWith lineRep.evalPath)
+ override def contextPrelude = super.contextPrelude + "/* The repl internal portion of the stack trace is elided. */\n"
+ }
+
+ quietBind("lastException", ex)
+ ex.contextHead + "\n(access lastException for the full trace)"
+ }
+ private def bindUnexceptionally(t: Throwable) = {
+ quietBind("lastException", t)
+ stackTraceString(t)
+ }
+
+ /** load and run the code using reflection */
+ def loadAndRun: (String, Boolean) = {
+ import interpreter.Line._
+
+ def handleException(t: Throwable) = {
+ /** We turn off the binding to accommodate ticket #2817 */
+ withoutBindingLastException {
+ val message =
+ if (opt.richExes) bindExceptionally(unwrap(t))
+ else bindUnexceptionally(unwrap(t))
+
+ (message, false)
+ }
+ }
+
+ try {
+ val execution = lineManager.set(originalLine) {
+ // MATEI: set the right SparkEnv for our SparkContext, because
+ // this execution will happen in a separate thread
+ val sc = spark.repl.Main.interp.sparkContext
+ if (sc != null && sc.env != null)
+ SparkEnv.set(sc.env)
+ // Execute the line
+ lineRep call "$export"
+ }
+ execution.await()
+
+ execution.state match {
+ case Done => ("" + execution.get(), true)
+ case Threw =>
+ val ex = execution.caught()
+ if (isReplDebug)
+ ex.printStackTrace()
+
+ if (bindLastException) handleException(ex)
+ else throw ex
+ case Cancelled => ("Execution interrupted by signal.\n", false)
+ case Running => ("Execution still running! Seems impossible.", false)
+ }
+ }
+ finally lineManager.clear()
+ }
+
+ override def toString = "Request(line=%s, %s trees)".format(line, trees.size)
+ }
+
+ /** Returns the name of the most recent interpreter result.
+ * Mostly this exists so you can conveniently invoke methods on
+ * the previous result.
+ */
+ def mostRecentVar: String =
+ if (mostRecentlyHandledTree.isEmpty) ""
+ else "" + (mostRecentlyHandledTree.get match {
+ case x: ValOrDefDef => x.name
+ case Assign(Ident(name), _) => name
+ case ModuleDef(_, name, _) => name
+ case _ => naming.mostRecentVar
+ })
+
+ private def requestForName(name: Name): Option[Request] = {
+ assert(definedNameMap != null, "definedNameMap is null")
+ definedNameMap get name
+ }
+
+ private def requestForIdent(line: String): Option[Request] =
+ requestForName(newTermName(line)) orElse requestForName(newTypeName(line))
+
+ def safeClass(name: String): Option[Symbol] = {
+ try Some(definitions.getClass(newTypeName(name)))
+ catch { case _: MissingRequirementError => None }
+ }
+ def safeModule(name: String): Option[Symbol] = {
+ try Some(definitions.getModule(newTermName(name)))
+ catch { case _: MissingRequirementError => None }
+ }
+
+ def definitionForName(name: Name): Option[MemberHandler] =
+ requestForName(name) flatMap { req =>
+ req.handlers find (_.definedNames contains name)
+ }
+
+ def valueOfTerm(id: String): Option[AnyRef] =
+ requestForIdent(id) flatMap (_.getEval)
+
+ def classOfTerm(id: String): Option[JClass] =
+ valueOfTerm(id) map (_.getClass)
+
+ def typeOfTerm(id: String): Option[Type] = newTermName(id) match {
+ case nme.ROOTPKG => Some(definitions.RootClass.tpe)
+ case name => requestForName(name) flatMap (_.compilerTypeOf get name)
+ }
+ def symbolOfTerm(id: String): Symbol =
+ requestForIdent(id) flatMap (_.definedSymbols get newTermName(id)) getOrElse NoSymbol
+
+ def runtimeClassAndTypeOfTerm(id: String): Option[(JClass, Type)] = {
+ for {
+ clazz <- classOfTerm(id)
+ tpe <- runtimeTypeOfTerm(id)
+ nonAnon <- new RichClass(clazz).supers.find(c => !(new RichClass(c).isScalaAnonymous))
+ } yield {
+ (nonAnon, tpe)
+ }
+ }
+
+ def runtimeTypeOfTerm(id: String): Option[Type] = {
+ for {
+ tpe <- typeOfTerm(id)
+ clazz <- classOfTerm(id)
+ val staticSym = tpe.typeSymbol
+ runtimeSym <- safeClass(clazz.getName)
+ if runtimeSym != staticSym
+ if runtimeSym isSubClass staticSym
+ } yield {
+ runtimeSym.info
+ }
+ }
+
+ // XXX literals. The strategies below are tried in order:
+ // 1) Identifiers defined in the repl.
+ // 2) A path loadable via getModule.
+ // 3) Try interpreting it as an expression.
+ private var typeOfExpressionDepth = 0
+ def typeOfExpression(expr: String): Option[Type] = {
+ DBG("typeOfExpression(" + expr + ")")
+ if (typeOfExpressionDepth > 2) {
+ DBG("Terminating typeOfExpression recursion for expression: " + expr)
+ return None
+ }
+
+ def asQualifiedImport = {
+ val name = expr.takeWhile(_ != '.')
+ importedTermNamed(name) flatMap { sym =>
+ typeOfExpression(sym.fullName + expr.drop(name.length))
+ }
+ }
+ def asModule = safeModule(expr) map (_.tpe)
+ def asExpr = beSilentDuring {
+ val lhs = freshInternalVarName()
+ val line = "lazy val " + lhs + " = { " + expr + " } "
+
+ interpret(line, true) match {
+ case IR.Success => typeOfExpression(lhs)
+ case _ => None
+ }
+ }
+
+ typeOfExpressionDepth += 1
+ try typeOfTerm(expr) orElse asModule orElse asExpr orElse asQualifiedImport
+ finally typeOfExpressionDepth -= 1
+ }
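+
+ // Sketch (illustrative inputs), falling through the strategies above:
+ // typeOfExpression("res0") // a repl-defined identifier
+ // typeOfExpression("scala.collection.immutable.List") // a module path
+ // typeOfExpression("1 + 1") // interpreted via a synthetic lazy val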
+
+ protected def onlyTerms(xs: List[Name]) = xs collect { case x: TermName => x }
+ protected def onlyTypes(xs: List[Name]) = xs collect { case x: TypeName => x }
+
+ def definedTerms = onlyTerms(allDefinedNames) filterNot isInternalVarName
+ def definedTypes = onlyTypes(allDefinedNames)
+ def definedSymbols = prevRequests.toSet flatMap ((x: Request) => x.definedSymbols.values)
+
+ /** the previous requests this interpreter has processed */
+ private lazy val prevRequests = mutable.ListBuffer[Request]()
+ private lazy val referencedNameMap = mutable.Map[Name, Request]()
+ private lazy val definedNameMap = mutable.Map[Name, Request]()
+ protected def prevRequestList = prevRequests.toList
+ private def allHandlers = prevRequestList flatMap (_.handlers)
+ def allSeenTypes = prevRequestList flatMap (_.typeOf.values.toList) distinct
+ def allImplicits = allHandlers filter (_.definesImplicit) flatMap (_.definedNames)
+ def importHandlers = allHandlers collect { case x: ImportHandler => x }
+
+ def visibleTermNames: List[Name] = definedTerms ++ importedTerms distinct
+
+ /** Another entry point for tab-completion, ids in scope */
+ def unqualifiedIds = visibleTermNames map (_.toString) filterNot (_ contains "$") sorted
+
+ /** Parse the ScalaSig to find type aliases */
+ def aliasForType(path: String) = ByteCode.aliasForType(path)
+
+ def withoutUnwrapping(op: => Unit): Unit = {
+ val saved = isettings.unwrapStrings
+ isettings.unwrapStrings = false
+ try op
+ finally isettings.unwrapStrings = saved
+ }
+
+ def symbolDefString(sym: Symbol) = {
+ TypeStrings.quieter(
+ afterTyper(sym.defString),
+ sym.owner.name + ".this.",
+ sym.owner.fullName + "."
+ )
+ }
+
+ def showCodeIfDebugging(code: String) {
+ /** Secret bookcase entrance for repl debuggers: end the line
+ * with "// show" and see what's going on.
+ */
+ if (SPARK_DEBUG_REPL || code.lines.exists(_.trim endsWith "// show")) {
+ echo(code)
+ parse(code) foreach (ts => ts foreach (t => withoutUnwrapping(DBG(asCompactString(t)))))
+ }
+ }
+ // debugging
+ def debugging[T](msg: String)(res: T) = {
+ DBG(msg + " " + res)
+ res
+ }
+ def DBG(s: => String) = if (isReplDebug) {
+ //try repldbg(s)
+ //catch { case x: AssertionError => repldbg("Assertion error printing debug string:\n " + x) }
+ }
+}
+
+/** Utility methods for the Interpreter. */
+object SparkIMain {
+ // The two name forms this is catching are the two sides of this assignment:
+ //
+ // $line3.$read.$iw.$iw.Bippy =
+ // $line3.$read$$iw$$iw$Bippy@4a6a00ca
+ private def removeLineWrapper(s: String) = s.replaceAll("""\$line\d+[./]\$(read|eval|print)[$.]""", "")
+ private def removeIWPackages(s: String) = s.replaceAll("""\$(iw|iwC|read|eval|print)[$.]""", "")
+ private def removeSparkVals(s: String) = s.replaceAll("""\$VAL[0-9]+[$.]""", "")
+ def stripString(s: String) = removeSparkVals(removeIWPackages(removeLineWrapper(s)))
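+
+ // Worked example of the stripping pipeline above:
+ // stripString("$line3.$read$$iw$$iw$Bippy@4a6a00ca") == "Bippy@4a6a00ca"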
+
+ trait CodeAssembler[T] {
+ def preamble: String
+ def generate: T => String
+ def postamble: String
+
+ def apply(contributors: List[T]): String = stringFromWriter { code =>
+ code println preamble
+ contributors map generate foreach (code println _)
+ code println postamble
+ }
+ }
+
+ trait StrippingWriter {
+ def isStripping: Boolean
+ def stripImpl(str: String): String
+ def strip(str: String): String = if (isStripping) stripImpl(str) else str
+ }
+ trait TruncatingWriter {
+ def maxStringLength: Int
+ def isTruncating: Boolean
+ def truncate(str: String): String = {
+ if (isTruncating && str.length > maxStringLength)
+ (str take maxStringLength - 3) + "..."
+ else str
+ }
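+
+ // e.g. with maxStringLength = 10, truncate("abcdefghijkl") yields
+ // "abcdefg..." (maxStringLength - 3 chars kept, ellipsis appended);
+ // shorter strings pass through unchanged.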
+ }
+ abstract class StrippingTruncatingWriter(out: PrintWriter)
+ extends PrintWriter(out)
+ with StrippingWriter
+ with TruncatingWriter {
+ self =>
+
+ def clean(str: String): String = truncate(strip(str))
+ override def write(str: String) = super.write(clean(str))
+ }
+ class ReplStrippingWriter(intp: SparkIMain) extends StrippingTruncatingWriter(intp.out) {
+ import intp._
+ def maxStringLength = isettings.maxPrintString
+ def isStripping = isettings.unwrapStrings
+ def isTruncating = reporter.truncationOK
+
+ def stripImpl(str: String): String = {
+ val cleaned = stripString(str)
+ var ctrlChars = 0
+ cleaned map { ch =>
+ if (ch.isControl && !ch.isWhitespace) {
+ ctrlChars += 1
+ if (ctrlChars > 5) return "[line elided for control chars: possibly a scala signature]"
+ else '?'
+ }
+ else ch
+ }
+ }
+ }
+
+ class ReplReporter(intp: SparkIMain) extends ConsoleReporter(intp.settings, null, new ReplStrippingWriter(intp)) {
+ override def printMessage(msg: String) {
+ // Avoiding deadlock when the compiler starts logging before
+ // the lazy val is done.
+ if (intp.isInitializeComplete) {
+ if (intp.totalSilence) ()
+ else super.printMessage(msg)
+ }
+ else Console.println(msg)
+ }
+ }
+}
diff --git a/repl/src/main/scala/spark/repl/SparkISettings.scala b/repl/src/main/scala/spark/repl/SparkISettings.scala
new file mode 100644
index 0000000000..8ebb01d146
--- /dev/null
+++ b/repl/src/main/scala/spark/repl/SparkISettings.scala
@@ -0,0 +1,63 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2011 LAMP/EPFL
+ * @author Alexander Spoon
+ */
+
+package spark.repl
+
+import scala.tools.nsc._
+import scala.tools.nsc.interpreter._
+
+/** Settings for the interpreter
+ *
+ * @version 1.0
+ * @author Lex Spoon, 2007/3/24
+ **/
+class SparkISettings(intp: SparkIMain) {
+ /** A list of paths where :load should look */
+ var loadPath = List(".")
+
+ /** Set this to true to see repl machinery under -Yrich-exceptions.
+ */
+ var showInternalStackTraces = false
+
+ /** The maximum length of toString to use when printing the result
+ * of an evaluation. 0 means no maximum. If a printout requires
+ * more than this number of characters, then the printout is
+ * truncated.
+ */
+ var maxPrintString = 800
+
+ /** The maximum number of completion candidates to print for tab
+ * completion without requiring confirmation.
+ */
+ var maxAutoprintCompletion = 250
+
+ /** String unwrapping can be disabled if it is causing issues.
+ * Setting this to false means you will see Strings like "$iw.$iw.".
+ */
+ var unwrapStrings = true
+
+ def deprecation_=(x: Boolean) = {
+ val old = intp.settings.deprecation.value
+ intp.settings.deprecation.value = x
+ if (!old && x) println("Enabled -deprecation output.")
+ else if (old && !x) println("Disabled -deprecation output.")
+ }
+ def deprecation: Boolean = intp.settings.deprecation.value
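+
+ // Sketch ("intp" illustrative): intp.isettings.deprecation = true flips the
+ // underlying compiler flag and echoes "Enabled -deprecation output."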
+
+ def allSettings = Map(
+ "maxPrintString" -> maxPrintString,
+ "maxAutoprintCompletion" -> maxAutoprintCompletion,
+ "unwrapStrings" -> unwrapStrings,
+ "deprecation" -> deprecation
+ )
+
+ private def allSettingsString =
+ allSettings.toList sortBy (_._1) map { case (k, v) => " " + k + " = " + v + "\n" } mkString
+
+ override def toString = """
+ | SparkISettings {
+ | %s
+ | }""".stripMargin.format(allSettingsString)
+}
diff --git a/repl/src/main/scala/spark/repl/SparkImports.scala b/repl/src/main/scala/spark/repl/SparkImports.scala
new file mode 100644
index 0000000000..5caf5ca51a
--- /dev/null
+++ b/repl/src/main/scala/spark/repl/SparkImports.scala
@@ -0,0 +1,214 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2011 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package spark.repl
+
+import scala.tools.nsc._
+import scala.tools.nsc.interpreter._
+
+import scala.collection.{ mutable, immutable }
+
+trait SparkImports {
+ self: SparkIMain =>
+
+ import global._
+ import definitions.{ ScalaPackage, JavaLangPackage, PredefModule }
+ import memberHandlers._
+
+ /** Synthetic import handlers for the language defined imports. */
+ private def makeWildcardImportHandler(sym: Symbol): ImportHandler = {
+ val hd :: tl = sym.fullName.split('.').toList map newTermName
+ val tree = Import(
+ tl.foldLeft(Ident(hd): Tree)((x, y) => Select(x, y)),
+ List(ImportSelector(nme.WILDCARD, -1, null, -1))
+ )
+ tree setSymbol sym
+ new ImportHandler(tree)
+ }
+
+ /** Symbols whose contents are language-defined to be imported. */
+ def languageWildcardSyms: List[Symbol] = List(JavaLangPackage, ScalaPackage, PredefModule)
+ def languageWildcards: List[Type] = languageWildcardSyms map (_.tpe)
+ def languageWildcardHandlers = languageWildcardSyms map makeWildcardImportHandler
+
+ def importedTerms = onlyTerms(importHandlers flatMap (_.importedNames))
+ def importedTypes = onlyTypes(importHandlers flatMap (_.importedNames))
+
+ /** Types which have been wildcard imported, such as:
+ * val x = "abc" ; import x._ // type java.lang.String
+ * import java.lang.String._ // object java.lang.String
+ *
+ * Used by tab completion.
+ *
+ * XXX right now this gets import x._ and import java.lang.String._,
+ * but doesn't figure out import String._. There's a lot of ad hoc
+ * scope twiddling which should be swept away in favor of digging
+ * into the compiler scopes.
+ */
+ def sessionWildcards: List[Type] = {
+ importHandlers flatMap {
+ case x if x.importsWildcard => x.targetType
+ case _ => None
+ } distinct
+ }
+ def wildcardTypes = languageWildcards ++ sessionWildcards
+
+ def languageSymbols = languageWildcardSyms flatMap membersAtPickler
+ def sessionImportedSymbols = importHandlers flatMap (_.importedSymbols)
+ def importedSymbols = languageSymbols ++ sessionImportedSymbols
+ def importedTermSymbols = importedSymbols collect { case x: TermSymbol => x }
+ def importedTypeSymbols = importedSymbols collect { case x: TypeSymbol => x }
+ def implicitSymbols = importedSymbols filter (_.isImplicit)
+
+ def importedTermNamed(name: String) = importedTermSymbols find (_.name.toString == name)
+
+ /** Tuples of (source, imported symbols) in the order they were imported.
+ */
+ def importedSymbolsBySource: List[(Symbol, List[Symbol])] = {
+ val lang = languageWildcardSyms map (sym => (sym, membersAtPickler(sym)))
+ val session = importHandlers filter (_.targetType.isDefined) map { mh =>
+ (mh.targetType.get.typeSymbol, mh.importedSymbols)
+ }
+
+ lang ++ session
+ }
+ def implicitSymbolsBySource: List[(Symbol, List[Symbol])] = {
+ importedSymbolsBySource map {
+ case (k, vs) => (k, vs filter (_.isImplicit))
+ } filterNot (_._2.isEmpty)
+ }
+
+ /** Compute imports that allow definitions from previous
+ * requests to be visible in a new request. Returns
+ * three pieces of related code:
+ *
+ * 1. An initial code fragment that should go before
+ * the code of the new request.
+ *
+ * 2. A code fragment that should go after the code
+ * of the new request.
+ *
+ * 3. An access path which can be traversed to access
+ * any bindings inside code wrapped by #1 and #2.
+ *
+ * The argument is a set of Names that need to be imported.
+ *
+ * Limitations: This method is not as precise as it could be.
+ * (1) It does not process wildcard imports to see what exactly
+ * they import.
+ * (2) If it imports any names from a request, it imports all
+ * of them, which is not really necessary.
+ * (3) It imports multiple same-named implicits, but only the
+ * last one imported is actually usable.
+ */
+ case class ComputedImports(prepend: String, append: String, access: String)
+ protected def importsCode(wanted: Set[Name]): ComputedImports = {
+ /** Narrow down the list of requests from which imports
+ * should be taken. Removes requests which cannot contribute
+ * useful imports for the specified set of wanted names.
+ */
+ case class ReqAndHandler(req: Request, handler: MemberHandler) { }
+
+ def reqsToUse: List[ReqAndHandler] = {
+ /** Loop through a list of MemberHandlers and select which ones to keep.
+ * 'wanted' is the set of names that need to be imported.
+ */
+ def select(reqs: List[ReqAndHandler], wanted: Set[Name]): List[ReqAndHandler] = {
+ val isWanted = wanted contains _
+ // Single symbol imports might be implicits! See bug #1752. Rather than
+ // try to finesse this, we will mimic all imports for now.
+ def keepHandler(handler: MemberHandler) = handler match {
+ case _: ImportHandler => true
+ case x => x.definesImplicit || (x.definedNames exists isWanted)
+ }
+
+ reqs match {
+ case Nil => Nil
+ case rh :: rest if !keepHandler(rh.handler) => select(rest, wanted)
+ case rh :: rest =>
+ import rh.handler._
+ val newWanted = wanted ++ referencedNames -- definedNames -- importedNames
+ rh :: select(rest, newWanted)
+ }
+ }
+
+ /** Flatten the handlers out and pair each with the original request */
+ select(allReqAndHandlers reverseMap { case (r, h) => ReqAndHandler(r, h) }, wanted).reverse
+ }
+
+ val code, trailingBraces, accessPath = new StringBuilder
+ val currentImps = mutable.HashSet[Name]()
+
+ // add code for a new object to hold some imports
+ def addWrapper() {
+ val impname = nme.INTERPRETER_IMPORT_WRAPPER
+ code append "class %sC extends Serializable {\n".format(impname)
+ trailingBraces append "}\nval " + impname + " = new " + impname + "C;\n"
+ accessPath append ("." + impname)
+
+ currentImps.clear
+ }
+
+ addWrapper()
+
+ // loop through previous requests, adding imports for each one
+ for (ReqAndHandler(req, handler) <- reqsToUse) {
+ handler match {
+ // If the user entered an import, then just use it; add an import wrapping
+ // level if the import might conflict with some other import
+ case x: ImportHandler =>
+ if (x.importsWildcard || (currentImps exists (x.importedNames contains _)))
+ addWrapper()
+
+ code append (x.member + "\n")
+
+ // give wildcard imports an import wrapper all their own
+ if (x.importsWildcard) addWrapper()
+ else currentImps ++= x.importedNames
+
+ // For other requests, import each defined name.
+ // import them explicitly instead of with _, so that
+ // ambiguity errors will not be generated. Also, quote
+ // the name of the variable, so that we don't need to
+ // handle quoting keywords separately.
+ case x =>
+ for (imv <- x.definedNames) {
+ // MATEI: Changed this check because it was messing up for case classes
+ // (trying to import them twice within the same wrapper), but that is more likely
+ // due to a miscomputation of names that makes the code think they're unique.
+ // Need to evaluate whether having so many wrappers is a bad thing.
+ /*if (currentImps contains imv)*/
+ val imvName = imv.toString
+ if (currentImps exists (_.toString == imvName)) addWrapper()
+
+ val objName = req.lineRep.readPath
+ val valName = "$VAL" + newValId();
+ code.append("val " + valName + " = " + objName + ".INSTANCE;\n")
+ code.append("import " + valName + req.accessPath + ".`" + imv + "`;\n")
+
+ //code append ("import %s\n" format (req fullPath imv))
+ currentImps += imv
+ }
+ }
+ }
+ // add one extra wrapper, to prevent warnings in the common case of
+ // redefining the value bound in the last interpreter request.
+ addWrapper()
+ ComputedImports(code.toString, trailingBraces.toString, accessPath.toString)
+ }
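+
+ // Rough shape of the computed fragments when a prior request defined "x"
+ // (names illustrative; $iw is nme.INTERPRETER_IMPORT_WRAPPER):
+ // prepend: class $iwC extends Serializable {
+ // val $VAL1 = $line1.$read.INSTANCE;
+ // import $VAL1.`x`;
+ // append: }
+ // val $iw = new $iwC;
+ // access: .$iw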
+
+ private def allReqAndHandlers =
+ prevRequestList flatMap (req => req.handlers map (req -> _))
+
+ private def membersAtPickler(sym: Symbol): List[Symbol] =
+ atPickler(sym.info.nonPrivateMembers)
+
+ private var curValId = 0
+
+ private def newValId(): Int = {
+ curValId += 1
+ curValId
+ }
+}
diff --git a/repl/src/main/scala/spark/repl/SparkInteractiveReader.scala b/repl/src/main/scala/spark/repl/SparkInteractiveReader.scala
deleted file mode 100644
index 4f5a0a6fa0..0000000000
--- a/repl/src/main/scala/spark/repl/SparkInteractiveReader.scala
+++ /dev/null
@@ -1,60 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2010 LAMP/EPFL
- * @author Stepan Koltsov
- */
-
-package spark.repl
-
-import scala.tools.nsc
-import scala.tools.nsc._
-import scala.tools.nsc.interpreter
-import scala.tools.nsc.interpreter._
-
-import scala.util.control.Exception._
-
-/** Reads lines from an input stream */
-trait SparkInteractiveReader {
- import SparkInteractiveReader._
- import java.io.IOException
-
- protected def readOneLine(prompt: String): String
- val interactive: Boolean
-
- def readLine(prompt: String): String = {
- def handler: Catcher[String] = {
- case e: IOException if restartSystemCall(e) => readLine(prompt)
- }
- catching(handler) { readOneLine(prompt) }
- }
-
- // override if history is available
- def history: Option[History] = None
- def historyList = history map (_.asList) getOrElse Nil
-
- // override if completion is available
- def completion: Option[SparkCompletion] = None
-
- // hack necessary for OSX jvm suspension because read calls are not restarted after SIGTSTP
- private def restartSystemCall(e: Exception): Boolean =
- Properties.isMac && (e.getMessage == msgEINTR)
-}
-
-
-object SparkInteractiveReader {
- val msgEINTR = "Interrupted system call"
- private val exes = List(classOf[Exception], classOf[NoClassDefFoundError])
-
- def createDefault(): SparkInteractiveReader = createDefault(null)
-
- /** Create an interactive reader. Uses <code>JLineReader</code> if the
- * library is available, but otherwise uses a <code>SimpleReader</code>.
- */
- def createDefault(interpreter: SparkInterpreter): SparkInteractiveReader =
- try new SparkJLineReader(interpreter)
- catch {
- case e @ (_: Exception | _: NoClassDefFoundError) =>
- // println("Failed to create SparkJLineReader(%s): %s".format(interpreter, e))
- new SparkSimpleReader
- }
-}
-
diff --git a/repl/src/main/scala/spark/repl/SparkInterpreter.scala b/repl/src/main/scala/spark/repl/SparkInterpreter.scala
deleted file mode 100644
index 10ea346658..0000000000
--- a/repl/src/main/scala/spark/repl/SparkInterpreter.scala
+++ /dev/null
@@ -1,1395 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2010 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package spark.repl
-
-import scala.tools.nsc
-import scala.tools.nsc._
-
-import Predef.{ println => _, _ }
-import java.io.{ File, IOException, PrintWriter, StringWriter, Writer }
-import File.pathSeparator
-import java.lang.{ Class, ClassLoader }
-import java.net.{ MalformedURLException, URL }
-import java.lang.reflect
-import reflect.InvocationTargetException
-import java.util.UUID
-
-import scala.PartialFunction.{ cond, condOpt }
-import scala.tools.util.PathResolver
-import scala.reflect.Manifest
-import scala.collection.mutable
-import scala.collection.mutable.{ ListBuffer, HashSet, HashMap, ArrayBuffer }
-import scala.collection.immutable.Set
-import scala.tools.nsc.util.ScalaClassLoader
-import ScalaClassLoader.URLClassLoader
-import scala.util.control.Exception.{ Catcher, catching, ultimately, unwrapping }
-
-import io.{ PlainFile, VirtualDirectory }
-import reporters.{ ConsoleReporter, Reporter }
-import symtab.{ Flags, Names }
-import util.{ SourceFile, BatchSourceFile, ScriptSourceFile, ClassPath, Chars, stringFromWriter }
-import scala.reflect.NameTransformer
-import scala.tools.nsc.{ InterpreterResults => IR }
-import interpreter._
-import SparkInterpreter._
-
-import spark.HttpServer
-import spark.Utils
-
-/** <p>
- * An interpreter for Scala code.
- * </p>
- * <p>
- * The main public entry points are <code>compile()</code>,
- * <code>interpret()</code>, and <code>bind()</code>.
- * The <code>compile()</code> method loads a
- * complete Scala file. The <code>interpret()</code> method executes one
- * line of Scala code at the request of the user. The <code>bind()</code>
- * method binds an object to a variable that can then be used by later
- * interpreted code.
- * </p>
- * <p>
- * The overall approach is based on compiling the requested code and then
- * using a Java classloader and Java reflection to run the code
- * and access its results.
- * </p>
- * <p>
- * In more detail, a single compiler instance is used
- * to accumulate all successfully compiled or interpreted Scala code. To
- * "interpret" a line of code, the compiler generates a fresh object that
- * includes the line of code and which has public member(s) to export
- * all variables defined by that code. To extract the result of an
- * interpreted line to show the user, a second "result object" is created
- * which imports the variables exported by the above object and then
- * exports a single member named "scala_repl_result". To accommodate user expressions
- * that read from variables or methods defined in previous statements, "import"
- * statements are used.
- * </p>
- * <p>
- * This interpreter shares the strengths and weaknesses of using the
- * full compiler-to-Java. The main strength is that interpreted code
- * behaves exactly as does compiled code, including running at full speed.
- * The main weakness is that redefining classes and methods is not handled
- * properly, because rebinding at the Java level is technically difficult.
- * </p>
- *
- * @author Moez A. Abdel-Gawad
- * @author Lex Spoon
- */
-class SparkInterpreter(val settings: Settings, out: PrintWriter) {
- repl =>
-
- def println(x: Any) = {
- out.println(x)
- out.flush()
- }
-
- /** construct an interpreter that reports to Console */
- def this(settings: Settings) = this(settings, new NewLinePrintWriter(new ConsoleWriter, true))
- def this() = this(new Settings())
-
- val SPARK_DEBUG_REPL: Boolean = (System.getenv("SPARK_DEBUG_REPL") == "1")
-
- /** Local directory to save .class files to */
- val outputDir = {
- val tmp = System.getProperty("java.io.tmpdir")
- val rootDir = System.getProperty("spark.repl.classdir", tmp)
- Utils.createTempDir(rootDir)
- }
- if (SPARK_DEBUG_REPL) {
- println("Output directory: " + outputDir)
- }
-
- /** Scala compiler virtual directory for outputDir */
- //val virtualDirectory = new VirtualDirectory("(memory)", None)
- val virtualDirectory = new PlainFile(outputDir)
-
- /** Jetty server that will serve our classes to worker nodes */
- val classServer = new HttpServer(outputDir)
-
- // Start the classServer and store its URI in a spark system property
- // (which will be passed to executors so that they can connect to it)
- classServer.start()
- System.setProperty("spark.repl.class.uri", classServer.uri)
- if (SPARK_DEBUG_REPL) {
- println("Class server started, URI = " + classServer.uri)
- }
-
- /** reporter */
- object reporter extends ConsoleReporter(settings, null, out) {
- override def printMessage(msg: String) {
- out println clean(msg)
- out.flush()
- }
- }
-
- /** We're going to go to some trouble to initialize the compiler asynchronously.
- * It's critical that nothing call into it until it's been initialized or we will
- * run into unrecoverable issues, but the perceived repl startup time goes
- * through the roof if we wait for it. So we initialize it with a future and
- * use a lazy val to ensure that any attempt to use the compiler object waits
- * on the future.
- */
- private val _compiler: Global = newCompiler(settings, reporter)
- private def _initialize(): Boolean = {
- val source = """
- |// this is assembled to force the loading of approximately the
- |// classes which will be loaded on the first expression anyway.
- |class $repl_$init {
- | val x = "abc".reverse.length + (5 max 5)
- | scala.runtime.ScalaRunTime.stringOf(x)
- |}
- |""".stripMargin
-
- try {
- new _compiler.Run() compileSources List(new BatchSourceFile("<init>", source))
- if (isReplDebug || settings.debug.value)
- println("Repl compiler initialized.")
- true
- }
- catch {
- case MissingRequirementError(msg) => println("""
- |Failed to initialize compiler: %s not found.
- |** Note that as of 2.8 scala does not assume use of the java classpath.
- |** For the old behavior pass -usejavacp to scala, or if using a Settings
- |** object programmatically, settings.usejavacp.value = true.""".stripMargin.format(msg)
- )
- false
- }
- }
-
- // set up initialization future
- private var _isInitialized: () => Boolean = null
- def initialize() = synchronized {
- if (_isInitialized == null)
- _isInitialized = scala.concurrent.ops future _initialize()
- }
-
- /** the public compiler accessor; it waits on the initialization future */
- lazy val compiler: Global = {
- initialize()
-
- // blocks until initialization is complete; false means catastrophic failure
- if (_isInitialized()) _compiler
- else null
- }
-
- import compiler.{ Traverser, CompilationUnit, Symbol, Name, Type }
- import compiler.{
- Tree, TermTree, ValOrDefDef, ValDef, DefDef, Assign, ClassDef,
- ModuleDef, Ident, Select, TypeDef, Import, MemberDef, DocDef,
- ImportSelector, EmptyTree, NoType }
- import compiler.{ nme, newTermName, newTypeName }
- import nme.{
- INTERPRETER_VAR_PREFIX, INTERPRETER_SYNTHVAR_PREFIX, INTERPRETER_LINE_PREFIX,
- INTERPRETER_IMPORT_WRAPPER, INTERPRETER_WRAPPER_SUFFIX, USCOREkw
- }
-
- import compiler.definitions
- import definitions.{ EmptyPackage, getMember }
-
- /** whether to print out result lines */
- private[repl] var printResults: Boolean = true
-
- /** Temporarily be quiet */
- def beQuietDuring[T](operation: => T): T = {
- val wasPrinting = printResults
- ultimately(printResults = wasPrinting) {
- printResults = false
- operation
- }
- }
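-
- // Usage sketch (illustrative, not from the original source): run setup code
- // without echoing results to the console; printResults is restored by
- // `ultimately` even if the operation throws.
- // beQuietDuring { interpret("import spark.SparkContext._") }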
-
- /** whether to bind the lastException variable */
- private var bindLastException = true
-
- /** Temporarily stop binding lastException */
- def withoutBindingLastException[T](operation: => T): T = {
- val wasBinding = bindLastException
- ultimately(bindLastException = wasBinding) {
- bindLastException = false
- operation
- }
- }
-
- /** interpreter settings */
- lazy val isettings = new SparkInterpreterSettings(this)
-
- /** Instantiate a compiler. Subclasses can override this to
- * change the compiler class used by this interpreter. */
- protected def newCompiler(settings: Settings, reporter: Reporter) = {
- settings.outputDirs setSingleOutput virtualDirectory
- new Global(settings, reporter)
- }
-
- /** the compiler's classpath, as URLs */
- lazy val compilerClasspath: List[URL] = new PathResolver(settings) asURLs
-
- /* A single class loader is used for all commands interpreted by this Interpreter.
- It would also be possible to create a new class loader for each command
- to interpret. The advantages of the current approach are:
-
- - Expressions are only evaluated one time. This is especially
- significant for I/O, e.g. "val x = Console.readLine"
-
- The main disadvantage is:
-
- - Objects, classes, and methods cannot be rebound. Instead, definitions
- shadow the old ones, and old code objects refer to the old
- definitions.
- */
- private var _classLoader: ClassLoader = null
- def resetClassLoader() = _classLoader = makeClassLoader()
- def classLoader: ClassLoader = {
- if (_classLoader == null)
- resetClassLoader()
-
- _classLoader
- }
- private def makeClassLoader(): ClassLoader = {
- /*
- val parent =
- if (parentClassLoader == null) ScalaClassLoader fromURLs compilerClasspath
- else new URLClassLoader(compilerClasspath, parentClassLoader)
-
- new AbstractFileClassLoader(virtualDirectory, parent)
- */
- val parent =
- if (parentClassLoader == null)
- new java.net.URLClassLoader(compilerClasspath.toArray)
- else
- new java.net.URLClassLoader(compilerClasspath.toArray,
- parentClassLoader)
- val virtualDirUrl = new URL("file://" + virtualDirectory.path + "/")
- new java.net.URLClassLoader(Array(virtualDirUrl), parent)
- }
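-
- // Resulting delegation chain (illustrative): a loader over outputDir (the
- // generated classes) delegating to a loader over the compiler classpath,
- // which in turn delegates to parentClassLoader.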
-
- private def loadByName(s: String): Class[_] = // (classLoader tryToInitializeClass s).get
- Class.forName(s, true, classLoader)
-
- private def methodByName(c: Class[_], name: String): reflect.Method =
- c.getMethod(name, classOf[Object])
-
- protected def parentClassLoader: ClassLoader = this.getClass.getClassLoader()
- def getInterpreterClassLoader() = classLoader
-
- // Set the current Java "context" class loader to this interpreter's class loader
- def setContextClassLoader() = Thread.currentThread.setContextClassLoader(classLoader)
-
- /** the previous requests this interpreter has processed */
- private val prevRequests = new ArrayBuffer[Request]()
- private val usedNameMap = new HashMap[Name, Request]()
- private val boundNameMap = new HashMap[Name, Request]()
- private def allHandlers = prevRequests.toList flatMap (_.handlers)
- private def allReqAndHandlers = prevRequests.toList flatMap (req => req.handlers map (req -> _))
-
- def printAllTypeOf = {
- prevRequests foreach { req =>
- req.typeOf foreach { case (k, v) => Console.println(k + " => " + v) }
- }
- }
-
- /** Most recent tree handled which wasn't wholly synthetic. */
- private def mostRecentlyHandledTree: Option[Tree] = {
- for {
- req <- prevRequests.reverse
- handler <- req.handlers.reverse
- name <- handler.generatesValue
- if !isSynthVarName(name)
- } return Some(handler.member)
-
- None
- }
-
- def recordRequest(req: Request) {
- def tripart[T](set1: Set[T], set2: Set[T]) = {
- val intersect = set1 intersect set2
- List(set1 -- intersect, intersect, set2 -- intersect)
- }
-
- prevRequests += req
- req.usedNames foreach (x => usedNameMap(x) = req)
- req.boundNames foreach (x => boundNameMap(x) = req)
-
- // XXX temporarily putting this here because of tricky initialization order issues
- // so right now it's not bound until after you issue a command.
- if (prevRequests.size == 1)
- quietBind("settings", "spark.repl.SparkInterpreterSettings", isettings)
-
- // println("\n s1 = %s\n s2 = %s\n s3 = %s".format(
- // tripart(usedNameMap.keysIterator.toSet, boundNameMap.keysIterator.toSet): _*
- // ))
- }
-
- private def keyList[T](x: collection.Map[T, _]): List[T] = x.keys.toList sortBy (_.toString)
- def allUsedNames = keyList(usedNameMap)
- def allBoundNames = keyList(boundNameMap)
- def allSeenTypes = prevRequests.toList flatMap (_.typeOf.values.toList) distinct
- def allValueGeneratingNames = allHandlers flatMap (_.generatesValue)
- def allImplicits = partialFlatMap(allHandlers) {
- case x: MemberHandler if x.definesImplicit => x.boundNames
- }
-
- /** Generates names pre0, pre1, etc. via calls to the apply method */
- class NameCreator(pre: String) {
- private var x = -1
- var mostRecent: String = null
-
- def apply(): String = {
- x += 1
- val name = pre + x.toString
- // make sure we don't overwrite their unwisely named res3 etc.
- mostRecent =
- if (allBoundNames exists (_.toString == name)) apply()
- else name
-
- mostRecent
- }
- def reset(): Unit = x = -1
- def didGenerate(name: String) =
- (name startsWith pre) && ((name drop pre.length) forall (_.isDigit))
- }
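-
- // Example (illustrative): with `val names = new NameCreator("res")`,
- // names() returns "res0", then "res1", and skips over any resN the user
- // has already bound so it is never overwritten.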
-
- /** allocate a fresh line name */
- private lazy val lineNameCreator = new NameCreator(INTERPRETER_LINE_PREFIX)
-
- /** allocate a fresh var name */
- private lazy val varNameCreator = new NameCreator(INTERPRETER_VAR_PREFIX)
-
- /** allocate a fresh internal variable name */
- private lazy val synthVarNameCreator = new NameCreator(INTERPRETER_SYNTHVAR_PREFIX)
-
- /** Check if a name looks like it was generated by varNameCreator */
- private def isGeneratedVarName(name: String): Boolean = varNameCreator didGenerate name
- private def isSynthVarName(name: String): Boolean = synthVarNameCreator didGenerate name
- private def isSynthVarName(name: Name): Boolean = synthVarNameCreator didGenerate name.toString
-
- def getVarName = varNameCreator()
- def getSynthVarName = synthVarNameCreator()
-
- /** Truncate a string if it is longer than isettings.maxPrintString */
- private def truncPrintString(str: String): String = {
- val maxpr = isettings.maxPrintString
- val trailer = "..."
-
- if (maxpr <= 0 || str.length <= maxpr) str
- else str.substring(0, maxpr-3) + trailer
- }
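-
- // Example (illustrative): with isettings.maxPrintString = 10,
- // truncPrintString("abcdefghijkl") yields "abcdefg...".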
-
- /** Clean up a string for output */
- private def clean(str: String) = truncPrintString(
- if (isettings.unwrapStrings && !SPARK_DEBUG_REPL) stripWrapperGunk(str)
- else str
- )
-
- /** Heuristically strip interpreter wrapper prefixes
- * from an interpreter output string.
- * MATEI: Copied from interpreter package object
- */
- def stripWrapperGunk(str: String): String = {
- val wrapregex = """(line[0-9]+\$object[$.])?(\$?VAL.?)*(\$iwC?(.this)?[$.])*"""
- str.replaceAll(wrapregex, "")
- }
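-
- // Example (illustrative): a raw compiler name such as
- // "line1$object$$iwC$$iwC.Foo" is shown to the user as "Foo".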
-
- /** Indent some code by the width of the scala> prompt.
- * This way, compiler error messages read better.
- */
- private final val spaces = List.fill(7)(" ").mkString
- def indentCode(code: String) = {
- /** Heuristic to avoid indenting and thereby corrupting """-strings and XML literals. */
- val noIndent = (code contains "\n") && (List("\"\"\"", "</", "/>") exists (code contains _))
- stringFromWriter(str =>
- for (line <- code.lines) {
- if (!noIndent)
- str.print(spaces)
-
- str.print(line + "\n")
- str.flush()
- })
- }
- def indentString(s: String) = s split "\n" map (spaces + _ + "\n") mkString
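-
- // Example (illustrative): indentCode("val x = 1") yields the line prefixed
- // with seven spaces, aligning it under the "scala> " prompt in error output.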
-
- implicit def name2string(name: Name) = name.toString
-
- /** Compute imports that allow definitions from previous
- * requests to be visible in a new request. Returns
- * three pieces of related code:
- *
- * 1. An initial code fragment that should go before
- * the code of the new request.
- *
- * 2. A code fragment that should go after the code
- * of the new request.
- *
- * 3. An access path which can be traversed to access
- * any bindings inside code wrapped by #1 and #2.
- *
- * The argument is a set of Names that need to be imported.
- *
- * Limitations: This method is not as precise as it could be.
- * (1) It does not process wildcard imports to see what exactly
- * they import.
- * (2) If it imports any names from a request, it imports all
- * of them, which is not really necessary.
- * (3) It imports multiple same-named implicits, but only the
- * last one imported is actually usable.
- */
- private case class ComputedImports(prepend: String, append: String, access: String)
- private def importsCode(wanted: Set[Name]): ComputedImports = {
- /** Narrow down the list of requests from which imports
- * should be taken. Removes requests which cannot contribute
- * useful imports for the specified set of wanted names.
- */
- case class ReqAndHandler(req: Request, handler: MemberHandler) { }
-
- def reqsToUse: List[ReqAndHandler] = {
- /** Loop through a list of MemberHandlers and select which ones to keep.
- * 'wanted' is the set of names that need to be imported.
- */
- def select(reqs: List[ReqAndHandler], wanted: Set[Name]): List[ReqAndHandler] = {
- val isWanted = wanted contains _
- // Single symbol imports might be implicits! See bug #1752. Rather than
- // try to finesse this, we will mimic all imports for now.
- def keepHandler(handler: MemberHandler) = handler match {
- case _: ImportHandler => true
- case x => x.definesImplicit || (x.boundNames exists isWanted)
- }
-
- reqs match {
- case Nil => Nil
- case rh :: rest if !keepHandler(rh.handler) => select(rest, wanted)
- case rh :: rest =>
- val importedNames = rh.handler match { case x: ImportHandler => x.importedNames ; case _ => Nil }
- import rh.handler._
- val newWanted = wanted ++ usedNames -- boundNames -- importedNames
- rh :: select(rest, newWanted)
- }
- }
-
- /** Flatten the handlers out and pair each with the original request */
- select(allReqAndHandlers reverseMap { case (r, h) => ReqAndHandler(r, h) }, wanted).reverse
- }
-
- val code, trailingLines, accessPath = new StringBuffer
- val currentImps = HashSet[Name]()
-
- // add code for a new object to hold some imports
- def addWrapper() {
- /*
- val impname = INTERPRETER_IMPORT_WRAPPER
- code append "object %s {\n".format(impname)
- trailingLines append "}\n"
- accessPath append ("." + impname)
- currentImps.clear
- */
- val impname = INTERPRETER_IMPORT_WRAPPER
- code.append("@serializable class " + impname + "C {\n")
- trailingLines.append("}\nval " + impname + " = new " + impname + "C;\n")
- accessPath.append("." + impname)
- currentImps.clear
- }
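-
- // Illustrative shape of one wrapper (with $iw standing in for
- // INTERPRETER_IMPORT_WRAPPER): the prepend buffer gains
- // "@serializable class $iwC {", the trailer gains "}" followed by
- // "val $iw = new $iwC;", and the access path gains ".$iw".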
-
- addWrapper()
-
- // loop through previous requests, adding imports for each one
- for (ReqAndHandler(req, handler) <- reqsToUse) {
- handler match {
- // If the user entered an import, then just use it; add an import wrapping
- // level if the import might conflict with some other import
- case x: ImportHandler =>
- if (x.importsWildcard || (currentImps exists (x.importedNames contains _)))
- addWrapper()
-
- code append (x.member.toString + "\n")
-
- // give wildcard imports an import wrapper all their own
- if (x.importsWildcard) addWrapper()
- else currentImps ++= x.importedNames
-
- // For other requests, import each bound variable.
- // import them explicitly instead of with _, so that
- // ambiguity errors will not be generated. Also, quote
- // the name of the variable, so that we don't need to
- // handle quoting keywords separately.
- case x =>
- for (imv <- x.boundNames) {
- // MATEI: Commented this check out because it was messing up for case classes
- // (trying to import them twice within the same wrapper), but that is more likely
- // due to a miscomputation of names that makes the code think they're unique.
- // Need to evaluate whether having so many wrappers is a bad thing.
- /*if (currentImps contains imv) */ addWrapper()
-
- code.append("val " + req.objectName + "$VAL = " + req.objectName + ".INSTANCE;\n")
- code.append("import " + req.objectName + "$VAL" + req.accessPath + ".`" + imv + "`;\n")
-
- //code append ("import %s\n" format (req fullPath imv))
- currentImps += imv
- }
- }
- }
- // add one extra wrapper, to prevent warnings in the common case of
- // redefining the value bound in the last interpreter request.
- addWrapper()
- ComputedImports(code.toString, trailingLines.toString, accessPath.toString)
- }
-
- /** Parse a line into a sequence of trees. Returns None if the input is incomplete. */
- private def parse(line: String): Option[List[Tree]] = {
- var justNeedsMore = false
- reporter.withIncompleteHandler((pos,msg) => {justNeedsMore = true}) {
- // simple parse: just parse it, nothing else
- def simpleParse(code: String): List[Tree] = {
- reporter.reset
- val unit = new CompilationUnit(new BatchSourceFile("<console>", code))
- val scanner = new compiler.syntaxAnalyzer.UnitParser(unit)
-
- scanner.templateStatSeq(false)._2
- }
- val trees = simpleParse(line)
-
- if (reporter.hasErrors) Some(Nil) // the result did not parse, so stop
- else if (justNeedsMore) None
- else Some(trees)
- }
- }
-
- /** Compile an nsc SourceFile. Returns true if there are
- * no compilation errors, or false otherwise.
- */
- def compileSources(sources: SourceFile*): Boolean = {
- reporter.reset
- new compiler.Run() compileSources sources.toList
- !reporter.hasErrors
- }
-
- /** Compile a string. Returns true if there are no
- * compilation errors, or false otherwise.
- */
- def compileString(code: String): Boolean =
- compileSources(new BatchSourceFile("<script>", code))
-
- def compileAndSaveRun(label: String, code: String) = {
- if (SPARK_DEBUG_REPL)
- println(code)
- if (isReplDebug) {
- parse(code) match {
- case Some(trees) => trees foreach (t => DBG(compiler.asCompactString(t)))
- case _ => DBG("Parse error:\n\n" + code)
- }
- }
- val run = new compiler.Run()
- run.compileSources(List(new BatchSourceFile(label, code)))
- run
- }
-
- /** Build a request from the user. <code>trees</code> is <code>line</code>
- * after being parsed.
- */
- private def buildRequest(line: String, lineName: String, trees: List[Tree]): Request =
- new Request(line, lineName, trees)
-
- private def chooseHandler(member: Tree): MemberHandler = member match {
- case member: DefDef => new DefHandler(member)
- case member: ValDef => new ValHandler(member)
- case member@Assign(Ident(_), _) => new AssignHandler(member)
- case member: ModuleDef => new ModuleHandler(member)
- case member: ClassDef => new ClassHandler(member)
- case member: TypeDef => new TypeAliasHandler(member)
- case member: Import => new ImportHandler(member)
- case DocDef(_, documented) => chooseHandler(documented)
- case member => new GenericHandler(member)
- }
-
- private def requestFromLine(line: String, synthetic: Boolean): Either[IR.Result, Request] = {
- val trees = parse(indentCode(line)) match {
- case None => return Left(IR.Incomplete)
- case Some(Nil) => return Left(IR.Error) // parse error or empty input
- case Some(trees) => trees
- }
-
- // use synthetic vars to avoid filling up the resXX slots
- def varName = if (synthetic) getSynthVarName else getVarName
-
- // Treat a single bare expression specially, since it is hard both to
- // modify code at a textual level and to submit an AST to the compiler.
- if (trees.size == 1) trees.head match {
- case _:Assign => // we don't want to include assignments
- case _:TermTree | _:Ident | _:Select => // ... but do want these as valdefs.
- return requestFromLine("val %s =\n%s".format(varName, line), synthetic)
- case _ =>
- }
-
- // figure out what kind of request
- Right(buildRequest(line, lineNameCreator(), trees))
- }
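-
- // Example (illustrative, names schematic): a bare expression like "1 + 1"
- // is resubmitted as "val res0 =\n1 + 1" (a synthetic name is used when
- // synthetic = true), so its value lands in a fresh slot; bare assignments
- // are left alone.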
-
- /** <p>
- * Interpret one line of input. All feedback, including parse errors
- * and evaluation results, is printed via the supplied compiler's
- * reporter. Values defined are available to subsequently interpreted
- * strings.
- * </p>
- * <p>
- * The return value indicates whether the line was interpreted
- * successfully, e.g. that there were no parse errors.
- * </p>
- *
- * @param line the line of Scala code to interpret
- * @return the result of interpreting the line
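- *
- * Example (illustrative):
- * <pre>
- * interpret("val x = 10") // prints "x: Int = 10", returns IR.Success
- * interpret("if (x > 5)") // incomplete input, returns IR.Incomplete
- * </pre>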
- */
- def interpret(line: String): IR.Result = interpret(line, false)
- def interpret(line: String, synthetic: Boolean): IR.Result = {
- def loadAndRunReq(req: Request) = {
- val (result, succeeded) = req.loadAndRun
- if (printResults || !succeeded)
- out print clean(result)
-
- // book-keeping
- if (succeeded && !synthetic)
- recordRequest(req)
-
- if (succeeded) IR.Success
- else IR.Error
- }
-
- if (compiler == null) IR.Error
- else requestFromLine(line, synthetic) match {
- case Left(result) => result
- case Right(req) =>
- // null indicates a disallowed statement type; otherwise compile and
- // fail if false (implying e.g. a type error)
- if (req == null || !req.compile) IR.Error
- else loadAndRunReq(req)
- }
- }
-
- /** A name creator used for objects created by <code>bind()</code>. */
- private lazy val newBinder = new NameCreator("binder")
-
- /** Bind a specified name to a specified value. The name may
- * later be used by expressions passed to interpret.
- *
- * @param name the variable name to bind
- * @param boundType the type of the variable, as a string
- * @param value the object value to bind to it
- * @return an indication of whether the binding succeeded
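- *
- * Example (illustrative):
- * <pre>
- * bind("ten", "Int", 10) // later lines can refer to `ten`
- * interpret("ten * 2") // prints something like "res0: Int = 20"
- * </pre>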
- */
- def bind(name: String, boundType: String, value: Any): IR.Result = {
- val binderName = newBinder()
-
- compileString("""
- |object %s {
- | var value: %s = _
- | def set(x: Any) = value = x.asInstanceOf[%s]
- |}
- """.stripMargin.format(binderName, boundType, boundType))
-
- val binderObject = loadByName(binderName)
- val setterMethod = methodByName(binderObject, "set")
-
- setterMethod.invoke(null, value.asInstanceOf[AnyRef])
- interpret("val %s = %s.value".format(name, binderName))
- }
-
- def quietBind(name: String, boundType: String, value: Any): IR.Result =
- beQuietDuring { bind(name, boundType, value) }
-
- /** Reset this interpreter, forgetting all user-specified requests. */
- def reset() {
- //virtualDirectory.clear
- virtualDirectory.delete
- virtualDirectory.create
- resetClassLoader()
- lineNameCreator.reset()
- varNameCreator.reset()
- prevRequests.clear
- }
-
- /** <p>
- * This instance is no longer needed, so release any resources
- * it is using. The reporter's output gets flushed.
- * </p>
- */
- def close() {
- reporter.flush
- classServer.stop()
- }
-
- /** A traverser that finds all mentioned identifiers, i.e. things
- * that need to be imported. It might return extra names.
- */
- private class ImportVarsTraverser extends Traverser {
- val importVars = new HashSet[Name]()
-
- override def traverse(ast: Tree) = ast match {
- // XXX this is obviously inadequate but it's going to require some effort
- // to get right.
- case Ident(name) if !(name.toString startsWith "x$") => importVars += name
- case _ => super.traverse(ast)
- }
- }
-
- /** Class to handle one member among all the members included
- * in a single interpreter request.
- */
- private sealed abstract class MemberHandler(val member: Tree) {
- lazy val usedNames: List[Name] = {
- val ivt = new ImportVarsTraverser()
- ivt traverse member
- ivt.importVars.toList
- }
- def boundNames: List[Name] = Nil
- val definesImplicit = cond(member) {
- case tree: MemberDef => tree.mods hasFlag Flags.IMPLICIT
- }
- def generatesValue: Option[Name] = None
-
- def extraCodeToEvaluate(req: Request, code: PrintWriter) { }
- def resultExtractionCode(req: Request, code: PrintWriter) { }
-
- override def toString = "%s(used = %s)".format(this.getClass.toString split '.' last, usedNames)
- }
-
- private class GenericHandler(member: Tree) extends MemberHandler(member)
-
- private class ValHandler(member: ValDef) extends MemberHandler(member) {
- lazy val ValDef(mods, vname, _, _) = member
- lazy val prettyName = NameTransformer.decode(vname)
- lazy val isLazy = mods hasFlag Flags.LAZY
-
- override lazy val boundNames = List(vname)
- override def generatesValue = Some(vname)
-
- override def resultExtractionCode(req: Request, code: PrintWriter) {
- val isInternal = isGeneratedVarName(vname) && req.typeOfEnc(vname) == "Unit"
- if (!mods.isPublic || isInternal) return
-
- lazy val extractor = "scala.runtime.ScalaRunTime.stringOf(%s)".format(req fullPath vname)
-
- // if this is a lazy val we avoid evaluating it here
- val resultString = if (isLazy) codegenln(false, "<lazy>") else extractor
- val codeToPrint =
- """ + "%s: %s = " + %s""".format(prettyName, string2code(req typeOf vname), resultString)
-
- code print codeToPrint
- }
- }
-
- private class DefHandler(defDef: DefDef) extends MemberHandler(defDef) {
- lazy val DefDef(mods, name, _, vparamss, _, _) = defDef
- override lazy val boundNames = List(name)
- // true if 0-arity
- override def generatesValue =
- if (vparamss.isEmpty || vparamss.head.isEmpty) Some(name)
- else None
-
- override def resultExtractionCode(req: Request, code: PrintWriter) =
- if (mods.isPublic) code print codegenln(name, ": ", req.typeOf(name))
- }
-
- private class AssignHandler(member: Assign) extends MemberHandler(member) {
- val lhs = member.lhs.asInstanceOf[Ident] // an unfortunate limitation
- val helperName = newTermName(synthVarNameCreator())
- override def generatesValue = Some(helperName)
-
- override def extraCodeToEvaluate(req: Request, code: PrintWriter) =
- code println """val %s = %s""".format(helperName, lhs)
-
- /** Print out lhs instead of the generated varName */
- override def resultExtractionCode(req: Request, code: PrintWriter) {
- val lhsType = string2code(req typeOfEnc helperName)
- val res = string2code(req fullPath helperName)
- val codeToPrint = """ + "%s: %s = " + %s + "\n" """.format(lhs, lhsType, res)
-
- code println codeToPrint
- }
- }
-
- private class ModuleHandler(module: ModuleDef) extends MemberHandler(module) {
- lazy val ModuleDef(mods, name, _) = module
- override lazy val boundNames = List(name)
- override def generatesValue = Some(name)
-
- override def resultExtractionCode(req: Request, code: PrintWriter) =
- code println codegenln("defined module ", name)
- }
-
- private class ClassHandler(classdef: ClassDef) extends MemberHandler(classdef) {
- lazy val ClassDef(mods, name, _, _) = classdef
- override lazy val boundNames =
- name :: (if (mods hasFlag Flags.CASE) List(name.toTermName) else Nil)
-
- override def resultExtractionCode(req: Request, code: PrintWriter) =
- code print codegenln("defined %s %s".format(classdef.keyword, name))
- }
-
- private class TypeAliasHandler(typeDef: TypeDef) extends MemberHandler(typeDef) {
- lazy val TypeDef(mods, name, _, _) = typeDef
- def isAlias() = mods.isPublic && compiler.treeInfo.isAliasTypeDef(typeDef)
- override lazy val boundNames = if (isAlias) List(name) else Nil
-
- override def resultExtractionCode(req: Request, code: PrintWriter) =
- code println codegenln("defined type alias ", name)
- }
-
- private class ImportHandler(imp: Import) extends MemberHandler(imp) {
- lazy val Import(expr, selectors) = imp
- def targetType = stringToCompilerType(expr.toString) match {
- case NoType => None
- case x => Some(x)
- }
-
- private def selectorWild = selectors filter (_.name == USCOREkw) // wildcard imports, e.g. import foo._
- private def selectorMasked = selectors filter (_.rename == USCOREkw) // masking imports, e.g. import foo.{ bar => _ }
- private def selectorNames = selectors map (_.name)
- private def selectorRenames = selectors map (_.rename) filterNot (_ == null)
-
- /** Whether this import includes a wildcard import */
- val importsWildcard = selectorWild.nonEmpty
-
- /** Complete list of names imported by a wildcard */
- def wildcardImportedNames: List[Name] = (
- for (tpe <- targetType ; if importsWildcard) yield
- tpe.nonPrivateMembers filter (x => x.isMethod && x.isPublic) map (_.name) distinct
- ).toList.flatten
-
- /** The individual names imported by this statement.
- * XXX come back to this and see what can be done with wildcards now that
- * we know how to enumerate the identifiers.
- */
- val importedNames: List[Name] =
- selectorRenames filterNot (_ == USCOREkw) flatMap (x => List(x.toTypeName, x.toTermName))
-
- override def resultExtractionCode(req: Request, code: PrintWriter) =
- code println codegenln(imp.toString)
- }
-
- /** One line of code submitted by the user for interpretation */
- private class Request(val line: String, val lineName: String, val trees: List[Tree]) {
- /** name to use for the object that will compute "line" */
- def objectName = lineName + INTERPRETER_WRAPPER_SUFFIX
-
- /** name of the object that retrieves the result from the above object */
- def resultObjectName = "RequestResult$" + objectName
-
- /** handlers for each tree in this request */
- val handlers: List[MemberHandler] = trees map chooseHandler
-
- /** all (public) names defined by these statements */
- val boundNames = handlers flatMap (_.boundNames)
-
- /** list of names used by this expression */
- val usedNames: List[Name] = handlers flatMap (_.usedNames)
-
- /** def and val names */
- def defNames = partialFlatMap(handlers) { case x: DefHandler => x.boundNames }
- def valueNames = partialFlatMap(handlers) {
- case x: AssignHandler => List(x.helperName)
- case x: ValHandler => boundNames
- case x: ModuleHandler => List(x.name)
- }
-
- /** Code to import bound names from previous lines - accessPath is code to
- * append to objectName to access anything bound by request.
- */
- val ComputedImports(importsPreamble, importsTrailer, accessPath) =
- importsCode(Set.empty ++ usedNames)
-
- /** Code to access a variable with the specified name */
- def fullPath(vname: String): String = "%s.`%s`".format(objectName + ".INSTANCE" + accessPath, vname)
-
- /** Code to access a variable with the specified name */
- def fullPath(vname: Name): String = fullPath(vname.toString)
-
- /** the line of code to compute */
- def toCompute = line
-
- /** generate the source code for the object that computes this request */
- def objectSourceCode: String = stringFromWriter { code =>
- val preamble = """
- |@serializable class %s {
- | %s%s
- """.stripMargin.format(objectName, importsPreamble, indentCode(toCompute))
- val postamble = importsTrailer + "\n}"
-
- code println preamble
- handlers foreach { _.extraCodeToEvaluate(this, code) }
- code println postamble
-
- // create a companion object that exposes the single INSTANCE
- code.println("object " + objectName + " {")
- code.println(" val INSTANCE = new " + objectName + "();")
- code.println("}")
- }
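-
- // Illustrative shape of the generated source for a request named line1
- // (schematic): "@serializable class line1$object { <imports> <user code> }"
- // followed by "object line1$object { val INSTANCE = new line1$object(); }".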
-
- /** generate source code for the object that retrieves the result
- from objectSourceCode */
- def resultObjectSourceCode: String = stringFromWriter { code =>
- /** We only want to generate this code when the result
- * is a value which can be referred to as-is.
- */
- val valueExtractor = handlers.last.generatesValue match {
- case Some(vname) if typeOf contains vname =>
- """
- |lazy val scala_repl_value = {
- | scala_repl_result
- | %s
- |}""".stripMargin.format(fullPath(vname))
- case _ => ""
- }
-
- // the first line evaluates the object to make sure its constructor is run;
- // the initial "" lets the later code fragments uniformly begin with +
- val preamble = """
- |object %s {
- | %s
- | val scala_repl_result: String = {
- | %s
- | (""
- """.stripMargin.format(resultObjectName, valueExtractor, objectName + ".INSTANCE" + accessPath)
-
- val postamble = """
- | )
- | }
- |}
- """.stripMargin
-
- code println preamble
- if (printResults) {
- handlers foreach { _.resultExtractionCode(this, code) }
- }
- code println postamble
- }
-
- // compile the object containing the user's code
- lazy val objRun = compileAndSaveRun("<console>", objectSourceCode)
-
- // compile the result-extraction object
- lazy val extractionObjectRun = compileAndSaveRun("<console>", resultObjectSourceCode)
-
- lazy val loadedResultObject = loadByName(resultObjectName)
-
- def extractionValue(): Option[AnyRef] = {
- // ensure it has run
- extractionObjectRun
-
- // load it and retrieve the value
- try Some(loadedResultObject getMethod "scala_repl_value" invoke loadedResultObject)
- catch { case _: Exception => None }
- }
-
- /** Compile the object file. Returns whether the compilation succeeded.
- * If all goes well, the "types" map is computed. */
- def compile(): Boolean = {
- // error counting is unreliable, so the interpreter may overlook a failure; reset the reporter first
- reporter.reset
-
- // compile the main object
- objRun
-
- // bail on error
- if (reporter.hasErrors)
- return false
-
- // extract and remember types
- typeOf
-
- // compile the result-extraction object
- extractionObjectRun
-
- // success
- !reporter.hasErrors
- }
-
- def atNextPhase[T](op: => T): T = compiler.atPhase(objRun.typerPhase.next)(op)
-
- /** The outermost wrapper object */
- lazy val outerResObjSym: Symbol = getMember(EmptyPackage, newTermName(objectName).toTypeName)
-
- /** The innermost object inside the wrapper, found by
- * following accessPath into the outer one. */
- lazy val resObjSym =
- accessPath.split("\\.").foldLeft(outerResObjSym) { (sym, name) =>
- if (name == "") sym else
- atNextPhase(sym.info member newTermName(name))
- }
-
- /* typeOf lookup with encoding */
- def typeOfEnc(vname: Name) = typeOf(compiler encode vname)
-
- /** Types of variables defined by this request. */
- lazy val typeOf: Map[Name, String] = {
- def getTypes(names: List[Name], nameMap: Name => Name): Map[Name, String] = {
- names.foldLeft(Map.empty[Name, String]) { (map, name) =>
- val rawType = atNextPhase(resObjSym.info.member(name).tpe)
- // the types are all =>T; remove the =>
- val cleanedType = rawType match {
- case compiler.PolyType(Nil, rt) => rt
- case rawType => rawType
- }
-
- map + (name -> atNextPhase(cleanedType.toString))
- }
- }
-
- getTypes(valueNames, nme.getterToLocal(_)) ++ getTypes(defNames, identity)
- }
-
- /** load and run the code using reflection */
- def loadAndRun: (String, Boolean) = {
- val resultValMethod: reflect.Method = loadedResultObject getMethod "scala_repl_result"
- // XXX if wrapperExceptions isn't type-annotated we crash scalac
- val wrapperExceptions: List[Class[_ <: Throwable]] =
- List(classOf[InvocationTargetException], classOf[ExceptionInInitializerError])
-
- /** We turn off the binding to accommodate ticket #2817 */
- def onErr: Catcher[(String, Boolean)] = {
- case t: Throwable if bindLastException =>
- withoutBindingLastException {
- quietBind("lastException", "java.lang.Throwable", t)
- (stringFromWriter(t.printStackTrace(_)), false)
- }
- }
-
- catching(onErr) {
- unwrapping(wrapperExceptions: _*) {
- (resultValMethod.invoke(loadedResultObject).toString, true)
- }
- }
- }
-
- override def toString = "Request(line=%s, %s trees)".format(line, trees.size)
- }
-
- /** A container class for methods to be injected into the repl
- * in power mode.
- */
- object power {
- lazy val compiler: repl.compiler.type = repl.compiler
- import compiler.{ phaseNames, atPhase, currentRun }
-
- def mkContext(code: String = "") = compiler.analyzer.rootContext(mkUnit(code))
- def mkAlias(name: String, what: String) = interpret("type %s = %s".format(name, what))
- def mkSourceFile(code: String) = new BatchSourceFile("<console>", code)
- def mkUnit(code: String) = new CompilationUnit(mkSourceFile(code))
-
- def mkTree(code: String): Tree = mkTrees(code).headOption getOrElse EmptyTree
- def mkTrees(code: String): List[Tree] = parse(code) getOrElse Nil
- def mkTypedTrees(code: String*): List[compiler.Tree] = {
- class TyperRun extends compiler.Run {
- override def stopPhase(name: String) = name == "superaccessors"
- }
-
- reporter.reset
- val run = new TyperRun
- run compileSources (code.toList.zipWithIndex map {
- case (s, i) => new BatchSourceFile("<console %d>".format(i), s)
- })
- run.units.toList map (_.body)
- }
- def mkTypedTree(code: String) = mkTypedTrees(code).head
- def mkType(id: String): compiler.Type = stringToCompilerType(id)
-
- def dump(): String = (
- ("Names used: " :: allUsedNames) ++
- ("\nIdentifiers: " :: unqualifiedIds)
- ) mkString " "
-
- lazy val allPhases: List[Phase] = phaseNames map (currentRun phaseNamed _)
- def atAllPhases[T](op: => T): List[(String, T)] = allPhases map (ph => (ph.name, atPhase(ph)(op)))
- def showAtAllPhases(op: => Any): Unit =
- atAllPhases(op.toString) foreach { case (ph, op) => Console.println("%15s -> %s".format(ph, op take 240)) }
- }
-
- def unleash(): Unit = beQuietDuring {
- interpret("import scala.tools.nsc._")
- repl.bind("repl", "spark.repl.SparkInterpreter", this)
- interpret("val global: repl.compiler.type = repl.compiler")
- interpret("val power: repl.power.type = repl.power")
- // interpret("val replVars = repl.replVars")
- }
-
- /** Artificial object demonstrating completion */
- // lazy val replVars = CompletionAware(
- // Map[String, CompletionAware](
- // "ids" -> CompletionAware(() => unqualifiedIds, completionAware _),
- // "synthVars" -> CompletionAware(() => allBoundNames filter isSynthVarName map (_.toString)),
- // "types" -> CompletionAware(() => allSeenTypes map (_.toString)),
- // "implicits" -> CompletionAware(() => allImplicits map (_.toString))
- // )
- // )
-
- /** Returns the name of the most recent interpreter result.
- * Mostly this exists so you can conveniently invoke methods on
- * the previous result.
- */
- def mostRecentVar: String =
- if (mostRecentlyHandledTree.isEmpty) ""
- else mostRecentlyHandledTree.get match {
- case x: ValOrDefDef => x.name
- case Assign(Ident(name), _) => name
- case ModuleDef(_, name, _) => name
- case _ => onull(varNameCreator.mostRecent)
- }
-
- private def requestForName(name: Name): Option[Request] =
- prevRequests.reverse find (_.boundNames contains name)
-
- private def requestForIdent(line: String): Option[Request] = requestForName(newTermName(line))
-
- def stringToCompilerType(id: String): compiler.Type = {
- // if it's a recognized identifier, the type of that; otherwise treat the
- // String like a value (e.g. scala.collection.Map).
- def findType = typeForIdent(id) match {
- case Some(x) => definitions.getClass(newTermName(x)).tpe
- case _ => definitions.getModule(newTermName(id)).tpe
- }
-
- try findType catch { case _: MissingRequirementError => NoType }
- }
-
- def typeForIdent(id: String): Option[String] =
- requestForIdent(id) flatMap (x => x.typeOf get newTermName(id))
-
- def methodsOf(name: String) =
- evalExpr[List[String]](methodsCode(name)) map (x => NameTransformer.decode(getOriginalName(x)))
-
- def completionAware(name: String) = {
- // XXX working around "object is not a value" crash, i.e.
- // import java.util.ArrayList ; ArrayList.<tab>
- clazzForIdent(name) flatMap (_ => evalExpr[Option[CompletionAware]](asCompletionAwareCode(name)))
- }
-
- def extractionValueForIdent(id: String): Option[AnyRef] =
- requestForIdent(id) flatMap (_.extractionValue)
-
- /** Executes code looking for a manifest of type T.
- */
- def manifestFor[T: Manifest] =
- evalExpr[Manifest[T]]("""manifest[%s]""".format(manifest[T]))
-
- /** Executes code looking for an implicit value of type T.
- */
- def implicitFor[T: Manifest] = {
- val s = manifest[T].toString
- evalExpr[Option[T]]("{ def f(implicit x: %s = null): %s = x ; Option(f) }".format(s, s))
- // We don't use implicitly, so a missing implicit yields None instead of failing to compile.
- // evalExpr[T]("""implicitly[%s]""".format(manifest[T]))
- }
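-
- // Example (illustrative): implicitFor[Ordering[Int]] evaluates to Some(...)
- // because an implicit Ordering[Int] is always in scope, while a type with
- // no implicit available yields None rather than a compile error.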
- /** Executes code looking for an implicit conversion from the type
- * of the given identifier to CompletionAware.
- */
- def completionAwareImplicit[T](id: String) = {
- val f1string = "%s => %s".format(typeForIdent(id).get, classOf[CompletionAware].getName)
- val code = """{
- | def f(implicit x: (%s) = null): %s = x
- | val f1 = f
- | if (f1 == null) None else Some(f1(%s))
- |}""".stripMargin.format(f1string, f1string, id)
-
- evalExpr[Option[CompletionAware]](code)
- }
-
- def clazzForIdent(id: String): Option[Class[_]] =
- extractionValueForIdent(id) flatMap (x => Option(x) map (_.getClass))
-
- private def methodsCode(name: String) =
- "%s.%s(%s)".format(classOf[ReflectionCompletion].getName, "methodsOf", name)
-
- private def asCompletionAwareCode(name: String) =
- "%s.%s(%s)".format(classOf[CompletionAware].getName, "unapply", name)
-
- private def getOriginalName(name: String): String =
- nme.originalName(newTermName(name)).toString
-
- case class InterpreterEvalException(msg: String) extends Exception(msg)
- def evalError(msg: String) = throw InterpreterEvalException(msg)
-
- /** The user-facing eval in :power mode wraps an Option.
- */
- def eval[T: Manifest](line: String): Option[T] =
- try Some(evalExpr[T](line))
- catch { case InterpreterEvalException(msg) => out println indentString(msg) ; None }
-
- def evalExpr[T: Manifest](line: String): T = {
- // Nothing means the type could not be inferred.
- if (manifest[T] eq Manifest.Nothing)
- evalError("Could not infer type: try 'eval[SomeType](%s)' instead".format(line))
-
- val lhs = getSynthVarName
- beQuietDuring { interpret("val " + lhs + " = { " + line + " } ") }
-
- // TODO - can we meaningfully compare the inferred type T with
- // the internal compiler Type assigned to lhs?
- // def assignedType = prevRequests.last.typeOf(newTermName(lhs))
-
- val req = requestFromLine(lhs, true) match {
- case Left(result) => evalError(result.toString)
- case Right(req) => req
- }
- if (req == null || !req.compile || req.handlers.size != 1)
- evalError("Eval error.")
-
- try req.extractionValue.get.asInstanceOf[T] catch {
- case e: Exception => evalError(e.getMessage)
- }
- }
-
- def interpretExpr[T: Manifest](code: String): Option[T] = beQuietDuring {
- interpret(code) match {
- case IR.Success =>
- try prevRequests.last.extractionValue map (_.asInstanceOf[T])
- catch { case e: Exception => out println e ; None }
- case _ => None
- }
- }
-
- /** Another entry point for tab-completion, ids in scope */
- private def unqualifiedIdNames() = partialFlatMap(allHandlers) {
- case x: AssignHandler => List(x.helperName)
- case x: ValHandler => List(x.vname)
- case x: ModuleHandler => List(x.name)
- case x: DefHandler => List(x.name)
- case x: ImportHandler => x.importedNames
- } filterNot isSynthVarName
-
- /** Types which have been wildcard imported, such as:
- * val x = "abc" ; import x._ // type java.lang.String
- * import java.lang.String._ // object java.lang.String
- *
- * Used by tab completion.
- *
- * XXX right now this gets import x._ and import java.lang.String._,
- * but doesn't figure out import String._. There's a lot of ad hoc
- * scope twiddling which should be swept away in favor of digging
- * into the compiler scopes.
- */
- def wildcardImportedTypes(): List[Type] = {
- val xs = allHandlers collect { case x: ImportHandler if x.importsWildcard => x.targetType }
- xs.flatten.reverse.distinct
- }
-
- /** Another entry point for tab-completion, ids in scope */
- def unqualifiedIds() = (unqualifiedIdNames() map (_.toString)).distinct.sorted
-
- /** For static/object method completion */
- def getClassObject(path: String): Option[Class[_]] = //classLoader tryToLoadClass path
- try {
- Some(Class.forName(path, true, classLoader))
- } catch {
- case e: Exception => None
- }
-
- /** Parse the ScalaSig to find type aliases */
- def aliasForType(path: String) = ByteCode.aliasForType(path)
-
- // Coming soon
- // implicit def string2liftedcode(s: String): LiftedCode = new LiftedCode(s)
- // case class LiftedCode(code: String) {
- // val lifted: String = {
- // beQuietDuring { interpret(code) }
- // eval2[String]("({ " + code + " }).toString")
- // }
- // def >> : String = lifted
- // }
-
- // debugging
- def isReplDebug = settings.Yrepldebug.value
- def isCompletionDebug = settings.Ycompletion.value
- def DBG(s: String) = if (isReplDebug) out println s else ()
-}
-
-/** Utility methods for the Interpreter. */
-object SparkInterpreter {
-
- import scala.collection.generic.CanBuildFrom
- def partialFlatMap[A, B, CC[X] <: Traversable[X]]
- (coll: CC[A])
- (pf: PartialFunction[A, CC[B]])
- (implicit bf: CanBuildFrom[CC[A], B, CC[B]]) =
- {
- val b = bf(coll)
- for (x <- coll collect pf)
- b ++= x
-
- b.result
- }
-
- object DebugParam {
- implicit def tuple2debugparam[T](x: (String, T))(implicit m: Manifest[T]): DebugParam[T] =
- DebugParam(x._1, x._2)
-
- implicit def any2debugparam[T](x: T)(implicit m: Manifest[T]): DebugParam[T] =
- DebugParam("p" + getCount(), x)
-
- private var counter = 0
- def getCount() = { counter += 1; counter }
- }
- case class DebugParam[T](name: String, param: T)(implicit m: Manifest[T]) {
- val manifest = m
- val typeStr = {
- val str = manifest.toString
- // I'm sure there are more to be discovered...
- val regexp1 = """(.*?)\[(.*)\]""".r
- val regexp2str = """.*\.type#"""
- val regexp2 = (regexp2str + """(.*)""").r
-
- (str.replaceAll("""\n""", "")) match {
- case regexp1(clazz, typeArgs) => "%s[%s]".format(clazz, typeArgs.replaceAll(regexp2str, ""))
- case regexp2(clazz) => clazz
- case _ => str
- }
- }
- }
- def breakIf(assertion: => Boolean, args: DebugParam[_]*): Unit =
- if (assertion) break(args.toList)
-
- // start a repl, binding supplied args
- def break(args: List[DebugParam[_]]): Unit = {
- val intLoop = new SparkInterpreterLoop
- intLoop.settings = new Settings(Console.println)
- // XXX come back to the dot handling
- intLoop.settings.classpath.value = "."
- intLoop.createInterpreter
- intLoop.in = SparkInteractiveReader.createDefault(intLoop.interpreter)
-
- // rebind exit so people don't accidentally call System.exit by way of predef
- intLoop.interpreter.beQuietDuring {
- intLoop.interpreter.interpret("""def exit = println("Type :quit to resume program execution.")""")
- for (p <- args) {
- intLoop.interpreter.bind(p.name, p.typeStr, p.param)
- Console println "%s: %s".format(p.name, p.typeStr)
- }
- }
- intLoop.repl()
- intLoop.closeInterpreter
- }
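-
- // Usage sketch (illustrative): drop into a nested repl when a condition
- // holds, binding local values so they can be inspected there:
- // SparkInterpreter.breakIf(count > 100, "count" -> count)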
-
- def codegenln(leadingPlus: Boolean, xs: String*): String = codegen(leadingPlus, (xs ++ Array("\n")): _*)
- def codegenln(xs: String*): String = codegenln(true, xs: _*)
-
- def codegen(xs: String*): String = codegen(true, xs: _*)
- def codegen(leadingPlus: Boolean, xs: String*): String = {
- val front = if (leadingPlus) "+ " else ""
- front + (xs map string2codeQuoted mkString " + ")
- }
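-
- // Example (illustrative): codegenln("defined module ", "Foo") produces the
- // code fragment + "defined module " + "Foo" + "\u000a" (string2code turns
- // the newline into a unicode escape).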
-
- def string2codeQuoted(str: String) = "\"" + string2code(str) + "\""
-
- /** Convert a string into code that can recreate the string.
- * This requires replacing all special characters by escape
- * codes. It does not add the surrounding " marks. */
- def string2code(str: String): String = {
- val res = new StringBuilder
- for (c <- str) c match {
- case '"' | '\'' | '\\' => res += '\\' ; res += c
- case _ if c.isControl => res ++= Chars.char2uescape(c)
- case _ => res += c
- }
- res.toString
- }
-}
-
diff --git a/repl/src/main/scala/spark/repl/SparkInterpreterLoop.scala b/repl/src/main/scala/spark/repl/SparkInterpreterLoop.scala
deleted file mode 100644
index 881c27d471..0000000000
--- a/repl/src/main/scala/spark/repl/SparkInterpreterLoop.scala
+++ /dev/null
@@ -1,662 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2010 LAMP/EPFL
- * @author Alexander Spoon
- */
-
-package spark.repl
-
-import scala.tools.nsc
-import scala.tools.nsc._
-
-import Predef.{ println => _, _ }
-import java.io.{ BufferedReader, FileReader, PrintWriter }
-import java.io.IOException
-
-import scala.tools.nsc.{ InterpreterResults => IR }
-import scala.annotation.tailrec
-import scala.collection.mutable.ListBuffer
-import scala.concurrent.ops
-import util.{ ClassPath }
-import interpreter._
-import io.{ File, Process }
-
-import spark.SparkContext
-
-// Classes to wrap up interpreter commands and their results
-// You can add new commands by adding entries to val commands
-// inside InterpreterLoop.
-trait InterpreterControl {
- self: SparkInterpreterLoop =>
-
- // the default result means "keep running, and don't record that line"
- val defaultResult = Result(true, None)
-
- // a single interpreter command
- sealed abstract class Command extends Function1[List[String], Result] {
- def name: String
- def help: String
- def error(msg: String) = {
- out.println(":" + name + " " + msg + ".")
- Result(true, None)
- }
- def usage(): String
- }
-
- case class NoArgs(name: String, help: String, f: () => Result) extends Command {
- def usage(): String = ":" + name
- def apply(args: List[String]) = if (args.isEmpty) f() else error("accepts no arguments")
- }
-
- case class LineArg(name: String, help: String, f: (String) => Result) extends Command {
- def usage(): String = ":" + name + " <line>"
- def apply(args: List[String]) = f(args mkString " ")
- }
-
- case class OneArg(name: String, help: String, f: (String) => Result) extends Command {
- def usage(): String = ":" + name + " <arg>"
- def apply(args: List[String]) =
- if (args.size == 1) f(args.head)
- else error("requires exactly one argument")
- }
-
- case class VarArgs(name: String, help: String, f: (List[String]) => Result) extends Command {
- def usage(): String = ":" + name + " [arg]"
- def apply(args: List[String]) = f(args)
- }
-
- // the result of a single command
- case class Result(keepRunning: Boolean, lineToRecord: Option[String])
-}
-
-/** The
- * <a href="http://scala-lang.org/" target="_top">Scala</a>
- * interactive shell. It provides a read-eval-print loop around
- * the Interpreter class.
- * After instantiation, clients should call the <code>main()</code> method.
- *
- * <p>If no in0 is specified, then input will come from the console, and
- * the class will attempt to provide input editing feature such as
- * input history.
- *
- * @author Moez A. Abdel-Gawad
- * @author Lex Spoon
- * @version 1.2
- */
-class SparkInterpreterLoop(
- in0: Option[BufferedReader], val out: PrintWriter, master: Option[String])
-extends InterpreterControl {
- def this(in0: BufferedReader, out: PrintWriter, master: String) =
- this(Some(in0), out, Some(master))
-
- def this(in0: BufferedReader, out: PrintWriter) =
- this(Some(in0), out, None)
-
- def this() = this(None, new PrintWriter(Console.out), None)
-
- /** The input stream from which commands come, set by main() */
- var in: SparkInteractiveReader = _
-
- /** The context class loader at the time this object was created */
- protected val originalClassLoader = Thread.currentThread.getContextClassLoader
-
- var settings: Settings = _ // set by main()
- var interpreter: SparkInterpreter = _ // set by createInterpreter()
-
- // classpath entries added via :cp
- var addedClasspath: String = ""
-
- /** A reverse list of commands to replay if the user requests a :replay */
- var replayCommandStack: List[String] = Nil
-
- /** A list of commands to replay if the user requests a :replay */
- def replayCommands = replayCommandStack.reverse
-
- /** Record a command for replay should the user request a :replay */
- def addReplay(cmd: String) = replayCommandStack ::= cmd
-
- /** Close the interpreter and set the var to <code>null</code>. */
- def closeInterpreter() {
- if (interpreter ne null) {
- interpreter.close
- interpreter = null
- Thread.currentThread.setContextClassLoader(originalClassLoader)
- }
- }
-
- /** Create a new interpreter. */
- def createInterpreter() {
- if (addedClasspath != "")
- settings.classpath append addedClasspath
-
- interpreter = new SparkInterpreter(settings, out) {
- override protected def parentClassLoader =
- classOf[SparkInterpreterLoop].getClassLoader
- }
- interpreter.setContextClassLoader()
- // interpreter.quietBind("settings", "spark.repl.SparkInterpreterSettings", interpreter.isettings)
- }
-
- /** print a friendly help message */
- def printHelp() = {
- out println "All commands can be abbreviated - for example :he instead of :help.\n"
- val cmds = commands map (x => (x.usage, x.help))
- val width: Int = cmds map { case (x, _) => x.length } max
- val formatStr = "%-" + width + "s %s"
- cmds foreach { case (usage, help) => out println formatStr.format(usage, help) }
- }
-
- /** Print a welcome message */
- def printWelcome() {
- plushln("""Welcome to
- ____ __
- / __/__ ___ _____/ /__
- _\ \/ _ \/ _ `/ __/ '_/
- /___/ .__/\_,_/_/ /_/\_\ version 0.4
- /_/
-""")
-
- import Properties._
- val welcomeMsg = "Using Scala %s (%s, Java %s)".format(
- versionString, javaVmName, javaVersion)
- plushln(welcomeMsg)
- }
-
- /** Show the history */
- def printHistory(xs: List[String]) {
- val defaultLines = 20
-
- if (in.history.isEmpty)
- return println("No history available.")
-
- val current = in.history.get.index
- val count = try xs.head.toInt catch { case _: Exception => defaultLines }
- val lines = in.historyList takeRight count
- val offset = current - lines.size + 1
-
- for ((line, index) <- lines.zipWithIndex)
- println("%d %s".format(index + offset, line))
- }
-
- /** Some print conveniences */
- def println(x: Any) = out println x
- def plush(x: Any) = { out print x ; out.flush() }
- def plushln(x: Any) = { out println x ; out.flush() }
-
- /** Search the history */
- def searchHistory(_cmdline: String) {
- val cmdline = _cmdline.toLowerCase
-
- if (in.history.isEmpty)
- return println("No history available.")
-
- val current = in.history.get.index
- val offset = current - in.historyList.size + 1
-
- for ((line, index) <- in.historyList.zipWithIndex ; if line.toLowerCase contains cmdline)
- println("%d %s".format(index + offset, line))
- }
-
- /** Prompt to print when awaiting input */
- val prompt = Properties.shellPromptString
-
- // most commands do not want to micromanage the Result, but they might want
- // to print something to the console, so we accommodate Unit and String returns.
- object CommandImplicits {
- implicit def u2ir(x: Unit): Result = defaultResult
- implicit def s2ir(s: String): Result = {
- out println s
- defaultResult
- }
- }
-
- /** Standard commands */
- val standardCommands: List[Command] = {
- import CommandImplicits._
- List(
- OneArg("cp", "add an entry (jar or directory) to the classpath", addClasspath),
- NoArgs("help", "print this help message", printHelp),
- VarArgs("history", "show the history (optional arg: lines to show)", printHistory),
- LineArg("h?", "search the history", searchHistory),
- OneArg("load", "load and interpret a Scala file", load),
- NoArgs("power", "enable power user mode", power),
- NoArgs("quit", "exit the interpreter", () => Result(false, None)),
- NoArgs("replay", "reset execution and replay all previous commands", replay),
- LineArg("sh", "fork a shell and run a command", runShellCmd),
- NoArgs("silent", "disable/enable automatic printing of results", verbosity)
- )
- }
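-
- // Adding a command (illustrative sketch): one more entry in the list above
- // is all that is needed, e.g.
- // NoArgs("ping", "print pong", () => { out.println("pong"); defaultResult })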
-
- /** Power user commands */
- var powerUserOn = false
- val powerCommands: List[Command] = {
- import CommandImplicits._
- List(
- OneArg("completions", "generate list of completions for a given String", completions),
- NoArgs("dump", "displays a view of the interpreter's internal state", () => interpreter.power.dump())
-
- // VarArgs("tree", "displays ASTs for specified identifiers",
- // (xs: List[String]) => interpreter dumpTrees xs)
- // LineArg("meta", "given code which produces scala code, executes the results",
- // (xs: List[String]) => )
- )
- }
-
- /** Available commands */
- def commands: List[Command] = standardCommands ::: (if (powerUserOn) powerCommands else Nil)
-
- def initializeSpark() {
- interpreter.beQuietDuring {
- command("""
- spark.repl.Main.interp.out.println("Registering with Mesos...");
- spark.repl.Main.interp.out.flush();
- @transient val sc = spark.repl.Main.interp.createSparkContext();
- sc.waitForRegister();
- spark.repl.Main.interp.out.println("Spark context available as sc.");
- spark.repl.Main.interp.out.flush();
- """)
- command("import spark.SparkContext._");
- }
- plushln("Type in expressions to have them evaluated.")
- plushln("Type :help for more information.")
- }
-
- var sparkContext: SparkContext = null
-
- def createSparkContext(): SparkContext = {
- val master = this.master match {
- case Some(m) => m
- case None => {
- val prop = System.getenv("MASTER")
- if (prop != null) prop else "local"
- }
- }
- sparkContext = new SparkContext(master, "Spark shell")
- sparkContext
- }
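-
- // Resolution order (per the code above): an explicit master passed to the
- // constructor wins, then the MASTER environment variable, then "local".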
-
- /** The main read-eval-print loop for the interpreter. It calls
- * <code>command()</code> for each line of input, and stops when
- * <code>command()</code> returns <code>false</code>.
- */
- def repl() {
- def readOneLine() = {
- out.flush
- in readLine prompt
- }
- // return false if repl should exit
- def processLine(line: String): Boolean =
- if (line eq null) false // assume null means EOF
- else command(line) match {
- case Result(false, _) => false
- case Result(_, Some(finalLine)) => addReplay(finalLine) ; true
- case _ => true
- }
-
- while (processLine(readOneLine)) { }
- }
-
- /** interpret all lines from a specified file */
- def interpretAllFrom(file: File) {
- val oldIn = in
- val oldReplay = replayCommandStack
-
- try file applyReader { reader =>
- in = new SparkSimpleReader(reader, out, false)
- plushln("Loading " + file + "...")
- repl()
- }
- finally {
- in = oldIn
- replayCommandStack = oldReplay
- }
- }
-
- /** create a new interpreter and replay all commands so far */
- def replay() {
- closeInterpreter()
- createInterpreter()
- for (cmd <- replayCommands) {
- plushln("Replaying: " + cmd) // flush because maybe cmd will have its own output
- command(cmd)
- out.println
- }
- }
-
- /** fork a shell and run a command */
- def runShellCmd(line: String) {
- // we assume if they're using :sh they'd appreciate being able to pipeline
- interpreter.beQuietDuring {
- interpreter.interpret("import _root_.scala.tools.nsc.io.Process.Pipe._")
- }
- val p = Process(line)
- // only bind non-empty streams
- def add(name: String, it: Iterator[String]) =
- if (it.hasNext) interpreter.bind(name, "scala.List[String]", it.toList)
-
- List(("stdout", p.stdout), ("stderr", p.stderr)) foreach (add _).tupled
- }
-
- def withFile(filename: String)(action: File => Unit) {
- val f = File(filename)
-
- if (f.exists) action(f)
- else out.println("That file does not exist")
- }
-
- def load(arg: String) = {
- var shouldReplay: Option[String] = None
- withFile(arg)(f => {
- interpretAllFrom(f)
- shouldReplay = Some(":load " + arg)
- })
- Result(true, shouldReplay)
- }
-
- def addClasspath(arg: String): Unit = {
- val f = File(arg).normalize
- if (f.exists) {
- addedClasspath = ClassPath.join(addedClasspath, f.path)
- val totalClasspath = ClassPath.join(settings.classpath.value, addedClasspath)
- println("Added '%s'. Your new classpath is:\n%s".format(f.path, totalClasspath))
- replay()
- }
- else out.println("The path '" + f + "' doesn't seem to exist.")
- }
-
- def completions(arg: String): Unit = {
- val comp = in.completion getOrElse { return println("Completion unavailable.") }
- val xs = comp completions arg
-
- injectAndName(xs)
- }
-
- def power() {
- val powerUserBanner =
- """** Power User mode enabled - BEEP BOOP **
- |** scala.tools.nsc._ has been imported **
- |** New vals! Try repl, global, power **
- |** New cmds! :help to discover them **
- |** New defs! Type power.<tab> to reveal **""".stripMargin
-
- powerUserOn = true
- interpreter.unleash()
- injectOne("history", in.historyList)
- in.completion foreach (x => injectOne("completion", x))
- out println powerUserBanner
- }
-
- def verbosity() = {
- val old = interpreter.printResults
- interpreter.printResults = !old
- out.println("Switched " + (if (old) "off" else "on") + " result printing.")
- }
-
- /** Run one command submitted by the user. Two values are returned:
- * (1) whether to keep running, (2) the line to record for replay,
- * if any. */
- def command(line: String): Result = {
- def withError(msg: String) = {
- out println msg
- Result(true, None)
- }
- def ambiguous(cmds: List[Command]) = "Ambiguous: did you mean " + cmds.map(":" + _.name).mkString(" or ") + "?"
-
- // not a command
- if (!line.startsWith(":")) {
- // Notice failure to create compiler
- if (interpreter.compiler == null) return Result(false, None)
- else return Result(true, interpretStartingWith(line))
- }
-
- val tokens = (line drop 1 split """\s+""").toList
- if (tokens.isEmpty)
- return withError(ambiguous(commands))
-
- val (cmd :: args) = tokens
-
- // this lets us add commands willy-nilly and only requires enough of the command name to disambiguate
- commands.filter(_.name startsWith cmd) match {
- case List(x) => x(args)
- case Nil => withError("Unknown command. Type :help for help.")
- case xs => withError(ambiguous(xs))
- }
- }
-
- private val CONTINUATION_STRING = " | "
- private val PROMPT_STRING = "scala> "
-
- /** If it looks like they're pasting in a scala interpreter
- * transcript, remove all the formatting we inserted so we
- * can make some sense of it.
- */
- private var pasteStamp: Long = 0
-
- /** Returns true when enough time has elapsed since the last read to consider the paste finished. */
- def updatePasteStamp(): Boolean = {
- /* Enough milliseconds between readLines to call it a day. */
- val PASTE_FINISH = 1000
-
- val prevStamp = pasteStamp
- pasteStamp = System.currentTimeMillis
-
- (pasteStamp - prevStamp > PASTE_FINISH)
- }
- /** TODO - we could look for the usage of resXX variables in the transcript.
- * Right now backreferences to auto-named variables will break.
- */
-
- /** The trailing lines complication was an attempt to work around the introduction
- * of newlines in e.g. email messages of repl sessions. It doesn't work because
- * an unlucky newline can always leave you with a syntactically valid first line,
- * which is executed before the next line is considered. So this doesn't actually
- * accomplish anything, but I'm leaving it in case I decide to try harder.
- */
- case class PasteCommand(cmd: String, trailing: ListBuffer[String] = ListBuffer[String]())
-
- /** Commands start on lines beginning with "scala>" and each successive
- * line which begins with the continuation string is appended to that command.
- * Everything else is discarded. When the end of the transcript is spotted,
- * all the commands are replayed.
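- *
- *  For example, the pasted transcript
- *    scala> val n = 1 +
- *         |   2
- *    n: Int = 3
- *  yields one PasteCommand for "val n = 1 +" plus its continuation line;
- *  the result line "n: Int = 3" survives only as trailing text.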
- */
- @tailrec private def cleanTranscript(lines: List[String], acc: List[PasteCommand]): List[PasteCommand] = lines match {
- case Nil => acc.reverse
- case x :: xs if x startsWith PROMPT_STRING =>
- val first = x stripPrefix PROMPT_STRING
- val (xs1, xs2) = xs span (_ startsWith CONTINUATION_STRING)
- val rest = xs1 map (_ stripPrefix CONTINUATION_STRING)
- val result = (first :: rest).mkString("", "\n", "\n")
-
- cleanTranscript(xs2, PasteCommand(result) :: acc)
-
- case ln :: lns =>
- val newacc = acc match {
- case Nil => Nil
- case PasteCommand(cmd, trailing) :: accrest =>
- PasteCommand(cmd, trailing :+ ln) :: accrest
- }
- cleanTranscript(lns, newacc)
- }
-
- /** The timestamp is for safety so it doesn't hang looking for the end
- * of a transcript. Ad hoc parsing can't be too demanding. You can
- * also use ctrl-D to start it parsing.
- */
- @tailrec private def interpretAsPastedTranscript(lines: List[String]) {
- val line = in.readLine("")
- val finished = updatePasteStamp()
-
- if (line == null || finished || line.trim == PROMPT_STRING.trim) {
- val xs = cleanTranscript(lines.reverse, Nil)
- println("Replaying %d commands from interpreter transcript." format xs.size)
- for (PasteCommand(cmd, trailing) <- xs) {
- out.flush()
- def runCode(code: String, extraLines: List[String]) {
- (interpreter interpret code) match {
- case IR.Incomplete if extraLines.nonEmpty =>
- runCode(code + "\n" + extraLines.head, extraLines.tail)
- case _ => ()
- }
- }
- runCode(cmd, trailing.toList)
- }
- }
- else
- interpretAsPastedTranscript(line :: lines)
- }
-
- /** Interpret expressions starting with the first line.
- * Read lines until a complete compilation unit is available
- * or until a syntax error has been seen. If a full unit is
- * read, go ahead and interpret it. Return the full string
- * to be recorded for replay, if any.
- */
- def interpretStartingWith(code: String): Option[String] = {
- // signal to the completion object that non-completion input has been received
- in.completion foreach (_.resetVerbosity())
-
- def reallyInterpret = interpreter.interpret(code) match {
- case IR.Error => None
- case IR.Success => Some(code)
- case IR.Incomplete =>
- if (in.interactive && code.endsWith("\n\n")) {
- out.println("You typed two blank lines. Starting a new command.")
- None
- }
- else in.readLine(CONTINUATION_STRING) match {
- case null =>
- // we know compilation is going to fail since we're at EOF and the
- // parser thinks the input is still incomplete, but since this is
- // a file being read non-interactively we want to fail. So we send
- // it straight to the compiler for the nice error message.
- interpreter.compileString(code)
- None
-
- case line => interpretStartingWith(code + "\n" + line)
- }
- }
-
- /** Here we place ourselves between the user and the interpreter and examine
- * the input they are ostensibly submitting. We intervene in several cases:
- *
- * 1) If the line starts with "scala> " it is assumed to be an interpreter paste.
- * 2) If the line starts with "." (but not ".." or "./") it is treated as an invocation
- * on the previous result.
- * 3) If the Completion object's execute returns Some(_), we inject that value
- * and avoid the interpreter, as it's likely not valid scala code.
- */
- if (code == "") None
- else if (code startsWith PROMPT_STRING) {
- updatePasteStamp()
- interpretAsPastedTranscript(List(code))
- None
- }
- else if (Completion.looksLikeInvocation(code) && interpreter.mostRecentVar != "") {
- interpretStartingWith(interpreter.mostRecentVar + code)
- }
- else {
- val result = for (comp <- in.completion ; res <- comp execute code) yield res
- result match {
- case Some(res) => injectAndName(res) ; None // completion took responsibility, so do not parse
- case _ => reallyInterpret
- }
- }
- }
-
- // runs :load <file> on any files passed via -i
- def loadFiles(settings: Settings) = settings match {
- case settings: GenericRunnerSettings =>
- for (filename <- settings.loadfiles.value) {
- val cmd = ":load " + filename
- command(cmd)
- addReplay(cmd)
- out.println()
- }
- case _ =>
- }
-
- def main(settings: Settings) {
- this.settings = settings
- createInterpreter()
-
- // sets in to some kind of reader depending on environmental cues
- in = in0 match {
- case Some(in0) => new SparkSimpleReader(in0, out, true)
- case None =>
- // the interpreter is passed as an argument to expose tab completion info
- if (settings.Xnojline.value || Properties.isEmacsShell) new SparkSimpleReader
- else if (settings.noCompletion.value) SparkInteractiveReader.createDefault()
- else SparkInteractiveReader.createDefault(interpreter)
- }
-
- loadFiles(settings)
- try {
- // it is broken on startup; go ahead and exit
- if (interpreter.reporter.hasErrors) return
-
- printWelcome()
-
- // this is about the illusion of snappiness. We call initialize()
- // which spins off a separate thread, then print the prompt and try
- // our best to look ready. Ideally the user will spend a
- // couple seconds saying "wow, it starts so fast!" and by the time
- // they type a command the compiler is ready to roll.
- interpreter.initialize()
- initializeSpark()
- repl()
- }
- finally closeInterpreter()
- }
-
- private def objClass(x: Any) = x.asInstanceOf[AnyRef].getClass
- private def objName(x: Any) = {
- val clazz = objClass(x)
- val typeParams = clazz.getTypeParameters
- val basename = clazz.getName
- val tpString = if (typeParams.isEmpty) "" else "[%s]".format(typeParams map (_ => "_") mkString ", ")
-
- basename + tpString
- }
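- // e.g. objName(new java.util.HashMap[String, AnyRef]) yields
- // "java.util.HashMap[_, _]": each type parameter becomes an existential "_".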
-
- // injects one value into the repl; returns pair of name and class
- def injectOne(name: String, obj: Any): Tuple2[String, String] = {
- val className = objName(obj)
- interpreter.quietBind(name, className, obj)
- (name, className)
- }
- def injectAndName(obj: Any): Tuple2[String, String] = {
- val name = interpreter.getVarName
- val className = objName(obj)
- interpreter.bind(name, className, obj)
- (name, className)
- }
-
- // injects list of values into the repl; returns summary string
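- // e.g. injectDebug(List("a", 'b')) binds p1: java.lang.String and
- // p2: java.lang.Character, then returns the matching "Variables set:" summary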
- def injectDebug(args: List[Any]): String = {
- val strs =
- for ((arg, i) <- args.zipWithIndex) yield {
- val varName = "p" + (i + 1)
- val (vname, vtype) = injectOne(varName, arg)
- vname + ": " + vtype
- }
-
- if (strs.size == 0) "Set no variables."
- else "Variables set:\n" + strs.mkString("\n")
- }
-
- /** process command-line arguments and do as they request */
- def main(args: Array[String]) {
- def error1(msg: String) = out println ("scala: " + msg)
- val command = new InterpreterCommand(args.toList, error1)
- def neededHelp(): String =
- (if (command.settings.help.value) command.usageMsg + "\n" else "") +
- (if (command.settings.Xhelp.value) command.xusageMsg + "\n" else "")
-
- // if they asked for no help and command is valid, we call the real main
- neededHelp() match {
- case "" => if (command.ok) main(command.settings) // else nothing
- case help => plush(help)
- }
- }
-}
-
diff --git a/repl/src/main/scala/spark/repl/SparkInterpreterSettings.scala b/repl/src/main/scala/spark/repl/SparkInterpreterSettings.scala
deleted file mode 100644
index ffa477785b..0000000000
--- a/repl/src/main/scala/spark/repl/SparkInterpreterSettings.scala
+++ /dev/null
@@ -1,112 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2010 LAMP/EPFL
- * @author Alexander Spoon
- */
-
-package spark.repl
-
-import scala.tools.nsc
-import scala.tools.nsc._
-
-/** Settings for the interpreter
- *
- * @version 1.0
- * @author Lex Spoon, 2007/3/24
- **/
-class SparkInterpreterSettings(repl: SparkInterpreter) {
- /** A list of paths where :load should look */
- var loadPath = List(".")
-
- /** The maximum length of toString to use when printing the result
- * of an evaluation. 0 means no maximum. If a printout requires
- * more than this number of characters, then the printout is
- * truncated.
- */
- var maxPrintString = 800
-
- /** The maximum number of completion candidates to print for tab
- * completion without requiring confirmation.
- */
- var maxAutoprintCompletion = 250
-
- /** String unwrapping can be disabled if it is causing issues.
- * Setting this to false means you will see Strings like "$iw.$iw.".
- */
- var unwrapStrings = true
-
- def deprecation_=(x: Boolean) = {
- val old = repl.settings.deprecation.value
- repl.settings.deprecation.value = x
- if (!old && x) println("Enabled -deprecation output.")
- else if (old && !x) println("Disabled -deprecation output.")
- }
- def deprecation: Boolean = repl.settings.deprecation.value
-
- def allSettings = Map(
- "maxPrintString" -> maxPrintString,
- "maxAutoprintCompletion" -> maxAutoprintCompletion,
- "unwrapStrings" -> unwrapStrings,
- "deprecation" -> deprecation
- )
-
- private def allSettingsString =
- allSettings.toList sortBy (_._1) map { case (k, v) => " " + k + " = " + v + "\n" } mkString
-
- override def toString = """
- | SparkInterpreterSettings {
- | %s
- | }""".stripMargin.format(allSettingsString)
-}
-
-/* Utilities for the InterpreterSettings class
- *
- * @version 1.0
- * @author Lex Spoon, 2007/5/24
- */
-object SparkInterpreterSettings {
- /** Source code for the InterpreterSettings class. This is
- * used so that the interpreter is sure to have the code
- * available.
- *
- * XXX I'm not seeing why this degree of defensiveness is necessary.
- * If files are missing the repl's not going to work, it's not as if
- * we have string source backups for anything else.
- */
- val sourceCodeForClass =
-"""
-package scala.tools.nsc
-
-/** Settings for the interpreter
- *
- * @version 1.0
- * @author Lex Spoon, 2007/3/24
- **/
-class SparkInterpreterSettings(repl: Interpreter) {
- /** A list of paths where :load should look */
- var loadPath = List(".")
-
- /** The maximum length of toString to use when printing the result
- * of an evaluation. 0 means no maximum. If a printout requires
- * more than this number of characters, then the printout is
- * truncated.
- */
- var maxPrintString = 2400
-
- def deprecation_=(x: Boolean) = {
- val old = repl.settings.deprecation.value
- repl.settings.deprecation.value = x
- if (!old && x) println("Enabled -deprecation output.")
- else if (old && !x) println("Disabled -deprecation output.")
- }
- def deprecation: Boolean = repl.settings.deprecation.value
-
- override def toString =
- "SparkInterpreterSettings {\n" +
-// " loadPath = " + loadPath + "\n" +
- " maxPrintString = " + maxPrintString + "\n" +
- "}"
-}
-
-"""
-
-}
diff --git a/repl/src/main/scala/spark/repl/SparkCompletion.scala b/repl/src/main/scala/spark/repl/SparkJLineCompletion.scala
index c6ed1860f0..5d12898732 100644
--- a/repl/src/main/scala/spark/repl/SparkCompletion.scala
+++ b/repl/src/main/scala/spark/repl/SparkJLineCompletion.scala
@@ -1,62 +1,31 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2010 LAMP/EPFL
+ * Copyright 2005-2011 LAMP/EPFL
* @author Paul Phillips
*/
-
package spark.repl
-import scala.tools.nsc
import scala.tools.nsc._
-import scala.tools.nsc.interpreter
import scala.tools.nsc.interpreter._
-import jline._
-import java.util.{ List => JList }
-import util.returning
-
-object SparkCompletion {
- def looksLikeInvocation(code: String) = (
- (code != null)
- && (code startsWith ".")
- && !(code == ".")
- && !(code startsWith "./")
- && !(code startsWith "..")
- )
-
- object Forwarder {
- def apply(forwardTo: () => Option[CompletionAware]): CompletionAware = new CompletionAware {
- def completions(verbosity: Int) = forwardTo() map (_ completions verbosity) getOrElse Nil
- override def follow(s: String) = forwardTo() flatMap (_ follow s)
- }
- }
-}
-import SparkCompletion._
+import scala.tools.jline._
+import scala.tools.jline.console.completer._
+import Completion._
+import collection.mutable.ListBuffer
// REPL completor - queries supplied interpreter for valid
// completions based on current contents of buffer.
-class SparkCompletion(val repl: SparkInterpreter) extends SparkCompletionOutput {
- // verbosity goes up with consecutive tabs
- private var verbosity: Int = 0
- def resetVerbosity() = verbosity = 0
-
- def isCompletionDebug = repl.isCompletionDebug
- def DBG(msg: => Any) = if (isCompletionDebug) println(msg.toString)
- def debugging[T](msg: String): T => T = (res: T) => returning[T](res)(x => DBG(msg + x))
-
- lazy val global: repl.compiler.type = repl.compiler
+class JLineCompletion(val intp: SparkIMain) extends Completion with CompletionOutput {
+ val global: intp.global.type = intp.global
import global._
import definitions.{ PredefModule, RootClass, AnyClass, AnyRefClass, ScalaPackage, JavaLangPackage }
-
- // XXX not yet used.
- lazy val dottedPaths = {
- def walk(tp: Type): scala.List[Symbol] = {
- val pkgs = tp.nonPrivateMembers filter (_.isPackage)
- pkgs ++ (pkgs map (_.tpe) flatMap walk)
- }
- walk(RootClass.tpe)
- }
+ type ExecResult = Any
+ import intp.{ DBG, debugging, afterTyper }
+ // verbosity goes up with consecutive tabs
+ private var verbosity: Int = 0
+ def resetVerbosity() = verbosity = 0
+
def getType(name: String, isModule: Boolean) = {
val f = if (isModule) definitions.getModule(_: Name) else definitions.getClass(_: Name)
try Some(f(name).tpe)
@@ -69,53 +38,74 @@ class SparkCompletion(val repl: SparkInterpreter) extends SparkCompletionOutput
trait CompilerCompletion {
def tp: Type
def effectiveTp = tp match {
- case MethodType(Nil, resType) => resType
- case PolyType(Nil, resType) => resType
- case _ => tp
+ case MethodType(Nil, resType) => resType
+ case NullaryMethodType(resType) => resType
+ case _ => tp
}
// for some reason any's members don't show up in subclasses, which
// we need so 5.<tab> offers asInstanceOf etc.
private def anyMembers = AnyClass.tpe.nonPrivateMembers
- def anyRefMethodsToShow = List("isInstanceOf", "asInstanceOf", "toString")
+ def anyRefMethodsToShow = Set("isInstanceOf", "asInstanceOf", "toString")
def tos(sym: Symbol) = sym.name.decode.toString
def memberNamed(s: String) = members find (x => tos(x) == s)
def hasMethod(s: String) = methods exists (x => tos(x) == s)
-
+
// XXX we'd like to say "filterNot (_.isDeprecated)" but this causes the
// compiler to crash for reasons not yet known.
- def members = (effectiveTp.nonPrivateMembers ++ anyMembers) filter (_.isPublic)
+ def members = afterTyper((effectiveTp.nonPrivateMembers ++ anyMembers) filter (_.isPublic))
def methods = members filter (_.isMethod)
def packages = members filter (_.isPackage)
def aliases = members filter (_.isAliasType)
-
+
def memberNames = members map tos
def methodNames = methods map tos
def packageNames = packages map tos
def aliasNames = aliases map tos
}
-
+
object TypeMemberCompletion {
+ def apply(tp: Type, runtimeType: Type, param: NamedParam): TypeMemberCompletion = {
+ new TypeMemberCompletion(tp) {
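+ // On a later consecutive tab (verbosity > 0) we lazily "upgrade": rebind
+ // the stable value to its runtime type so completion can also offer
+ // members of the dynamic class, not just the static one.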
+ var upgraded = false
+ lazy val upgrade = {
+ intp rebind param
+ intp.reporter.printMessage("\nRebinding stable value %s from %s to %s".format(param.name, tp, param.tpe))
+ upgraded = true
+ new TypeMemberCompletion(runtimeType)
+ }
+ override def completions(verbosity: Int) = {
+ super.completions(verbosity) ++ (
+ if (verbosity == 0) Nil
+ else upgrade.completions(verbosity)
+ )
+ }
+ override def follow(s: String) = super.follow(s) orElse {
+ if (upgraded) upgrade.follow(s)
+ else None
+ }
+ override def alternativesFor(id: String) = super.alternativesFor(id) ++ (
+ if (upgraded) upgrade.alternativesFor(id)
+ else Nil
+ ) distinct
+ }
+ }
def apply(tp: Type): TypeMemberCompletion = {
if (tp.typeSymbol.isPackageClass) new PackageCompletion(tp)
else new TypeMemberCompletion(tp)
}
def imported(tp: Type) = new ImportCompletion(tp)
}
-
- class TypeMemberCompletion(val tp: Type) extends CompletionAware with CompilerCompletion {
+
+ class TypeMemberCompletion(val tp: Type) extends CompletionAware
+ with CompilerCompletion {
def excludeEndsWith: List[String] = Nil
def excludeStartsWith: List[String] = List("<") // <byname>, <repeated>, etc.
- def excludeNames: List[String] = anyref.methodNames.filterNot(anyRefMethodsToShow.contains) ++ List("_root_")
+ def excludeNames: List[String] = (anyref.methodNames filterNot anyRefMethodsToShow) :+ "_root_"
def methodSignatureString(sym: Symbol) = {
- def asString = new MethodSymbolOutput(sym).methodString()
-
- if (isCompletionDebug)
- repl.power.showAtAllPhases(asString)
-
- atPhase(currentRun.typerPhase)(asString)
+ SparkIMain stripString afterTyper(new MethodSymbolOutput(sym).methodString())
}
def exclude(name: String): Boolean = (
@@ -139,7 +129,7 @@ class SparkCompletion(val repl: SparkInterpreter) extends SparkCompletionOutput
if (alts.nonEmpty) "" :: alts else Nil
}
- override def toString = "TypeMemberCompletion(%s)".format(tp)
+ override def toString = "%s (%d members)".format(tp, members.size)
}
class PackageCompletion(tp: Type) extends TypeMemberCompletion(tp) {
@@ -165,32 +155,36 @@ class SparkCompletion(val repl: SparkInterpreter) extends SparkCompletionOutput
// the unqualified vals/defs/etc visible in the repl
object ids extends CompletionAware {
- override def completions(verbosity: Int) = repl.unqualifiedIds ::: List("classOf")
- // we try to use the compiler and fall back on reflection if necessary
- // (which at present is for anything defined in the repl session.)
- override def follow(id: String) =
+ override def completions(verbosity: Int) = intp.unqualifiedIds ++ List("classOf") //, "_root_")
+ // now we use the compiler for everything.
+ override def follow(id: String) = {
if (completions(0) contains id) {
- for (clazz <- repl clazzForIdent id) yield {
- // XXX The isMemberClass check is a workaround for the crasher described
- // in the comments of #3431. The issue as described by iulian is:
- //
- // Inner classes exist as symbols
- // inside their enclosing class, but also inside their package, with a mangled
- // name (A$B). The mangled names should never be loaded, and exist only for the
- // optimizer, which sometimes cannot get the right symbol, but it doesn't care
- // and loads the bytecode anyway.
- //
- // So this solution is incorrect, but in the short term the simple fix is
- // to skip the compiler any time completion is requested on a nested class.
- if (clazz.isMemberClass) new InstanceCompletion(clazz)
- else (typeOf(clazz.getName) map TypeMemberCompletion.apply) getOrElse new InstanceCompletion(clazz)
+ intp typeOfExpression id map { tpe =>
+ def default = TypeMemberCompletion(tpe)
+
+ // only rebinding vals in power mode for now.
+ if (!isReplPower) default
+ else intp runtimeClassAndTypeOfTerm id match {
+ case Some((clazz, runtimeType)) =>
+ val sym = intp.symbolOfTerm(id)
+ if (sym.isStable) {
+ val param = new NamedParam.Untyped(id, intp valueOfTerm id getOrElse null)
+ TypeMemberCompletion(tpe, runtimeType, param)
+ }
+ else default
+ case _ =>
+ default
+ }
}
}
- else None
+ else
+ None
+ }
+ override def toString = "<repl ids> (%s)".format(completions(0).size)
}
- // wildcard imports in the repl like "import global._" or "import String._"
- private def imported = repl.wildcardImportedTypes map TypeMemberCompletion.imported
+ // user-issued wildcard imports like "import global._" or "import String._"
+ private def imported = intp.sessionWildcards map TypeMemberCompletion.imported
// literal Ints, Strings, etc.
object literals extends CompletionAware {
@@ -211,7 +205,13 @@ class SparkCompletion(val repl: SparkInterpreter) extends SparkCompletionOutput
}
// top level packages
- object rootClass extends TypeMemberCompletion(RootClass.tpe) { }
+ object rootClass extends TypeMemberCompletion(RootClass.tpe) {
+ override def completions(verbosity: Int) = super.completions(verbosity) :+ "_root_"
+ override def follow(id: String) = id match {
+ case "_root_" => Some(this)
+ case _ => super.follow(id)
+ }
+ }
// members of Predef
object predef extends TypeMemberCompletion(PredefModule.tpe) {
override def excludeEndsWith = super.excludeEndsWith ++ List("Wrapper", "ArrayOps")
@@ -252,14 +252,25 @@ class SparkCompletion(val repl: SparkInterpreter) extends SparkCompletionOutput
}
// the list of completion aware objects which should be consulted
+ // for top level unqualified, it's too noisy to let much in.
lazy val topLevelBase: List[CompletionAware] = List(ids, rootClass, predef, scalalang, javalang, literals)
def topLevel = topLevelBase ++ imported
+ def topLevelThreshold = 50
// the first tier of top level objects (doesn't include file completion)
- def topLevelFor(parsed: Parsed) = topLevel flatMap (_ completionsFor parsed)
+ def topLevelFor(parsed: Parsed): List[String] = {
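+ // gather candidates source by source, bailing out (sorted) once the
+ // threshold is crossed so unqualified completion stays responsive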
+ val buf = new ListBuffer[String]
+ topLevel foreach { ca =>
+ buf ++= (ca completionsFor parsed)
+
+ if (buf.size > topLevelThreshold)
+ return buf.toList.sorted
+ }
+ buf.toList
+ }
// the most recent result
- def lastResult = Forwarder(() => ids follow repl.mostRecentVar)
+ def lastResult = Forwarder(() => ids follow intp.mostRecentVar)
def lastResultFor(parsed: Parsed) = {
/** The logic is a little tortured right now because normally '.' is
@@ -268,9 +279,9 @@ class SparkCompletion(val repl: SparkInterpreter) extends SparkCompletionOutput
val xs = lastResult completionsFor parsed
if (parsed.isEmpty) xs map ("." + _) else xs
}
-
+
// chasing down results which won't parse
- def execute(line: String): Option[Any] = {
+ def execute(line: String): Option[ExecResult] = {
val parsed = Parsed(line)
def noDotOrSlash = line forall (ch => ch != '.' && ch != '/')
@@ -286,9 +297,7 @@ class SparkCompletion(val repl: SparkInterpreter) extends SparkCompletionOutput
def completions(buf: String): List[String] =
topLevelFor(Parsed.dotted(buf + ".", buf.length + 1))
- // jline's entry point
- lazy val jline: ArgumentCompletor =
- returning(new ArgumentCompletor(new JLineCompletion, new JLineDelimiter))(_ setStrict false)
+ def completer(): ScalaCompleter = new JLineTabCompletion
/** This gets a little bit hairy. It's no small feat delegating everything
* and also keeping track of exactly where the cursor is and where it's supposed
@@ -296,44 +305,47 @@ class SparkCompletion(val repl: SparkInterpreter) extends SparkCompletionOutput
* string in the list of completions, that means we are expanding a unique
* completion, so don't update the "last" buffer because it'll be wrong.
*/
- class JLineCompletion extends Completor {
+ class JLineTabCompletion extends ScalaCompleter {
// For recording the buffer on the last tab hit
private var lastBuf: String = ""
private var lastCursor: Int = -1
// Does this represent two consecutive tabs?
- def isConsecutiveTabs(buf: String, cursor: Int) = cursor == lastCursor && buf == lastBuf
-
+ def isConsecutiveTabs(buf: String, cursor: Int) =
+ cursor == lastCursor && buf == lastBuf
+
// Longest common prefix
- def commonPrefix(xs: List[String]) =
- if (xs.isEmpty) ""
- else xs.reduceLeft(_ zip _ takeWhile (x => x._1 == x._2) map (_._1) mkString)
+ def commonPrefix(xs: List[String]): String = {
+ if (xs.isEmpty || xs.contains("")) ""
+ else xs.head.head match {
+ case ch =>
+ if (xs.tail forall (_.head == ch)) "" + ch + commonPrefix(xs map (_.tail))
+ else ""
+ }
+ }
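+ // e.g. commonPrefix(List("filter", "filterNot", "filtered")) == "filter";
+ // recursion stops as soon as any string is exhausted or the heads differ.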
// This is jline's entry point for completion.
- override def complete(_buf: String, cursor: Int, candidates: java.util.List[java.lang.String]): Int = {
- val buf = onull(_buf)
+ override def complete(buf: String, cursor: Int): Candidates = {
verbosity = if (isConsecutiveTabs(buf, cursor)) verbosity + 1 else 0
- DBG("complete(%s, %d) last = (%s, %d), verbosity: %s".format(buf, cursor, lastBuf, lastCursor, verbosity))
+ DBG("\ncomplete(%s, %d) last = (%s, %d), verbosity: %s".format(buf, cursor, lastBuf, lastCursor, verbosity))
// we don't try lower priority completions unless higher ones return no results.
- def tryCompletion(p: Parsed, completionFunction: Parsed => List[String]): Option[Int] = {
- completionFunction(p) match {
- case Nil => None
- case xs =>
- // modify in place and return the position
- xs.foreach(x => candidates.add(x))
-
- // update the last buffer unless this is an alternatives list
- if (xs contains "") Some(p.cursor)
- else {
- val advance = commonPrefix(xs)
- lastCursor = p.position + advance.length
- lastBuf = (buf take p.position) + advance
-
- DBG("tryCompletion(%s, _) lastBuf = %s, lastCursor = %s, p.position = %s".format(p, lastBuf, lastCursor, p.position))
- Some(p.position)
- }
- }
+ def tryCompletion(p: Parsed, completionFunction: Parsed => List[String]): Option[Candidates] = {
+ val winners = completionFunction(p)
+ if (winners.isEmpty)
+ return None
+ val newCursor =
+ if (winners contains "") p.cursor
+ else {
+ val advance = commonPrefix(winners)
+ lastCursor = p.position + advance.length
+ lastBuf = (buf take p.position) + advance
+ DBG("tryCompletion(%s, _) lastBuf = %s, lastCursor = %s, p.position = %s".format(
+ p, lastBuf, lastCursor, p.position))
+ p.position
+ }
+
+ Some(Candidates(newCursor, winners))
}
def mkDotted = Parsed.dotted(buf, cursor) withVerbosity verbosity
@@ -345,9 +357,23 @@ class SparkCompletion(val repl: SparkInterpreter) extends SparkCompletionOutput
else tryCompletion(Parsed.dotted(buf drop 1, cursor), lastResultFor)
def regularCompletion = tryCompletion(mkDotted, topLevelFor)
- def fileCompletion = tryCompletion(mkUndelimited, FileCompletion completionsFor _.buffer)
+ def fileCompletion =
+ if (!looksLikePath(buf)) None
+ else tryCompletion(mkUndelimited, FileCompletion completionsFor _.buffer)
- (lastResultCompletion orElse regularCompletion orElse fileCompletion) getOrElse cursor
+ /** This is the kickoff point for all manner of theoretically possible compiler
+ * unhappiness - fault may be here or elsewhere, but we don't want to crash the
+ * repl regardless. Hopefully catching Exception is enough, but because the
+ * compiler still throws some Errors it may not be.
+ */
+ try {
+ (lastResultCompletion orElse regularCompletion orElse fileCompletion) getOrElse Candidates(cursor, Nil)
+ }
+ catch {
+ case ex: Exception =>
+ DBG("Error: complete(%s, %s) provoked %s".format(buf, cursor, ex))
+ Candidates(cursor, List(" ", "<completion error: " + ex.getMessage + ">"))
+ }
}
}
}
diff --git a/repl/src/main/scala/spark/repl/SparkJLineReader.scala b/repl/src/main/scala/spark/repl/SparkJLineReader.scala
index 9d761c06fc..f7b0261ad0 100644
--- a/repl/src/main/scala/spark/repl/SparkJLineReader.scala
+++ b/repl/src/main/scala/spark/repl/SparkJLineReader.scala
@@ -1,38 +1,79 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2010 LAMP/EPFL
+ * Copyright 2005-2011 LAMP/EPFL
* @author Stepan Koltsov
*/
package spark.repl
-import scala.tools.nsc
import scala.tools.nsc._
-import scala.tools.nsc.interpreter
import scala.tools.nsc.interpreter._
-import java.io.File
-import jline.{ ConsoleReader, ArgumentCompletor, History => JHistory }
+import scala.tools.jline.console.ConsoleReader
+import scala.tools.jline.console.completer._
+import session._
+import scala.collection.JavaConverters._
+import Completion._
+import io.Streamable.slurp
/** Reads from the console using JLine */
-class SparkJLineReader(interpreter: SparkInterpreter) extends SparkInteractiveReader {
- def this() = this(null)
-
- override lazy val history = Some(History(consoleReader))
- override lazy val completion = Option(interpreter) map (x => new SparkCompletion(x))
+class SparkJLineReader(val completion: Completion) extends InteractiveReader {
+ val interactive = true
+ lazy val history: JLineHistory = JLineHistory()
+ lazy val keyBindings =
+ try KeyBinding parse slurp(term.getDefaultBindings)
+ catch { case _: Exception => Nil }
+
+ private def term = consoleReader.getTerminal()
+ def reset() = term.reset()
+ def init() = term.init()
- val consoleReader = {
- val r = new jline.ConsoleReader()
- r setHistory (History().jhistory)
- r setBellEnabled false
- completion foreach { c =>
- r addCompletor c.jline
- r setAutoprintThreshhold 250
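+ // Adapts a ScalaCompleter to jline's Completer contract: jline expects
+ // the candidate list to be mutated in place and the new cursor returned.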
+ def scalaToJline(tc: ScalaCompleter): Completer = new Completer {
+ def complete(_buf: String, cursor: Int, candidates: JList[CharSequence]): Int = {
+ val buf = if (_buf == null) "" else _buf
+ val Candidates(newCursor, newCandidates) = tc.complete(buf, cursor)
+ newCandidates foreach (candidates add _)
+ newCursor
+ }
+ }
+
+ class JLineConsoleReader extends ConsoleReader with ConsoleReaderHelper {
+ // working around protected/trait/java insufficiencies.
+ def goBack(num: Int): Unit = back(num)
+ def readOneKey(prompt: String) = {
+ this.print(prompt)
+ this.flush()
+ this.readVirtualKey()
+ }
+ def eraseLine() = consoleReader.resetPromptLine("", "", 0)
+ def redrawLineAndFlush(): Unit = { flush() ; drawLine() ; flush() }
+
+ this setBellEnabled false
+ if (history ne NoHistory)
+ this setHistory history
+
+ if (completion ne NoCompletion) {
+ val argCompletor: ArgumentCompleter =
+ new ArgumentCompleter(new JLineDelimiter, scalaToJline(completion.completer()))
+ argCompletor setStrict false
+
+ this addCompleter argCompletor
+ this setAutoprintThreshold 400 // max completion candidates without warning
}
-
- r
}
+ val consoleReader: JLineConsoleReader = new JLineConsoleReader()
+
+ def currentLine: String = consoleReader.getCursorBuffer.buffer.toString
+ def redrawLine() = consoleReader.redrawLineAndFlush()
+ def eraseLine() = {
+ while (consoleReader.delete()) { }
+ // consoleReader.eraseLine()
+ }
def readOneLine(prompt: String) = consoleReader readLine prompt
- val interactive = true
+ def readOneKey(prompt: String) = consoleReader readOneKey prompt
}
+object JLineReader {
+ def apply(intp: SparkIMain): JLineReader = apply(new JLineCompletion(intp))
+ def apply(comp: Completion): JLineReader = new JLineReader(comp)
+}
diff --git a/repl/src/main/scala/spark/repl/SparkMemberHandlers.scala b/repl/src/main/scala/spark/repl/SparkMemberHandlers.scala
new file mode 100644
index 0000000000..2980dfcd76
--- /dev/null
+++ b/repl/src/main/scala/spark/repl/SparkMemberHandlers.scala
@@ -0,0 +1,207 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2011 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package spark.repl
+
+import scala.tools.nsc._
+import scala.tools.nsc.interpreter._
+
+import scala.collection.{ mutable, immutable }
+import scala.PartialFunction.cond
+import scala.reflect.NameTransformer
+import util.Chars
+
+trait SparkMemberHandlers {
+ val intp: SparkIMain
+
+ import intp.{ Request, global, naming, atPickler }
+ import global._
+ import naming._
+
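+ // Helpers for emitting generated Scala source: each argument is quoted as
+ // a string literal and the pieces are joined with " + ", optionally led by
+ // a "+ " so the fragment can be appended to a preceding expression.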
+ private def codegenln(leadingPlus: Boolean, xs: String*): String = codegen(leadingPlus, (xs ++ Array("\n")): _*)
+ private def codegenln(xs: String*): String = codegenln(true, xs: _*)
+
+ private def codegen(xs: String*): String = codegen(true, xs: _*)
+ private def codegen(leadingPlus: Boolean, xs: String*): String = {
+ val front = if (leadingPlus) "+ " else ""
+ front + (xs map string2codeQuoted mkString " + ")
+ }
+ private implicit def name2string(name: Name) = name.toString
+
+ /** A traverser that finds all mentioned identifiers, i.e. things
+ * that need to be imported. It might return extra names.
+ */
+ private class ImportVarsTraverser extends Traverser {
+ val importVars = new mutable.HashSet[Name]()
+
+ override def traverse(ast: Tree) = ast match {
+ case Ident(name) =>
+ // XXX this is obviously inadequate but it's going to require some effort
+ // to get right.
+ if (name.toString startsWith "x$") ()
+ else importVars += name
+ case _ => super.traverse(ast)
+ }
+ }
+ private object ImportVarsTraverser {
+ def apply(member: Tree) = {
+ val ivt = new ImportVarsTraverser()
+ ivt traverse member
+ ivt.importVars.toList
+ }
+ }
+
+ def chooseHandler(member: Tree): MemberHandler = member match {
+ case member: DefDef => new DefHandler(member)
+ case member: ValDef => new ValHandler(member)
+ case member@Assign(Ident(_), _) => new AssignHandler(member)
+ case member: ModuleDef => new ModuleHandler(member)
+ case member: ClassDef => new ClassHandler(member)
+ case member: TypeDef => new TypeAliasHandler(member)
+ case member: Import => new ImportHandler(member)
+ case DocDef(_, documented) => chooseHandler(documented)
+ case member => new GenericHandler(member)
+ }
+
+ sealed abstract class MemberDefHandler(override val member: MemberDef) extends MemberHandler(member) {
+ def name: Name = member.name
+ def mods: Modifiers = member.mods
+ def keyword = member.keyword
+ def prettyName = NameTransformer.decode(name)
+
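+ // a Name is either a TermName or a TypeName; expose the definition only
+ // under the matching alias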
+ override def definesImplicit = member.mods.isImplicit
+ override def definesTerm: Option[TermName] = Some(name.toTermName) filter (_ => name.isTermName)
+ override def definesType: Option[TypeName] = Some(name.toTypeName) filter (_ => name.isTypeName)
+ }
+
+ /** Class to handle one member among all the members included
+ * in a single interpreter request.
+ */
+ sealed abstract class MemberHandler(val member: Tree) {
+ def definesImplicit = false
+ def definesValue = false
+ def isLegalTopLevel = member match {
+ case _: ModuleDef | _: ClassDef | _: Import => true
+ case _ => false
+ }
+
+ def definesTerm = Option.empty[TermName]
+ def definesType = Option.empty[TypeName]
+
+ lazy val referencedNames = ImportVarsTraverser(member)
+ def importedNames = List[Name]()
+ def definedNames = definesTerm.toList ++ definesType.toList
+ def definedOrImported = definedNames ++ importedNames
+
+ def extraCodeToEvaluate(req: Request): String = ""
+ def resultExtractionCode(req: Request): String = ""
+
+ private def shortName = this.getClass.toString split '.' last
+ override def toString = shortName + referencedNames.mkString(" (refs: ", ", ", ")")
+ }
+
+ class GenericHandler(member: Tree) extends MemberHandler(member)
+
+ class ValHandler(member: ValDef) extends MemberDefHandler(member) {
+ val maxStringElements = 1000 // no need to mkString billions of elements
+ override def definesValue = true
+
+ override def resultExtractionCode(req: Request): String = {
+ val isInternal = isUserVarName(name) && req.lookupTypeOf(name) == "Unit"
+ if (!mods.isPublic || isInternal) ""
+ else {
+ // if this is a lazy val we avoid evaluating it here
+ val resultString =
+ if (mods.isLazy) codegenln(false, "<lazy>")
+ else any2stringOf(req fullPath name, maxStringElements)
+
+ """ + "%s: %s = " + %s""".format(prettyName, string2code(req typeOf name), resultString)
+ }
+ }
+ }
+
+ class DefHandler(member: DefDef) extends MemberDefHandler(member) {
+ private def vparamss = member.vparamss
+ // true if 0-arity
+ override def definesValue = vparamss.isEmpty || vparamss.head.isEmpty
+ override def resultExtractionCode(req: Request) =
+ if (mods.isPublic) codegenln(name, ": ", req.typeOf(name)) else ""
+ }
+
+ class AssignHandler(member: Assign) extends MemberHandler(member) {
+ val lhs = member.lhs.asInstanceOf[Ident] // an unfortunate limitation
+ val name = newTermName(freshInternalVarName())
+
+ override def definesTerm = Some(name)
+ override def definesValue = true
+ override def extraCodeToEvaluate(req: Request) =
+ """val %s = %s""".format(name, lhs)
+
+ /** Print out lhs instead of the generated varName */
+ override def resultExtractionCode(req: Request) = {
+ val lhsType = string2code(req lookupTypeOf name)
+ val res = string2code(req fullPath name)
+
+ """ + "%s: %s = " + %s + "\n" """.format(lhs, lhsType, res) + "\n"
+ }
+ }
+
+ class ModuleHandler(module: ModuleDef) extends MemberDefHandler(module) {
+ override def definesTerm = Some(name)
+ override def definesValue = true
+
+ override def resultExtractionCode(req: Request) = codegenln("defined module ", name)
+ }
+
+ class ClassHandler(member: ClassDef) extends MemberDefHandler(member) {
+ override def definesType = Some(name.toTypeName)
+ override def definesTerm = Some(name.toTermName) filter (_ => mods.isCase)
+
+ override def resultExtractionCode(req: Request) =
+ codegenln("defined %s %s".format(keyword, name))
+ }
+
+ class TypeAliasHandler(member: TypeDef) extends MemberDefHandler(member) {
+ private def isAlias = mods.isPublic && treeInfo.isAliasTypeDef(member)
+ override def definesType = Some(name.toTypeName) filter (_ => isAlias)
+
+ override def resultExtractionCode(req: Request) =
+ codegenln("defined type alias ", name) + "\n"
+ }
+
+ class ImportHandler(imp: Import) extends MemberHandler(imp) {
+ val Import(expr, selectors) = imp
+ def targetType = intp.typeOfExpression("" + expr)
+
+ // wildcard imports, e.g. import foo._
+ private def selectorWild = selectors filter (_.name == nme.USCOREkw)
+ // renamed imports, e.g. import foo.{ bar => baz }
+ private def selectorRenames = selectors map (_.rename) filterNot (_ == null)
+
+ /** Whether this import includes a wildcard import */
+ val importsWildcard = selectorWild.nonEmpty
+
+ def implicitSymbols = importedSymbols filter (_.isImplicit)
+ def importedSymbols = individualSymbols ++ wildcardSymbols
+
+ lazy val individualSymbols: List[Symbol] =
+ atPickler(targetType.toList flatMap (tp => individualNames map (tp nonPrivateMember _)))
+
+ lazy val wildcardSymbols: List[Symbol] =
+ if (importsWildcard) atPickler(targetType.toList flatMap (_.nonPrivateMembers))
+ else Nil
+
+ /** Complete list of names imported by a wildcard */
+ lazy val wildcardNames: List[Name] = wildcardSymbols map (_.name)
+ lazy val individualNames: List[Name] = selectorRenames filterNot (_ == nme.USCOREkw) flatMap (_.bothNames)
+
+ /** The names imported by this statement */
+ override lazy val importedNames: List[Name] = wildcardNames ++ individualNames
+ lazy val importsSymbolNamed: Set[String] = importedNames map (_.toString) toSet
+
+ def importString = imp.toString
+ override def resultExtractionCode(req: Request) = codegenln(importString) + "\n"
+ }
+}
diff --git a/repl/src/main/scala/spark/repl/SparkSimpleReader.scala b/repl/src/main/scala/spark/repl/SparkSimpleReader.scala
deleted file mode 100644
index 2b24c4bf63..0000000000
--- a/repl/src/main/scala/spark/repl/SparkSimpleReader.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2010 LAMP/EPFL
- * @author Stepan Koltsov
- */
-
-package spark.repl
-
-import scala.tools.nsc
-import scala.tools.nsc._
-import scala.tools.nsc.interpreter
-import scala.tools.nsc.interpreter._
-
-import java.io.{ BufferedReader, PrintWriter }
-import io.{ Path, File, Directory }
-
-/** Reads using standard JDK API */
-class SparkSimpleReader(
- in: BufferedReader,
- out: PrintWriter,
- val interactive: Boolean)
-extends SparkInteractiveReader {
- def this() = this(Console.in, new PrintWriter(Console.out), true)
- def this(in: File, out: PrintWriter, interactive: Boolean) = this(in.bufferedReader(), out, interactive)
-
- def close() = in.close()
- def readOneLine(prompt: String): String = {
- if (interactive) {
- out.print(prompt)
- out.flush()
- }
- in.readLine()
- }
-}
diff --git a/repl/src/test/scala/spark/repl/ReplSuite.scala b/repl/src/test/scala/spark/repl/ReplSuite.scala
index 829b1d934e..a9b1bee7e0 100644
--- a/repl/src/test/scala/spark/repl/ReplSuite.scala
+++ b/repl/src/test/scala/spark/repl/ReplSuite.scala
@@ -22,7 +22,7 @@ class ReplSuite extends FunSuite {
}
}
}
- val interp = new SparkInterpreterLoop(in, new PrintWriter(out), master)
+ val interp = new SparkILoop(in, new PrintWriter(out), master)
spark.repl.Main.interp = interp
val separator = System.getProperty("path.separator")
interp.main(Array("-classpath", paths.mkString(separator)))
diff --git a/run b/run
index f7e5a82a92..253dbb8521 100755
--- a/run
+++ b/run
@@ -1,6 +1,6 @@
#!/bin/bash
-SCALA_VERSION=2.8.1
+SCALA_VERSION=2.9.0.1
# Figure out where the Scala framework is installed
FWDIR="$(cd `dirname $0`; pwd)"
@@ -42,23 +42,23 @@ EXAMPLES_DIR=$FWDIR/examples
BAGEL_DIR=$FWDIR/bagel
# Build up classpath
-CLASSPATH="$SPARK_CLASSPATH:$CORE_DIR/target/scala_$SCALA_VERSION/classes:$MESOS_CLASSPATH"
+CLASSPATH="$SPARK_CLASSPATH:$CORE_DIR/target/scala-$SCALA_VERSION/classes:$MESOS_CLASSPATH"
CLASSPATH+=:$FWDIR/conf
-CLASSPATH+=:$REPL_DIR/target/scala_$SCALA_VERSION/classes
-CLASSPATH+=:$EXAMPLES_DIR/target/scala_$SCALA_VERSION/classes
+CLASSPATH+=:$REPL_DIR/target/scala-$SCALA_VERSION/classes
+CLASSPATH+=:$EXAMPLES_DIR/target/scala-$SCALA_VERSION/classes
for jar in `find $CORE_DIR/lib -name '*jar'`; do
CLASSPATH+=:$jar
done
-for jar in $CORE_DIR/lib_managed/scala_$SCALA_VERSION/compile/*.jar; do
+for jar in `find $FWDIR/lib_managed/jars -name '*jar'`; do
CLASSPATH+=:$jar
done
-for jar in `find $REPL_DIR/lib -name '*jar'`; do
+for jar in `find $FWDIR/lib_managed/bundles -name '*jar'`; do
CLASSPATH+=:$jar
done
-for jar in $REPL_DIR/lib_managed/scala_$SCALA_VERSION/compile/*.jar; do
+for jar in `find $REPL_DIR/lib -name '*jar'`; do
CLASSPATH+=:$jar
done
-CLASSPATH+=:$BAGEL_DIR/target/scala_$SCALA_VERSION/classes
+CLASSPATH+=:$BAGEL_DIR/target/scala-$SCALA_VERSION/classes
export CLASSPATH # Needed for spark-shell
if [ -n "$SCALA_HOME" ]; then
@@ -67,4 +67,5 @@ else
SCALA=scala
fi
+echo $CLASSPATH >> tmp
exec $SCALA -cp $CLASSPATH "$@"
diff --git a/sbt/sbt-launch-0.10.1.jar b/sbt/sbt-launch-0.10.1.jar
new file mode 100644
index 0000000000..673495f78a
--- /dev/null
+++ b/sbt/sbt-launch-0.10.1.jar
Binary files differ
diff --git a/sbt/sbt-launch-0.7.5.jar b/sbt/sbt-launch-0.7.5.jar
deleted file mode 100644
index 052c1e1e56..0000000000
--- a/sbt/sbt-launch-0.7.5.jar
+++ /dev/null
Binary files differ
diff --git a/spark-shell b/spark-shell
index 009f96fe98..29e5e65da2 100755
--- a/spark-shell
+++ b/spark-shell
@@ -1,3 +1,3 @@
#!/bin/sh
FWDIR="`dirname $0`"
-exec $FWDIR/run spark.repl.Main $@
+exec $FWDIR/run spark.repl.Main "$@"