author     Andrew Or <andrew@databricks.com>    2016-04-25 15:30:18 -0700
committer  Reynold Xin <rxin@databricks.com>    2016-04-25 15:30:18 -0700
commit     34336b6250d99bcf009b082cbf83f326d6b00074 (patch)
tree       b24149ee9336a699aecd2264f5010f9ecb67ba72 /repl/scala-2.11/src/test
parent     9cb3ba1013a7eae11be8a00fa4a9c5308bb20195 (diff)
[SPARK-14828][SQL] Start SparkSession in REPL instead of SQLContext
## What changes were proposed in this pull request?

```
Spark context available as 'sc' (master = local[*], app id = local-1461283768192).
Spark session available as 'spark'.
Welcome to
      ____              __
     / __/__  ___ _____/ /__
    _\ \/ _ \/ _ `/ __/ '_/
   /___/ .__/\_,_/_/ /_/\_\   version 2.0.0-SNAPSHOT
      /_/

Using Scala version 2.11.8 (Java HotSpot(TM) 64-Bit Server VM, Java 1.7.0_51)
Type in expressions to have them evaluated.
Type :help for more information.

scala> sql("SHOW TABLES").collect()
16/04/21 17:09:39 WARN ObjectStore: Version information not found in metastore. hive.metastore.schema.verification is not enabled so recording the schema version 1.2.0
16/04/21 17:09:39 WARN ObjectStore: Failed to get database default, returning NoSuchObjectException
res0: Array[org.apache.spark.sql.Row] = Array([src,false])

scala> sql("SHOW TABLES").collect()
res1: Array[org.apache.spark.sql.Row] = Array([src,false])

scala> spark.createDataFrame(Seq((1, 1), (2, 2), (3, 3)))
res2: org.apache.spark.sql.DataFrame = [_1: int, _2: int]
```

Hive support is loaded lazily.

## How was this patch tested?

Manual.

Author: Andrew Or <andrew@databricks.com>

Closes #12589 from andrewor14/spark-session-repl.
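For readers following along outside the shell, here is a minimal sketch of the session setup the REPL now wires up automatically. The object name and app name are placeholders; in the shell itself, `spark` and `sc` are pre-bound and none of this boilerplate is needed.

```scala
import org.apache.spark.sql.SparkSession

object SparkSessionSketch {
  def main(args: Array[String]): Unit = {
    // Build the session the shell now provides as 'spark'.
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("spark-session-sketch")
      .getOrCreate()

    // The shell's 'sc' is the session's underlying SparkContext.
    val sc = spark.sparkContext

    // Replaces the old `import sqlContext.implicits._`.
    import spark.implicits._

    val df = spark.createDataFrame(Seq((1, 1), (2, 2), (3, 3)))
    df.show()

    spark.stop()
  }
}
```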
Diffstat (limited to 'repl/scala-2.11/src/test')
-rw-r--r--  repl/scala-2.11/src/test/scala/org/apache/spark/repl/ReplSuite.scala  5
1 file changed, 3 insertions(+), 2 deletions(-)
diff --git a/repl/scala-2.11/src/test/scala/org/apache/spark/repl/ReplSuite.scala b/repl/scala-2.11/src/test/scala/org/apache/spark/repl/ReplSuite.scala
index d3dafe9c42..af82e7a111 100644
--- a/repl/scala-2.11/src/test/scala/org/apache/spark/repl/ReplSuite.scala
+++ b/repl/scala-2.11/src/test/scala/org/apache/spark/repl/ReplSuite.scala
@@ -249,10 +249,11 @@ class ReplSuite extends SparkFunSuite {
assertDoesNotContain("Exception", output)
}
- test("SPARK-2576 importing SQLContext.createDataFrame.") {
+ test("SPARK-2576 importing implicits") {
// We need to use local-cluster to test this case.
val output = runInterpreter("local-cluster[1,1,1024]",
"""
+ |import spark.implicits._
|case class TestCaseClass(value: Int)
|sc.parallelize(1 to 10).map(x => TestCaseClass(x)).toDF().collect()
|
@@ -366,7 +367,7 @@ class ReplSuite extends SparkFunSuite {
test("define case class and create Dataset together with paste mode") {
val output = runInterpreterInPasteMode("local-cluster[1,1,1024]",
"""
- |import sqlContext.implicits._
+ |import spark.implicits._
|case class TestClass(value: Int)
|Seq(TestClass(1)).toDS()
""".stripMargin)