From 0588dc7c0a9f3180dddae0dc202a6d41eb43464f Mon Sep 17 00:00:00 2001
From: Hossein
Date: Mon, 27 Mar 2017 08:53:45 -0700
Subject: [SPARK-20088] Do not create new SparkContext in SparkR createSparkContext

## What changes were proposed in this pull request?

Instead of creating a new `JavaSparkContext`, we use `SparkContext.getOrCreate`.

## How was this patch tested?

Existing tests

Author: Hossein

Closes #17423 from falaki/SPARK-20088.
---
 core/src/main/scala/org/apache/spark/api/r/RRDD.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/core/src/main/scala/org/apache/spark/api/r/RRDD.scala b/core/src/main/scala/org/apache/spark/api/r/RRDD.scala
index 72ae0340aa..295355c7bf 100644
--- a/core/src/main/scala/org/apache/spark/api/r/RRDD.scala
+++ b/core/src/main/scala/org/apache/spark/api/r/RRDD.scala
@@ -136,7 +136,7 @@ private[r] object RRDD {
         .mkString(File.separator))
     }

-    val jsc = new JavaSparkContext(sparkConf)
+    val jsc = new JavaSparkContext(SparkContext.getOrCreate(sparkConf))
     jars.foreach { jar =>
       jsc.addJar(jar)
     }
-- 
cgit v1.2.3
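
For context, a minimal standalone sketch (not part of the patch) of the behavior the change relies on: `SparkContext.getOrCreate` returns the JVM's already-running context if one exists, and a `JavaSparkContext` can wrap it rather than constructing a second context. The object name, app name, and master setting below are illustrative assumptions, not values from the Spark codebase.

```scala
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.api.java.JavaSparkContext

// Hypothetical example; app name and master are placeholder values.
object GetOrCreateExample {
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf().setAppName("example-app").setMaster("local[*]")

    // Returns the active SparkContext if one already exists in this JVM,
    // otherwise creates one from the given conf.
    val sc = SparkContext.getOrCreate(sparkConf)

    // Wrapping the existing SparkContext, as the patched createSparkContext does,
    // avoids the error that `new JavaSparkContext(sparkConf)` would raise when a
    // context is already running in the same JVM.
    val jsc = new JavaSparkContext(sc)

    // A second call returns the same instance rather than creating another context.
    assert(SparkContext.getOrCreate(sparkConf) eq sc)

    jsc.stop()
  }
}
```

This mirrors the one-line change in `RRDD.scala`: the SparkR backend may be started in a JVM where a `SparkContext` already exists, so reusing it via `getOrCreate` is safer than unconditionally constructing a new one.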