author     Matei Zaharia <matei@eecs.berkeley.edu>   2011-07-14 17:29:11 -0400
committer  Matei Zaharia <matei@eecs.berkeley.edu>   2011-07-14 17:29:11 -0400
commit     c8eb8b2b90e35db7ddc2d8d539828c250123450d (patch)
tree       24669d3fe5c81b692d2c6316b61538b027ee1bef /core
parent     e4c3402d2d174c50a756d0ac6b6f888363f8d7c9 (diff)
Set class loader for remote actors to fix a bug that happens in Scala 2.9
Diffstat (limited to 'core')
-rw-r--r--  core/src/main/scala/spark/Executor.scala      4
-rw-r--r--  core/src/main/scala/spark/SparkContext.scala  5
2 files changed, 9 insertions(+), 0 deletions(-)
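
Background on the fix: Scala 2.9's remote actors hand RemoteActor.classLoader to their serializer when decoding incoming messages; left at its default of null, application classes may fail to resolve on the receiving side (typically surfacing as a ClassNotFoundException). The sketch below is a minimal, hypothetical pair of remote actors showing the pattern this patch applies in Executor and SparkContext, namely assigning the class loader before calling alive/register/select. The Greeting message, the 'echo name, and port 9010 are illustrative and not part of this commit.

// Hypothetical standalone sketch (not part of this commit) of Scala 2.9 remote actors.
import scala.actors.Actor._
import scala.actors.remote.RemoteActor
import scala.actors.remote.RemoteActor._
import scala.actors.remote.Node

// Application-level message; without a suitable RemoteActor.classLoader on the
// receiving side, deserializing it can fail with ClassNotFoundException.
case class Greeting(text: String)

object EchoDemo {
  def main(args: Array[String]) {
    // Server side: same pattern as the patch, set the loader before alive/register
    actor {
      RemoteActor.classLoader = getClass.getClassLoader
      alive(9010)              // illustrative port
      register('echo, self)    // illustrative name
      loop {
        react {
          case Greeting(text) => reply(Greeting("echo: " + text))
        }
      }
    }

    Thread.sleep(500)  // crude wait so the server is registered before the client connects

    // Client side: set the loader before select, then exchange one message
    actor {
      RemoteActor.classLoader = getClass.getClassLoader
      val server = select(Node("localhost", 9010), 'echo)
      server ! Greeting("hello")
      react { case Greeting(text) => println(text) }
    }
  }
}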
diff --git a/core/src/main/scala/spark/Executor.scala b/core/src/main/scala/spark/Executor.scala
index 0942fecff3..d4d80845c5 100644
--- a/core/src/main/scala/spark/Executor.scala
+++ b/core/src/main/scala/spark/Executor.scala
@@ -4,6 +4,7 @@ import java.io.{File, FileOutputStream}
 import java.net.{URI, URL, URLClassLoader}
 import java.util.concurrent._
 
+import scala.actors.remote.RemoteActor
 import scala.collection.mutable.ArrayBuffer
 
 import mesos.{ExecutorArgs, ExecutorDriver, MesosExecutorDriver}
@@ -25,6 +26,9 @@ class Executor extends mesos.Executor with Logging {
     for ((key, value) <- props)
       System.setProperty(key, value)
 
+    // Make sure an appropriate class loader is set for remote actors
+    RemoteActor.classLoader = getClass.getClassLoader
+
     // Initialize Spark environment (using system properties read above)
     env = SparkEnv.createFromSystemProperties(false)
     SparkEnv.set(env)
diff --git a/core/src/main/scala/spark/SparkContext.scala b/core/src/main/scala/spark/SparkContext.scala
index 1f2bddc60e..74e6ff1fec 100644
--- a/core/src/main/scala/spark/SparkContext.scala
+++ b/core/src/main/scala/spark/SparkContext.scala
@@ -3,6 +3,7 @@ package spark
 import java.io._
 import java.util.concurrent.atomic.AtomicInteger
 
+import scala.actors.remote.RemoteActor
 import scala.collection.mutable.ArrayBuffer
 
 import org.apache.hadoop.mapred.InputFormat
@@ -37,6 +38,10 @@ extends Logging {
     System.setProperty("spark.master.host", Utils.localHostName)
   if (System.getProperty("spark.master.port") == null)
     System.setProperty("spark.master.port", "50501")
+
+  // Make sure a proper class loader is set for remote actors (unless user set one)
+  if (RemoteActor.classLoader == null)
+    RemoteActor.classLoader = getClass.getClassLoader
 
   // Create the Spark execution environment (cache, map output tracker, etc)
   val env = SparkEnv.createFromSystemProperties(true)
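
A note on the difference between the two call sites: the Executor starts in a fresh JVM driven by Mesos and assigns the loader unconditionally, while SparkContext runs inside the user's application and only assigns it when it is still null, so a loader the application installed beforehand is left in place. The driver-side sketch below is hypothetical; DriverSetup, the /tmp/my-job.jar path, and the extra URLClassLoader are illustrative and not from this commit.

// Hypothetical driver-side sketch (not from this commit). Because the
// SparkContext hunk above only assigns RemoteActor.classLoader when it is
// still null, a loader installed by the application beforehand is kept.
import java.net.{URL, URLClassLoader}
import scala.actors.remote.RemoteActor

object DriverSetup {
  def main(args: Array[String]) {
    // Illustrative extra job jar; the loader delegates to the application loader
    val jobJars = Array(new URL("file:/tmp/my-job.jar"))
    RemoteActor.classLoader = new URLClassLoader(jobJars, getClass.getClassLoader)

    // A SparkContext created after this point would see a non-null loader
    // and leave it untouched.
  }
}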