about summary refs log tree commit diff
path: root/core
diff options
context:
space:
mode:
authorMatei Zaharia <matei@eecs.berkeley.edu>2011-05-29 18:46:01 -0700
committerMatei Zaharia <matei@eecs.berkeley.edu>2011-05-29 18:46:01 -0700
commit4096c2287ec8e69b0c879ea0c512b9f7152e15ab (patch)
treea03dfd990edb20c4b053f58a7b0e856ee59d6878 /core
parentef706ae9594f5fdefbe657d70ca28ae961ac980c (diff)
downloadspark-4096c2287ec8e69b0c879ea0c512b9f7152e15ab.tar.gz
spark-4096c2287ec8e69b0c879ea0c512b9f7152e15ab.tar.bz2
spark-4096c2287ec8e69b0c879ea0c512b9f7152e15ab.zip
Various fixes
Diffstat (limited to 'core')
-rw-r--r--  core/lib/mesos.jar                             bin  132297 -> 132286 bytes
-rw-r--r--  core/lib/protobuf-2.3.0.jar                    bin  444422 -> 0 bytes
-rw-r--r--  core/src/main/scala/spark/MesosScheduler.scala      8
-rw-r--r--  core/src/main/scala/spark/SimpleJob.scala           4
4 files changed, 9 insertions, 3 deletions
diff --git a/core/lib/mesos.jar b/core/lib/mesos.jar
index 921149edae..731720e83e 100644
--- a/core/lib/mesos.jar
+++ b/core/lib/mesos.jar
Binary files differ
diff --git a/core/lib/protobuf-2.3.0.jar b/core/lib/protobuf-2.3.0.jar
deleted file mode 100644
index b3d4056407..0000000000
--- a/core/lib/protobuf-2.3.0.jar
+++ /dev/null
Binary files differ
diff --git a/core/src/main/scala/spark/MesosScheduler.scala b/core/src/main/scala/spark/MesosScheduler.scala
index 393a33af8c..5ed769c5c2 100644
--- a/core/src/main/scala/spark/MesosScheduler.scala
+++ b/core/src/main/scala/spark/MesosScheduler.scala
@@ -79,7 +79,13 @@ extends MScheduler with DAGScheduler with Logging
override def run {
val sched = MesosScheduler.this
sched.driver = new MesosSchedulerDriver(sched, master)
- sched.driver.run()
+ try {
+ val ret = sched.driver.run()
+ logInfo("driver.run() returned with code " + ret)
+ } catch {
+ case e: Exception =>
+ logError("driver.run() failed", e)
+ }
}
}.start
}
diff --git a/core/src/main/scala/spark/SimpleJob.scala b/core/src/main/scala/spark/SimpleJob.scala
index 2961561f34..6255ee9388 100644
--- a/core/src/main/scala/spark/SimpleJob.scala
+++ b/core/src/main/scala/spark/SimpleJob.scala
@@ -204,7 +204,7 @@ extends Job(jobId) with Logging
val index = tidToIndex(tid.getValue)
if (!finished(index)) {
tasksFinished += 1
- logInfo("Finished TID %d (progress: %d/%d)".format(
+ logInfo("Finished TID %s (progress: %d/%d)".format(
tid, tasksFinished, numTasks))
// Deserialize task result
val result = Utils.deserialize[TaskResult[_]](status.getData.toByteArray)
@@ -223,7 +223,7 @@ extends Job(jobId) with Logging
val tid = status.getTaskId
val index = tidToIndex(tid.getValue)
if (!finished(index)) {
- logInfo("Lost TID %d (task %d:%d)".format(tid, jobId, index))
+ logInfo("Lost TID %s (task %d:%d)".format(tid, jobId, index))
launched(index) = false
tasksLaunched -= 1
// Check if the problem is a map output fetch failure. In that case, this