path: root/core/src/test
author     Patrick Wendell <pwendell@gmail.com>  2013-12-26 15:43:15 -0800
committer  Patrick Wendell <pwendell@gmail.com>  2013-12-26 15:43:15 -0800
commit     55c8bb741c9e3780a63b88ff24146b1d81663142 (patch)
tree       aa3592092e1faf8fdec18d52cfacfad06f7ccf7a /core/src/test
parent     5c1b4f64052e8fae0d942def4d6085a971faee4e (diff)
Intermediate clean-up of tests to appease jenkins
Diffstat (limited to 'core/src/test')
-rw-r--r--  core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala  35
1 file changed, 25 insertions(+), 10 deletions(-)
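
For context, the updated test calls below imply new driver-related fields on the deploy messages. The following is a rough sketch of the message shapes the tests now expect, inferred purely from the call sites in this diff rather than from DeployMessages.scala itself; field names not visible in the diff (cores, memory, coresUsed, memoryUsed, status) are assumptions based on the positional arguments:

import org.apache.spark.deploy.master.{ApplicationInfo, DriverInfo, RecoveryState, WorkerInfo}
import org.apache.spark.deploy.worker.{DriverRunner, ExecutorRunner}

// Inferred shapes only: the real case classes live in DeployMessages and may differ.
case class MasterStateResponse(
    host: String,
    port: Int,
    workers: Array[WorkerInfo],
    activeApps: Array[ApplicationInfo],
    completedApps: Array[ApplicationInfo],
    activeDrivers: Array[DriverInfo],       // added by this change
    completedDrivers: Array[DriverInfo],    // added by this change
    status: RecoveryState.Value)

case class WorkerStateResponse(
    host: String,
    port: Int,
    workerId: String,
    executors: List[ExecutorRunner],
    finishedExecutors: List[ExecutorRunner],
    drivers: List[DriverRunner],            // added by this change
    finishedDrivers: List[DriverRunner],    // added by this change
    masterUrl: String,
    cores: Int,                             // assumed name
    memory: Int,                            // assumed name
    coresUsed: Int,                         // assumed name
    memoryUsed: Int,                        // assumed name
    masterWebUiUrl: String)
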
diff --git a/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala b/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala
index 0b38e239f9..485f688aa6 100644
--- a/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala
@@ -25,8 +25,8 @@ import net.liftweb.json.JsonAST.JValue
import org.scalatest.FunSuite
import org.apache.spark.deploy.DeployMessages.{MasterStateResponse, WorkerStateResponse}
-import org.apache.spark.deploy.master.{ApplicationInfo, RecoveryState, WorkerInfo}
-import org.apache.spark.deploy.worker.ExecutorRunner
+import org.apache.spark.deploy.master.{ApplicationInfo, DriverInfo, RecoveryState, WorkerInfo}
+import org.apache.spark.deploy.worker.{ExecutorRunner, DriverRunner}
class JsonProtocolSuite extends FunSuite {
test("writeApplicationInfo") {
@@ -50,11 +50,13 @@ class JsonProtocolSuite extends FunSuite {
}
test("writeMasterState") {
- val workers = Array[WorkerInfo](createWorkerInfo(), createWorkerInfo())
- val activeApps = Array[ApplicationInfo](createAppInfo())
+ val workers = Array(createWorkerInfo(), createWorkerInfo())
+ val activeApps = Array(createAppInfo())
val completedApps = Array[ApplicationInfo]()
+ val activeDrivers = Array(createDriverInfo())
+ val completedDrivers = Array(createDriverInfo())
val stateResponse = new MasterStateResponse("host", 8080, workers, activeApps, completedApps,
- RecoveryState.ALIVE)
+ activeDrivers, completedDrivers, RecoveryState.ALIVE)
val output = JsonProtocol.writeMasterState(stateResponse)
assertValidJson(output)
}
@@ -62,26 +64,39 @@ class JsonProtocolSuite extends FunSuite {
test("writeWorkerState") {
val executors = List[ExecutorRunner]()
val finishedExecutors = List[ExecutorRunner](createExecutorRunner(), createExecutorRunner())
+ val drivers = List(createDriverRunner())
+ val finishedDrivers = List(createDriverRunner(), createDriverRunner())
val stateResponse = new WorkerStateResponse("host", 8080, "workerId", executors,
- finishedExecutors, "masterUrl", 4, 1234, 4, 1234, "masterWebUiUrl")
+ finishedExecutors, drivers, finishedDrivers, "masterUrl", 4, 1234, 4, 1234, "masterWebUiUrl")
val output = JsonProtocol.writeWorkerState(stateResponse)
assertValidJson(output)
}
- def createAppDesc() : ApplicationDescription = {
+ def createAppDesc(): ApplicationDescription = {
val cmd = new Command("mainClass", List("arg1", "arg2"), Map())
new ApplicationDescription("name", 4, 1234, cmd, "sparkHome", "appUiUrl")
}
- def createAppInfo() : ApplicationInfo = {
+ def createAppInfo(): ApplicationInfo = {
new ApplicationInfo(3, "id", createAppDesc(), new Date(123456789), null, "appUriStr")
}
- def createWorkerInfo() : WorkerInfo = {
+
+ def createDriverDesc() = new DriverDescription(
+ "hdfs://some-dir/some.jar", "org.apache.spark.FakeClass", 100, 3,
+ Seq("--some-config", "val", "--other-config", "val"), Seq("-Dsystem.property=someValue"),
+ Seq(("K1", "V1"), ("K2", "V2"))
+ )
+ def createDriverInfo(): DriverInfo = new DriverInfo(3, "driver-3", createDriverDesc(), new Date())
+
+ def createWorkerInfo(): WorkerInfo = {
new WorkerInfo("id", "host", 8080, 4, 1234, null, 80, "publicAddress")
}
- def createExecutorRunner() : ExecutorRunner = {
+ def createExecutorRunner(): ExecutorRunner = {
new ExecutorRunner("appId", 123, createAppDesc(), 4, 1234, null, "workerId", "host",
new File("sparkHome"), new File("workDir"), ExecutorState.RUNNING)
}
+ def createDriverRunner(): DriverRunner = {
+ new DriverRunner("driverId", new File("workDir"), createDriverDesc(), null)
+ }
def assertValidJson(json: JValue) {
try {