diff options
author | Patrick Wendell <pwendell@gmail.com> | 2013-09-04 22:47:03 -0700 |
---|---|---|
committer | Patrick Wendell <pwendell@gmail.com> | 2013-09-04 22:47:03 -0700 |
commit | 5c7494d7c1b7301138fb3dc155a1b0c961126ec6 (patch) | |
tree | 8d53c6fbd3bcc4b8bbcb408611e775392b69243b /core | |
parent | a54786678fb4f3663f90ecc063af3b3028aff650 (diff) | |
parent | 714e7f9e32590c302ad315b7cbee72b2e8b32b9b (diff) | |
download | spark-5c7494d7c1b7301138fb3dc155a1b0c961126ec6.tar.gz spark-5c7494d7c1b7301138fb3dc155a1b0c961126ec6.tar.bz2 spark-5c7494d7c1b7301138fb3dc155a1b0c961126ec6.zip |
Merge pull request #893 from ilikerps/master
SPARK-884: Add unit test to validate Spark JSON output
Diffstat (limited to 'core')
-rw-r--r-- | core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala | 92 |
1 file changed, 92 insertions, 0 deletions
diff --git a/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala b/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala new file mode 100644 index 0000000000..05f8545c7b --- /dev/null +++ b/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala @@ -0,0 +1,92 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.spark.deploy + +import java.io.File +import java.util.Date + +import net.liftweb.json.{JsonAST, JsonParser} +import net.liftweb.json.JsonAST.JValue +import org.scalatest.FunSuite + +import org.apache.spark.deploy.DeployMessages.{MasterStateResponse, WorkerStateResponse} +import org.apache.spark.deploy.master.{ApplicationInfo, WorkerInfo} +import org.apache.spark.deploy.worker.ExecutorRunner + +class JsonProtocolSuite extends FunSuite { + test("writeApplicationInfo") { + val output = JsonProtocol.writeApplicationInfo(createAppInfo()) + assertValidJson(output) + } + + test("writeWorkerInfo") { + val output = JsonProtocol.writeWorkerInfo(createWorkerInfo()) + assertValidJson(output) + } + + test("writeApplicationDescription") { + val output = JsonProtocol.writeApplicationDescription(createAppDesc()) + assertValidJson(output) + } + + test("writeExecutorRunner") { + val output = JsonProtocol.writeExecutorRunner(createExecutorRunner()) + assertValidJson(output) + } + + test("writeMasterState") { + val workers = Array[WorkerInfo](createWorkerInfo(), createWorkerInfo()) + val activeApps = Array[ApplicationInfo](createAppInfo()) + val completedApps = Array[ApplicationInfo]() + val stateResponse = new MasterStateResponse("host", 8080, workers, activeApps, completedApps) + val output = JsonProtocol.writeMasterState(stateResponse) + assertValidJson(output) + } + + test("writeWorkerState") { + val executors = List[ExecutorRunner]() + val finishedExecutors = List[ExecutorRunner](createExecutorRunner(), createExecutorRunner()) + val stateResponse = new WorkerStateResponse("host", 8080, "workerId", executors, + finishedExecutors, "masterUrl", 4, 1234, 4, 1234, "masterWebUiUrl") + val output = JsonProtocol.writeWorkerState(stateResponse) + assertValidJson(output) + } + + def createAppDesc() : ApplicationDescription = { + val cmd = new Command("mainClass", List("arg1", "arg2"), Map()) + new ApplicationDescription("name", 4, 1234, cmd, "sparkHome", 
"appUiUrl") + } + def createAppInfo() : ApplicationInfo = { + new ApplicationInfo(3, "id", createAppDesc(), new Date(123456789), null, "appUriStr") + } + def createWorkerInfo() : WorkerInfo = { + new WorkerInfo("id", "host", 8080, 4, 1234, null, 80, "publicAddress") + } + def createExecutorRunner() : ExecutorRunner = { + new ExecutorRunner("appId", 123, createAppDesc(), 4, 1234, null, "workerId", "host", + new File("sparkHome"), new File("workDir")) + } + + def assertValidJson(json: JValue) { + try { + JsonParser.parse(JsonAST.compactRender(json)) + } catch { + case e: JsonParser.ParseException => fail("Invalid Json detected", e) + } + } +} |