author     William Benton <willb@redhat.com>      2014-02-26 10:09:50 -0800
committer  Aaron Davidson <aaron@databricks.com>  2014-02-26 10:09:50 -0800
commit     fbedc8eff2573b31320ad1b11a1826a2e530c16c (patch)
tree       6df1eb420790f6969675099386911173733bdd14
parent     b8a1871953058c67b49b7f8455cbb417d5b50ab6 (diff)
SPARK-1078: Replace lift-json with json4s-jackson.
The aim of the Json4s project is to provide a common API for Scala JSON libraries. It is Apache-licensed, easier for downstream distributions to package, and mostly API-compatible with lift-json. Furthermore, the Jackson-backed implementation parses faster than lift-json on all but the smallest inputs.

Author: William Benton <willb@redhat.com>

Closes #582 from willb/json4s and squashes the following commits:

7ca62c4 [William Benton] Replace lift-json with json4s-jackson.
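Not part of the patch, but as an illustration of the API overlap the commit message describes, here is a minimal, self-contained sketch of the json4s-jackson calls this diff switches to (parse, \ navigation, extract, and compact). It assumes only json4s-jackson 3.2.6 on the classpath; the object name and the JSON literal are invented for the example.

```scala
import org.json4s._
import org.json4s.jackson.JsonMethods

object Json4sSketch extends App {
  // Same implicit-formats idiom the lift-json code used,
  // now supplied by org.json4s.DefaultFormats.
  implicit val formats: Formats = DefaultFormats

  // JsonMethods.parse replaces net.liftweb.json.JsonParser.parse.
  val json: JValue =
    JsonMethods.parse("""{"workers":[{"state":"ALIVE"},{"state":"DEAD"}]}""")

  // The \ navigation and extract[T] calls are unchanged from lift-json.
  val liveWorkers =
    (json \ "workers").children.filter(w => (w \ "state").extract[String] == "ALIVE")
  println(liveWorkers.size) // 1

  // JsonMethods.compact replaces JsonAST.compactRender for serialization.
  println(JsonMethods.compact(json))
}
```

As the diff below shows, the only call sites that change are those touching lift-json-specific entry points (JsonParser.parse, JsonAST.compactRender, the ParseException type); code built on JValue, DefaultFormats, and the JSON DSL is left untouched.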
-rw-r--r--  core/pom.xml                                                                 |  5
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/FaultToleranceTest.scala         |  9
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/JsonProtocol.scala               |  2
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/master/ui/ApplicationPage.scala  |  3
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/master/ui/IndexPage.scala        |  3
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/worker/ui/IndexPage.scala        |  2
-rw-r--r--  core/src/main/scala/org/apache/spark/ui/JettyUtils.scala                     |  3
-rw-r--r--  core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala          | 27
-rw-r--r--  project/SparkBuild.scala                                                     |  2
9 files changed, 32 insertions, 24 deletions
diff --git a/core/pom.xml b/core/pom.xml
index f209704f31..ebc178a105 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -130,8 +130,9 @@
<artifactId>scala-library</artifactId>
</dependency>
<dependency>
- <groupId>net.liftweb</groupId>
- <artifactId>lift-json_${scala.binary.version}</artifactId>
+ <groupId>org.json4s</groupId>
+ <artifactId>json4s-jackson_${scala.binary.version}</artifactId>
+ <version>3.2.6</version>
</dependency>
<dependency>
<groupId>it.unimi.dsi</groupId>
diff --git a/core/src/main/scala/org/apache/spark/deploy/FaultToleranceTest.scala b/core/src/main/scala/org/apache/spark/deploy/FaultToleranceTest.scala
index 190b331cfe..d48c1892ae 100644
--- a/core/src/main/scala/org/apache/spark/deploy/FaultToleranceTest.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/FaultToleranceTest.scala
@@ -27,7 +27,8 @@ import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._
import scala.sys.process._
-import net.liftweb.json.JsonParser
+import org.json4s._
+import org.json4s.jackson.JsonMethods
import org.apache.spark.{Logging, SparkContext}
import org.apache.spark.deploy.master.RecoveryState
@@ -311,7 +312,7 @@ private[spark] object FaultToleranceTest extends App with Logging {
private[spark] class TestMasterInfo(val ip: String, val dockerId: DockerId, val logFile: File)
extends Logging {
- implicit val formats = net.liftweb.json.DefaultFormats
+ implicit val formats = org.json4s.DefaultFormats
var state: RecoveryState.Value = _
var liveWorkerIPs: List[String] = _
var numLiveApps = 0
@@ -321,7 +322,7 @@ private[spark] class TestMasterInfo(val ip: String, val dockerId: DockerId, val
def readState() {
try {
val masterStream = new InputStreamReader(new URL("http://%s:8080/json".format(ip)).openStream)
- val json = JsonParser.parse(masterStream, closeAutomatically = true)
+ val json = JsonMethods.parse(masterStream)
val workers = json \ "workers"
val liveWorkers = workers.children.filter(w => (w \ "state").extract[String] == "ALIVE")
@@ -349,7 +350,7 @@ private[spark] class TestMasterInfo(val ip: String, val dockerId: DockerId, val
private[spark] class TestWorkerInfo(val ip: String, val dockerId: DockerId, val logFile: File)
extends Logging {
- implicit val formats = net.liftweb.json.DefaultFormats
+ implicit val formats = org.json4s.DefaultFormats
logDebug("Created worker: " + this)
diff --git a/core/src/main/scala/org/apache/spark/deploy/JsonProtocol.scala b/core/src/main/scala/org/apache/spark/deploy/JsonProtocol.scala
index 318beb5db5..cefb1ff97e 100644
--- a/core/src/main/scala/org/apache/spark/deploy/JsonProtocol.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/JsonProtocol.scala
@@ -17,7 +17,7 @@
package org.apache.spark.deploy
-import net.liftweb.json.JsonDSL._
+import org.json4s.JsonDSL._
import org.apache.spark.deploy.DeployMessages.{MasterStateResponse, WorkerStateResponse}
import org.apache.spark.deploy.master.{ApplicationInfo, DriverInfo, WorkerInfo}
diff --git a/core/src/main/scala/org/apache/spark/deploy/master/ui/ApplicationPage.scala b/core/src/main/scala/org/apache/spark/deploy/master/ui/ApplicationPage.scala
index 5cc4adbe44..90cad3c37f 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/ui/ApplicationPage.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/ui/ApplicationPage.scala
@@ -23,7 +23,8 @@ import scala.concurrent.Await
import scala.xml.Node
import akka.pattern.ask
-import net.liftweb.json.JsonAST.JValue
+import javax.servlet.http.HttpServletRequest
+import org.json4s.JValue
import org.apache.spark.deploy.JsonProtocol
import org.apache.spark.deploy.DeployMessages.{MasterStateResponse, RequestMasterState}
diff --git a/core/src/main/scala/org/apache/spark/deploy/master/ui/IndexPage.scala b/core/src/main/scala/org/apache/spark/deploy/master/ui/IndexPage.scala
index 01c8f9065e..bac922bcd3 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/ui/IndexPage.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/ui/IndexPage.scala
@@ -23,7 +23,8 @@ import scala.concurrent.Await
import scala.xml.Node
import akka.pattern.ask
-import net.liftweb.json.JsonAST.JValue
+import javax.servlet.http.HttpServletRequest
+import org.json4s.JValue
import org.apache.spark.deploy.{DeployWebUI, JsonProtocol}
import org.apache.spark.deploy.DeployMessages.{MasterStateResponse, RequestMasterState}
diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/ui/IndexPage.scala b/core/src/main/scala/org/apache/spark/deploy/worker/ui/IndexPage.scala
index 3089acffb8..85200ab0e1 100644
--- a/core/src/main/scala/org/apache/spark/deploy/worker/ui/IndexPage.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/ui/IndexPage.scala
@@ -22,7 +22,7 @@ import scala.xml.Node
import akka.pattern.ask
import javax.servlet.http.HttpServletRequest
-import net.liftweb.json.JsonAST.JValue
+import org.json4s.JValue
import org.apache.spark.deploy.JsonProtocol
import org.apache.spark.deploy.DeployMessages.{RequestWorkerState, WorkerStateResponse}
diff --git a/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala b/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala
index 1f048a84cd..1b78c52ff6 100644
--- a/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala
+++ b/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala
@@ -24,7 +24,8 @@ import scala.annotation.tailrec
import scala.util.{Failure, Success, Try}
import scala.xml.Node
-import net.liftweb.json.{JValue, pretty, render}
+import org.json4s.JValue
+import org.json4s.jackson.JsonMethods.{pretty, render}
import org.eclipse.jetty.server.{Handler, Request, Server}
import org.eclipse.jetty.server.handler.{AbstractHandler, ContextHandler, HandlerList, ResourceHandler}
import org.eclipse.jetty.util.thread.QueuedThreadPool
diff --git a/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala b/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala
index de866ed7ff..bae3b37e26 100644
--- a/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala
@@ -20,9 +20,12 @@ package org.apache.spark.deploy
import java.io.File
import java.util.Date
-import net.liftweb.json.Diff
-import net.liftweb.json.{JsonAST, JsonParser}
-import net.liftweb.json.JsonAST.{JNothing, JValue}
+import org.json4s._
+
+import org.json4s.JValue
+import org.json4s.jackson.JsonMethods
+import com.fasterxml.jackson.core.JsonParseException
+
import org.scalatest.FunSuite
import org.apache.spark.deploy.DeployMessages.{MasterStateResponse, WorkerStateResponse}
@@ -34,31 +37,31 @@ class JsonProtocolSuite extends FunSuite {
test("writeApplicationInfo") {
val output = JsonProtocol.writeApplicationInfo(createAppInfo())
assertValidJson(output)
- assertValidDataInJson(output, JsonParser.parse(JsonConstants.appInfoJsonStr))
+ assertValidDataInJson(output, JsonMethods.parse(JsonConstants.appInfoJsonStr))
}
test("writeWorkerInfo") {
val output = JsonProtocol.writeWorkerInfo(createWorkerInfo())
assertValidJson(output)
- assertValidDataInJson(output, JsonParser.parse(JsonConstants.workerInfoJsonStr))
+ assertValidDataInJson(output, JsonMethods.parse(JsonConstants.workerInfoJsonStr))
}
test("writeApplicationDescription") {
val output = JsonProtocol.writeApplicationDescription(createAppDesc())
assertValidJson(output)
- assertValidDataInJson(output, JsonParser.parse(JsonConstants.appDescJsonStr))
+ assertValidDataInJson(output, JsonMethods.parse(JsonConstants.appDescJsonStr))
}
test("writeExecutorRunner") {
val output = JsonProtocol.writeExecutorRunner(createExecutorRunner())
assertValidJson(output)
- assertValidDataInJson(output, JsonParser.parse(JsonConstants.executorRunnerJsonStr))
+ assertValidDataInJson(output, JsonMethods.parse(JsonConstants.executorRunnerJsonStr))
}
test("writeDriverInfo") {
val output = JsonProtocol.writeDriverInfo(createDriverInfo())
assertValidJson(output)
- assertValidDataInJson(output, JsonParser.parse(JsonConstants.driverInfoJsonStr))
+ assertValidDataInJson(output, JsonMethods.parse(JsonConstants.driverInfoJsonStr))
}
test("writeMasterState") {
@@ -71,7 +74,7 @@ class JsonProtocolSuite extends FunSuite {
activeDrivers, completedDrivers, RecoveryState.ALIVE)
val output = JsonProtocol.writeMasterState(stateResponse)
assertValidJson(output)
- assertValidDataInJson(output, JsonParser.parse(JsonConstants.masterStateJsonStr))
+ assertValidDataInJson(output, JsonMethods.parse(JsonConstants.masterStateJsonStr))
}
test("writeWorkerState") {
@@ -83,7 +86,7 @@ class JsonProtocolSuite extends FunSuite {
finishedExecutors, drivers, finishedDrivers, "masterUrl", 4, 1234, 4, 1234, "masterWebUiUrl")
val output = JsonProtocol.writeWorkerState(stateResponse)
assertValidJson(output)
- assertValidDataInJson(output, JsonParser.parse(JsonConstants.workerStateJsonStr))
+ assertValidDataInJson(output, JsonMethods.parse(JsonConstants.workerStateJsonStr))
}
def createAppDesc(): ApplicationDescription = {
@@ -125,9 +128,9 @@ class JsonProtocolSuite extends FunSuite {
def assertValidJson(json: JValue) {
try {
- JsonParser.parse(JsonAST.compactRender(json))
+ JsonMethods.parse(JsonMethods.compact(json))
} catch {
- case e: JsonParser.ParseException => fail("Invalid Json detected", e)
+ case e: JsonParseException => fail("Invalid Json detected", e)
}
}
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 220894affb..7d157160d1 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -268,7 +268,7 @@ object SparkBuild extends Build {
"org.spark-project.akka" %% "akka-remote" % "2.2.3-shaded-protobuf" excludeAll(excludeNetty),
"org.spark-project.akka" %% "akka-slf4j" % "2.2.3-shaded-protobuf" excludeAll(excludeNetty),
"org.spark-project.akka" %% "akka-testkit" % "2.2.3-shaded-protobuf" % "test",
- "net.liftweb" %% "lift-json" % "2.5.1" excludeAll(excludeNetty),
+ "org.json4s" %% "json4s-jackson" % "3.2.6",
"it.unimi.dsi" % "fastutil" % "6.4.4",
"colt" % "colt" % "1.2.0",
"org.apache.mesos" % "mesos" % "0.13.0",