Skip to content
Snippets Groups Projects
Commit fbedc8ef authored by William Benton's avatar William Benton Committed by Aaron Davidson
Browse files

SPARK-1078: Replace lift-json with json4s-jackson.

The aim of the Json4s project is to provide a common API for
Scala JSON libraries.  It is Apache-licensed, easier for
downstream distributions to package, and mostly API-compatible
with lift-json.  Furthermore, the Jackson-backed implementation
parses faster than lift-json on all but the smallest inputs.

Author: William Benton <willb@redhat.com>

Closes #582 from willb/json4s and squashes the following commits:

7ca62c4 [William Benton] Replace lift-json with json4s-jackson.
parent b8a18719
No related branches found
No related tags found
No related merge requests found
...@@ -130,8 +130,9 @@ ...@@ -130,8 +130,9 @@
<artifactId>scala-library</artifactId> <artifactId>scala-library</artifactId>
</dependency> </dependency>
<dependency> <dependency>
<groupId>net.liftweb</groupId> <groupId>org.json4s</groupId>
<artifactId>lift-json_${scala.binary.version}</artifactId> <artifactId>json4s-jackson_${scala.binary.version}</artifactId>
<version>3.2.6</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>it.unimi.dsi</groupId> <groupId>it.unimi.dsi</groupId>
......
...@@ -27,7 +27,8 @@ import scala.concurrent.ExecutionContext.Implicits.global ...@@ -27,7 +27,8 @@ import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._ import scala.concurrent.duration._
import scala.sys.process._ import scala.sys.process._
import net.liftweb.json.JsonParser import org.json4s._
import org.json4s.jackson.JsonMethods
import org.apache.spark.{Logging, SparkContext} import org.apache.spark.{Logging, SparkContext}
import org.apache.spark.deploy.master.RecoveryState import org.apache.spark.deploy.master.RecoveryState
...@@ -311,7 +312,7 @@ private[spark] object FaultToleranceTest extends App with Logging { ...@@ -311,7 +312,7 @@ private[spark] object FaultToleranceTest extends App with Logging {
private[spark] class TestMasterInfo(val ip: String, val dockerId: DockerId, val logFile: File) private[spark] class TestMasterInfo(val ip: String, val dockerId: DockerId, val logFile: File)
extends Logging { extends Logging {
implicit val formats = net.liftweb.json.DefaultFormats implicit val formats = org.json4s.DefaultFormats
var state: RecoveryState.Value = _ var state: RecoveryState.Value = _
var liveWorkerIPs: List[String] = _ var liveWorkerIPs: List[String] = _
var numLiveApps = 0 var numLiveApps = 0
...@@ -321,7 +322,7 @@ private[spark] class TestMasterInfo(val ip: String, val dockerId: DockerId, val ...@@ -321,7 +322,7 @@ private[spark] class TestMasterInfo(val ip: String, val dockerId: DockerId, val
def readState() { def readState() {
try { try {
val masterStream = new InputStreamReader(new URL("http://%s:8080/json".format(ip)).openStream) val masterStream = new InputStreamReader(new URL("http://%s:8080/json".format(ip)).openStream)
val json = JsonParser.parse(masterStream, closeAutomatically = true) val json = JsonMethods.parse(masterStream)
val workers = json \ "workers" val workers = json \ "workers"
val liveWorkers = workers.children.filter(w => (w \ "state").extract[String] == "ALIVE") val liveWorkers = workers.children.filter(w => (w \ "state").extract[String] == "ALIVE")
...@@ -349,7 +350,7 @@ private[spark] class TestMasterInfo(val ip: String, val dockerId: DockerId, val ...@@ -349,7 +350,7 @@ private[spark] class TestMasterInfo(val ip: String, val dockerId: DockerId, val
private[spark] class TestWorkerInfo(val ip: String, val dockerId: DockerId, val logFile: File) private[spark] class TestWorkerInfo(val ip: String, val dockerId: DockerId, val logFile: File)
extends Logging { extends Logging {
implicit val formats = net.liftweb.json.DefaultFormats implicit val formats = org.json4s.DefaultFormats
logDebug("Created worker: " + this) logDebug("Created worker: " + this)
......
...@@ -17,7 +17,7 @@ ...@@ -17,7 +17,7 @@
package org.apache.spark.deploy package org.apache.spark.deploy
import net.liftweb.json.JsonDSL._ import org.json4s.JsonDSL._
import org.apache.spark.deploy.DeployMessages.{MasterStateResponse, WorkerStateResponse} import org.apache.spark.deploy.DeployMessages.{MasterStateResponse, WorkerStateResponse}
import org.apache.spark.deploy.master.{ApplicationInfo, DriverInfo, WorkerInfo} import org.apache.spark.deploy.master.{ApplicationInfo, DriverInfo, WorkerInfo}
......
...@@ -23,7 +23,8 @@ import scala.concurrent.Await ...@@ -23,7 +23,8 @@ import scala.concurrent.Await
import scala.xml.Node import scala.xml.Node
import akka.pattern.ask import akka.pattern.ask
import net.liftweb.json.JsonAST.JValue import javax.servlet.http.HttpServletRequest
import org.json4s.JValue
import org.apache.spark.deploy.JsonProtocol import org.apache.spark.deploy.JsonProtocol
import org.apache.spark.deploy.DeployMessages.{MasterStateResponse, RequestMasterState} import org.apache.spark.deploy.DeployMessages.{MasterStateResponse, RequestMasterState}
......
...@@ -23,7 +23,8 @@ import scala.concurrent.Await ...@@ -23,7 +23,8 @@ import scala.concurrent.Await
import scala.xml.Node import scala.xml.Node
import akka.pattern.ask import akka.pattern.ask
import net.liftweb.json.JsonAST.JValue import javax.servlet.http.HttpServletRequest
import org.json4s.JValue
import org.apache.spark.deploy.{DeployWebUI, JsonProtocol} import org.apache.spark.deploy.{DeployWebUI, JsonProtocol}
import org.apache.spark.deploy.DeployMessages.{MasterStateResponse, RequestMasterState} import org.apache.spark.deploy.DeployMessages.{MasterStateResponse, RequestMasterState}
......
...@@ -22,7 +22,7 @@ import scala.xml.Node ...@@ -22,7 +22,7 @@ import scala.xml.Node
import akka.pattern.ask import akka.pattern.ask
import javax.servlet.http.HttpServletRequest import javax.servlet.http.HttpServletRequest
import net.liftweb.json.JsonAST.JValue import org.json4s.JValue
import org.apache.spark.deploy.JsonProtocol import org.apache.spark.deploy.JsonProtocol
import org.apache.spark.deploy.DeployMessages.{RequestWorkerState, WorkerStateResponse} import org.apache.spark.deploy.DeployMessages.{RequestWorkerState, WorkerStateResponse}
......
...@@ -24,7 +24,8 @@ import scala.annotation.tailrec ...@@ -24,7 +24,8 @@ import scala.annotation.tailrec
import scala.util.{Failure, Success, Try} import scala.util.{Failure, Success, Try}
import scala.xml.Node import scala.xml.Node
import net.liftweb.json.{JValue, pretty, render} import org.json4s.JValue
import org.json4s.jackson.JsonMethods.{pretty, render}
import org.eclipse.jetty.server.{Handler, Request, Server} import org.eclipse.jetty.server.{Handler, Request, Server}
import org.eclipse.jetty.server.handler.{AbstractHandler, ContextHandler, HandlerList, ResourceHandler} import org.eclipse.jetty.server.handler.{AbstractHandler, ContextHandler, HandlerList, ResourceHandler}
import org.eclipse.jetty.util.thread.QueuedThreadPool import org.eclipse.jetty.util.thread.QueuedThreadPool
......
...@@ -20,9 +20,12 @@ package org.apache.spark.deploy ...@@ -20,9 +20,12 @@ package org.apache.spark.deploy
import java.io.File import java.io.File
import java.util.Date import java.util.Date
import net.liftweb.json.Diff import org.json4s._
import net.liftweb.json.{JsonAST, JsonParser}
import net.liftweb.json.JsonAST.{JNothing, JValue} import org.json4s.JValue
import org.json4s.jackson.JsonMethods
import com.fasterxml.jackson.core.JsonParseException
import org.scalatest.FunSuite import org.scalatest.FunSuite
import org.apache.spark.deploy.DeployMessages.{MasterStateResponse, WorkerStateResponse} import org.apache.spark.deploy.DeployMessages.{MasterStateResponse, WorkerStateResponse}
...@@ -34,31 +37,31 @@ class JsonProtocolSuite extends FunSuite { ...@@ -34,31 +37,31 @@ class JsonProtocolSuite extends FunSuite {
test("writeApplicationInfo") { test("writeApplicationInfo") {
val output = JsonProtocol.writeApplicationInfo(createAppInfo()) val output = JsonProtocol.writeApplicationInfo(createAppInfo())
assertValidJson(output) assertValidJson(output)
assertValidDataInJson(output, JsonParser.parse(JsonConstants.appInfoJsonStr)) assertValidDataInJson(output, JsonMethods.parse(JsonConstants.appInfoJsonStr))
} }
test("writeWorkerInfo") { test("writeWorkerInfo") {
val output = JsonProtocol.writeWorkerInfo(createWorkerInfo()) val output = JsonProtocol.writeWorkerInfo(createWorkerInfo())
assertValidJson(output) assertValidJson(output)
assertValidDataInJson(output, JsonParser.parse(JsonConstants.workerInfoJsonStr)) assertValidDataInJson(output, JsonMethods.parse(JsonConstants.workerInfoJsonStr))
} }
test("writeApplicationDescription") { test("writeApplicationDescription") {
val output = JsonProtocol.writeApplicationDescription(createAppDesc()) val output = JsonProtocol.writeApplicationDescription(createAppDesc())
assertValidJson(output) assertValidJson(output)
assertValidDataInJson(output, JsonParser.parse(JsonConstants.appDescJsonStr)) assertValidDataInJson(output, JsonMethods.parse(JsonConstants.appDescJsonStr))
} }
test("writeExecutorRunner") { test("writeExecutorRunner") {
val output = JsonProtocol.writeExecutorRunner(createExecutorRunner()) val output = JsonProtocol.writeExecutorRunner(createExecutorRunner())
assertValidJson(output) assertValidJson(output)
assertValidDataInJson(output, JsonParser.parse(JsonConstants.executorRunnerJsonStr)) assertValidDataInJson(output, JsonMethods.parse(JsonConstants.executorRunnerJsonStr))
} }
test("writeDriverInfo") { test("writeDriverInfo") {
val output = JsonProtocol.writeDriverInfo(createDriverInfo()) val output = JsonProtocol.writeDriverInfo(createDriverInfo())
assertValidJson(output) assertValidJson(output)
assertValidDataInJson(output, JsonParser.parse(JsonConstants.driverInfoJsonStr)) assertValidDataInJson(output, JsonMethods.parse(JsonConstants.driverInfoJsonStr))
} }
test("writeMasterState") { test("writeMasterState") {
...@@ -71,7 +74,7 @@ class JsonProtocolSuite extends FunSuite { ...@@ -71,7 +74,7 @@ class JsonProtocolSuite extends FunSuite {
activeDrivers, completedDrivers, RecoveryState.ALIVE) activeDrivers, completedDrivers, RecoveryState.ALIVE)
val output = JsonProtocol.writeMasterState(stateResponse) val output = JsonProtocol.writeMasterState(stateResponse)
assertValidJson(output) assertValidJson(output)
assertValidDataInJson(output, JsonParser.parse(JsonConstants.masterStateJsonStr)) assertValidDataInJson(output, JsonMethods.parse(JsonConstants.masterStateJsonStr))
} }
test("writeWorkerState") { test("writeWorkerState") {
...@@ -83,7 +86,7 @@ class JsonProtocolSuite extends FunSuite { ...@@ -83,7 +86,7 @@ class JsonProtocolSuite extends FunSuite {
finishedExecutors, drivers, finishedDrivers, "masterUrl", 4, 1234, 4, 1234, "masterWebUiUrl") finishedExecutors, drivers, finishedDrivers, "masterUrl", 4, 1234, 4, 1234, "masterWebUiUrl")
val output = JsonProtocol.writeWorkerState(stateResponse) val output = JsonProtocol.writeWorkerState(stateResponse)
assertValidJson(output) assertValidJson(output)
assertValidDataInJson(output, JsonParser.parse(JsonConstants.workerStateJsonStr)) assertValidDataInJson(output, JsonMethods.parse(JsonConstants.workerStateJsonStr))
} }
def createAppDesc(): ApplicationDescription = { def createAppDesc(): ApplicationDescription = {
...@@ -125,9 +128,9 @@ class JsonProtocolSuite extends FunSuite { ...@@ -125,9 +128,9 @@ class JsonProtocolSuite extends FunSuite {
def assertValidJson(json: JValue) { def assertValidJson(json: JValue) {
try { try {
JsonParser.parse(JsonAST.compactRender(json)) JsonMethods.parse(JsonMethods.compact(json))
} catch { } catch {
case e: JsonParser.ParseException => fail("Invalid Json detected", e) case e: JsonParseException => fail("Invalid Json detected", e)
} }
} }
......
...@@ -268,7 +268,7 @@ object SparkBuild extends Build { ...@@ -268,7 +268,7 @@ object SparkBuild extends Build {
"org.spark-project.akka" %% "akka-remote" % "2.2.3-shaded-protobuf" excludeAll(excludeNetty), "org.spark-project.akka" %% "akka-remote" % "2.2.3-shaded-protobuf" excludeAll(excludeNetty),
"org.spark-project.akka" %% "akka-slf4j" % "2.2.3-shaded-protobuf" excludeAll(excludeNetty), "org.spark-project.akka" %% "akka-slf4j" % "2.2.3-shaded-protobuf" excludeAll(excludeNetty),
"org.spark-project.akka" %% "akka-testkit" % "2.2.3-shaded-protobuf" % "test", "org.spark-project.akka" %% "akka-testkit" % "2.2.3-shaded-protobuf" % "test",
"net.liftweb" %% "lift-json" % "2.5.1" excludeAll(excludeNetty), "org.json4s" %% "json4s-jackson" % "3.2.6",
"it.unimi.dsi" % "fastutil" % "6.4.4", "it.unimi.dsi" % "fastutil" % "6.4.4",
"colt" % "colt" % "1.2.0", "colt" % "colt" % "1.2.0",
"org.apache.mesos" % "mesos" % "0.13.0", "org.apache.mesos" % "mesos" % "0.13.0",
......
0% — Loading, or an error occurred.
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment