aboutsummaryrefslogtreecommitdiff
path: root/repl
diff options
context:
space:
mode:
authorjerryshao <sshao@hortonworks.com>2016-04-20 10:48:11 -0700
committerReynold Xin <rxin@databricks.com>2016-04-20 10:48:11 -0700
commit90cbc82fd4114219a5a0f180b1908a18985fda3e (patch)
treed71e6cd6caa0c06ec5ef79e6da1958c491da352d /repl
parentb4e76a9a3b58822fcbe5a8b137618a32c4033755 (diff)
downloadspark-90cbc82fd4114219a5a0f180b1908a18985fda3e.tar.gz
spark-90cbc82fd4114219a5a0f180b1908a18985fda3e.tar.bz2
spark-90cbc82fd4114219a5a0f180b1908a18985fda3e.zip
[SPARK-14725][CORE] Remove HttpServer class
## What changes were proposed in this pull request? This proposal removes the class `HttpServer`. With the move of internal file/jar/class transmission to the RPC layer, there is currently no code using `HttpServer`, so this patch proposes removing it. ## How was this patch tested? Unit tests were verified locally. Author: jerryshao <sshao@hortonworks.com> Closes #12526 from jerryshao/SPARK-14725.
Diffstat (limited to 'repl')
-rw-r--r--repl/src/test/scala/org/apache/spark/repl/ExecutorClassLoaderSuite.scala53
1 file changed, 0 insertions, 53 deletions
diff --git a/repl/src/test/scala/org/apache/spark/repl/ExecutorClassLoaderSuite.scala b/repl/src/test/scala/org/apache/spark/repl/ExecutorClassLoaderSuite.scala
index 9a143ee36f..12e98565dc 100644
--- a/repl/src/test/scala/org/apache/spark/repl/ExecutorClassLoaderSuite.scala
+++ b/repl/src/test/scala/org/apache/spark/repl/ExecutorClassLoaderSuite.scala
@@ -57,7 +57,6 @@ class ExecutorClassLoaderSuite
var tempDir2: File = _
var url1: String = _
var urls2: Array[URL] = _
- var classServer: HttpServer = _
override def beforeAll() {
super.beforeAll()
@@ -74,9 +73,6 @@ class ExecutorClassLoaderSuite
override def afterAll() {
try {
- if (classServer != null) {
- classServer.stop()
- }
Utils.deleteRecursively(tempDir1)
Utils.deleteRecursively(tempDir2)
SparkEnv.set(null)
@@ -137,55 +133,6 @@ class ExecutorClassLoaderSuite
assert(fileReader.readLine().contains("resource"), "File doesn't contain 'resource'")
}
- test("failing to fetch classes from HTTP server should not leak resources (SPARK-6209)") {
- // This is a regression test for SPARK-6209, a bug where each failed attempt to load a class
- // from the driver's class server would leak a HTTP connection, causing the class server's
- // thread / connection pool to be exhausted.
- val conf = new SparkConf()
- val securityManager = new SecurityManager(conf)
- classServer = new HttpServer(conf, tempDir1, securityManager)
- classServer.start()
- // ExecutorClassLoader uses SparkEnv's SecurityManager, so we need to mock this
- val mockEnv = mock[SparkEnv]
- when(mockEnv.securityManager).thenReturn(securityManager)
- SparkEnv.set(mockEnv)
- // Create an ExecutorClassLoader that's configured to load classes from the HTTP server
- val parentLoader = new URLClassLoader(Array.empty, null)
- val classLoader = new ExecutorClassLoader(conf, null, classServer.uri, parentLoader, false)
- classLoader.httpUrlConnectionTimeoutMillis = 500
- // Check that this class loader can actually load classes that exist
- val fakeClass = classLoader.loadClass("ReplFakeClass2").newInstance()
- val fakeClassVersion = fakeClass.toString
- assert(fakeClassVersion === "1")
- // Try to perform a full GC now, since GC during the test might mask resource leaks
- System.gc()
- // When the original bug occurs, the test thread becomes blocked in a classloading call
- // and does not respond to interrupts. Therefore, use a custom ScalaTest interruptor to
- // shut down the HTTP server when the test times out
- val interruptor: Interruptor = new Interruptor {
- override def apply(thread: Thread): Unit = {
- classServer.stop()
- classServer = null
- thread.interrupt()
- }
- }
- def tryAndFailToLoadABunchOfClasses(): Unit = {
- // The number of trials here should be much larger than Jetty's thread / connection limit
- // in order to expose thread or connection leaks
- for (i <- 1 to 1000) {
- if (Thread.currentThread().isInterrupted) {
- throw new InterruptedException()
- }
- // Incorporate the iteration number into the class name in order to avoid any response
- // caching that might be added in the future
- intercept[ClassNotFoundException] {
- classLoader.loadClass(s"ReplFakeClassDoesNotExist$i").newInstance()
- }
- }
- }
- failAfter(10 seconds)(tryAndFailToLoadABunchOfClasses())(interruptor)
- }
-
test("fetch classes using Spark's RpcEnv") {
val env = mock[SparkEnv]
val rpcEnv = mock[RpcEnv]