aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r-- core/pom.xml                                                                                  | 15
-rw-r--r-- core/src/main/scala/org/apache/spark/SSLOptions.scala                                         |  2
-rw-r--r-- core/src/main/scala/org/apache/spark/deploy/rest/RestSubmissionServer.scala                   | 14
-rw-r--r-- core/src/main/scala/org/apache/spark/ui/JettyUtils.scala                                      | 37
-rw-r--r-- dev/deps/spark-deps-hadoop-2.2                                                                |  4
-rw-r--r-- dev/deps/spark-deps-hadoop-2.3                                                                |  4
-rw-r--r-- dev/deps/spark-deps-hadoop-2.4                                                                |  4
-rw-r--r-- dev/deps/spark-deps-hadoop-2.6                                                                |  4
-rw-r--r-- dev/deps/spark-deps-hadoop-2.7                                                                |  4
-rw-r--r-- pom.xml                                                                                       | 13
-rw-r--r-- sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java | 16
-rw-r--r-- streaming/pom.xml                                                                             |  4
-rw-r--r-- yarn/pom.xml                                                                                  |  4
13 files changed, 72 insertions(+), 53 deletions(-)
diff --git a/core/pom.xml b/core/pom.xml
index 07b5896376..8584b62c0e 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -125,12 +125,15 @@
<artifactId>jetty-servlet</artifactId>
<scope>compile</scope>
</dependency>
- <!-- Because we mark jetty as provided and shade it, its dependency
- orbit is ignored, so we explicitly list it here (see SPARK-5557).-->
<dependency>
- <groupId>org.eclipse.jetty.orbit</groupId>
- <artifactId>javax.servlet</artifactId>
- <version>${orbit.version}</version>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-servlets</artifactId>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>javax.servlet</groupId>
+ <artifactId>javax.servlet-api</artifactId>
+ <version>${javaxservlet.version}</version>
</dependency>
<dependency>
@@ -356,7 +359,7 @@
<overWriteIfNewer>true</overWriteIfNewer>
<useSubDirectoryPerType>true</useSubDirectoryPerType>
<includeArtifactIds>
- guava,jetty-io,jetty-servlet,jetty-continuation,jetty-http,jetty-plus,jetty-util,jetty-server,jetty-security
+ guava,jetty-io,jetty-servlet,jetty-servlets,jetty-continuation,jetty-http,jetty-plus,jetty-util,jetty-server,jetty-security
</includeArtifactIds>
<silent>true</silent>
</configuration>
diff --git a/core/src/main/scala/org/apache/spark/SSLOptions.scala b/core/src/main/scala/org/apache/spark/SSLOptions.scala
index 719905a2c9..be19179b00 100644
--- a/core/src/main/scala/org/apache/spark/SSLOptions.scala
+++ b/core/src/main/scala/org/apache/spark/SSLOptions.scala
@@ -71,7 +71,7 @@ private[spark] case class SSLOptions(
keyPassword.foreach(sslContextFactory.setKeyManagerPassword)
keyStoreType.foreach(sslContextFactory.setKeyStoreType)
if (needClientAuth) {
- trustStore.foreach(file => sslContextFactory.setTrustStore(file.getAbsolutePath))
+ trustStore.foreach(file => sslContextFactory.setTrustStorePath(file.getAbsolutePath))
trustStorePassword.foreach(sslContextFactory.setTrustStorePassword)
trustStoreType.foreach(sslContextFactory.setTrustStoreType)
}
diff --git a/core/src/main/scala/org/apache/spark/deploy/rest/RestSubmissionServer.scala b/core/src/main/scala/org/apache/spark/deploy/rest/RestSubmissionServer.scala
index 14244ea571..7e93bfc45e 100644
--- a/core/src/main/scala/org/apache/spark/deploy/rest/RestSubmissionServer.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/rest/RestSubmissionServer.scala
@@ -17,13 +17,12 @@
package org.apache.spark.deploy.rest
-import java.net.InetSocketAddress
import javax.servlet.http.{HttpServlet, HttpServletRequest, HttpServletResponse}
import scala.io.Source
import com.fasterxml.jackson.core.JsonProcessingException
-import org.eclipse.jetty.server.Server
+import org.eclipse.jetty.server.{Server, ServerConnector}
import org.eclipse.jetty.servlet.{ServletContextHandler, ServletHolder}
import org.eclipse.jetty.util.thread.QueuedThreadPool
import org.json4s._
@@ -80,10 +79,15 @@ private[spark] abstract class RestSubmissionServer(
* Return a 2-tuple of the started server and the bound port.
*/
private def doStart(startPort: Int): (Server, Int) = {
- val server = new Server(new InetSocketAddress(host, startPort))
val threadPool = new QueuedThreadPool
threadPool.setDaemon(true)
- server.setThreadPool(threadPool)
+ val server = new Server(threadPool)
+
+ val connector = new ServerConnector(server)
+ connector.setHost(host)
+ connector.setPort(startPort)
+ server.addConnector(connector)
+
val mainHandler = new ServletContextHandler
mainHandler.setContextPath("/")
contextToServlet.foreach { case (prefix, servlet) =>
@@ -91,7 +95,7 @@ private[spark] abstract class RestSubmissionServer(
}
server.setHandler(mainHandler)
server.start()
- val boundPort = server.getConnectors()(0).getLocalPort
+ val boundPort = connector.getLocalPort
(server, boundPort)
}
diff --git a/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala b/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala
index db24f0319b..6854f7baf7 100644
--- a/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala
+++ b/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala
@@ -25,11 +25,10 @@ import scala.collection.mutable.ArrayBuffer
import scala.language.implicitConversions
import scala.xml.Node
-import org.eclipse.jetty.server.{Connector, Request, Server}
+import org.eclipse.jetty.server.{Request, Server, ServerConnector}
import org.eclipse.jetty.server.handler._
-import org.eclipse.jetty.server.nio.SelectChannelConnector
-import org.eclipse.jetty.server.ssl.SslSelectChannelConnector
import org.eclipse.jetty.servlet._
+import org.eclipse.jetty.servlets.gzip.GzipHandler
import org.eclipse.jetty.util.component.LifeCycle
import org.eclipse.jetty.util.thread.QueuedThreadPool
import org.json4s.JValue
@@ -243,10 +242,16 @@ private[spark] object JettyUtils extends Logging {
// Bind to the given port, or throw a java.net.BindException if the port is occupied
def connect(currentPort: Int): (Server, Int) = {
- val server = new Server
- val connectors = new ArrayBuffer[Connector]
+ val pool = new QueuedThreadPool
+ if (serverName.nonEmpty) {
+ pool.setName(serverName)
+ }
+ pool.setDaemon(true)
+
+ val server = new Server(pool)
+ val connectors = new ArrayBuffer[ServerConnector]
// Create a connector on port currentPort to listen for HTTP requests
- val httpConnector = new SelectChannelConnector()
+ val httpConnector = new ServerConnector(server)
httpConnector.setPort(currentPort)
connectors += httpConnector
@@ -260,8 +265,9 @@ private[spark] object JettyUtils extends Logging {
}
val scheme = "https"
// Create a connector on port securePort to listen for HTTPS requests
- val connector = new SslSelectChannelConnector(factory)
+ val connector = new ServerConnector(server, factory)
connector.setPort(securePort)
+
connectors += connector
// redirect the HTTP requests to HTTPS port
@@ -269,34 +275,27 @@ private[spark] object JettyUtils extends Logging {
}
gzipHandlers.foreach(collection.addHandler)
- connectors.foreach(_.setHost(hostName))
// As each acceptor and each selector will use one thread, the number of threads should at
// least be the number of acceptors and selectors plus 1. (See SPARK-13776)
var minThreads = 1
- connectors.foreach { c =>
+ connectors.foreach { connector =>
// Currently we only use "SelectChannelConnector"
- val connector = c.asInstanceOf[SelectChannelConnector]
// Limit the max acceptor number to 8 so that we don't waste a lot of threads
- connector.setAcceptors(math.min(connector.getAcceptors, 8))
+ connector.setAcceptQueueSize(math.min(connector.getAcceptors, 8))
+ connector.setHost(hostName)
// The number of selectors always equals to the number of acceptors
minThreads += connector.getAcceptors * 2
}
server.setConnectors(connectors.toArray)
-
- val pool = new QueuedThreadPool
- if (serverName.nonEmpty) {
- pool.setName(serverName)
- }
pool.setMaxThreads(math.max(pool.getMaxThreads, minThreads))
- pool.setDaemon(true)
- server.setThreadPool(pool)
+
val errorHandler = new ErrorHandler()
errorHandler.setShowStacks(true)
server.addBean(errorHandler)
server.setHandler(collection)
try {
server.start()
- (server, server.getConnectors.head.getLocalPort)
+ (server, httpConnector.getLocalPort)
} catch {
case e: Exception =>
server.stop()
diff --git a/dev/deps/spark-deps-hadoop-2.2 b/dev/deps/spark-deps-hadoop-2.2
index cc934e9e6f..83bdd90319 100644
--- a/dev/deps/spark-deps-hadoop-2.2
+++ b/dev/deps/spark-deps-hadoop-2.2
@@ -46,7 +46,7 @@ curator-recipes-2.4.0.jar
datanucleus-api-jdo-3.2.6.jar
datanucleus-core-3.2.10.jar
datanucleus-rdbms-3.2.9.jar
-derby-10.10.1.1.jar
+derby-10.11.1.1.jar
eigenbase-properties-1.1.5.jar
guava-14.0.1.jar
guice-3.0.jar
@@ -83,7 +83,7 @@ javassist-3.18.1-GA.jar
javax.annotation-api-1.2.jar
javax.inject-1.jar
javax.inject-2.4.0-b34.jar
-javax.servlet-3.0.0.v201112011016.jar
+javax.servlet-api-3.1.0.jar
javax.ws.rs-api-2.0.1.jar
javolution-5.5.1.jar
jcl-over-slf4j-1.7.16.jar
diff --git a/dev/deps/spark-deps-hadoop-2.3 b/dev/deps/spark-deps-hadoop-2.3
index f85aab3f93..121e282618 100644
--- a/dev/deps/spark-deps-hadoop-2.3
+++ b/dev/deps/spark-deps-hadoop-2.3
@@ -48,7 +48,7 @@ curator-recipes-2.4.0.jar
datanucleus-api-jdo-3.2.6.jar
datanucleus-core-3.2.10.jar
datanucleus-rdbms-3.2.9.jar
-derby-10.10.1.1.jar
+derby-10.11.1.1.jar
eigenbase-properties-1.1.5.jar
guava-14.0.1.jar
guice-3.0.jar
@@ -86,7 +86,7 @@ javassist-3.18.1-GA.jar
javax.annotation-api-1.2.jar
javax.inject-1.jar
javax.inject-2.4.0-b34.jar
-javax.servlet-3.0.0.v201112011016.jar
+javax.servlet-api-3.1.0.jar
javax.ws.rs-api-2.0.1.jar
javolution-5.5.1.jar
jaxb-api-2.2.2.jar
diff --git a/dev/deps/spark-deps-hadoop-2.4 b/dev/deps/spark-deps-hadoop-2.4
index dfcd35193c..1d5ad27132 100644
--- a/dev/deps/spark-deps-hadoop-2.4
+++ b/dev/deps/spark-deps-hadoop-2.4
@@ -48,7 +48,7 @@ curator-recipes-2.4.0.jar
datanucleus-api-jdo-3.2.6.jar
datanucleus-core-3.2.10.jar
datanucleus-rdbms-3.2.9.jar
-derby-10.10.1.1.jar
+derby-10.11.1.1.jar
eigenbase-properties-1.1.5.jar
guava-14.0.1.jar
guice-3.0.jar
@@ -86,7 +86,7 @@ javassist-3.18.1-GA.jar
javax.annotation-api-1.2.jar
javax.inject-1.jar
javax.inject-2.4.0-b34.jar
-javax.servlet-3.0.0.v201112011016.jar
+javax.servlet-api-3.1.0.jar
javax.ws.rs-api-2.0.1.jar
javolution-5.5.1.jar
jaxb-api-2.2.2.jar
diff --git a/dev/deps/spark-deps-hadoop-2.6 b/dev/deps/spark-deps-hadoop-2.6
index a29f6700bd..909b94bde2 100644
--- a/dev/deps/spark-deps-hadoop-2.6
+++ b/dev/deps/spark-deps-hadoop-2.6
@@ -52,7 +52,7 @@ curator-recipes-2.6.0.jar
datanucleus-api-jdo-3.2.6.jar
datanucleus-core-3.2.10.jar
datanucleus-rdbms-3.2.9.jar
-derby-10.10.1.1.jar
+derby-10.11.1.1.jar
eigenbase-properties-1.1.5.jar
gson-2.2.4.jar
guava-14.0.1.jar
@@ -94,7 +94,7 @@ javassist-3.18.1-GA.jar
javax.annotation-api-1.2.jar
javax.inject-1.jar
javax.inject-2.4.0-b34.jar
-javax.servlet-3.0.0.v201112011016.jar
+javax.servlet-api-3.1.0.jar
javax.ws.rs-api-2.0.1.jar
javolution-5.5.1.jar
jaxb-api-2.2.2.jar
diff --git a/dev/deps/spark-deps-hadoop-2.7 b/dev/deps/spark-deps-hadoop-2.7
index 8955b0e977..7507599740 100644
--- a/dev/deps/spark-deps-hadoop-2.7
+++ b/dev/deps/spark-deps-hadoop-2.7
@@ -52,7 +52,7 @@ curator-recipes-2.6.0.jar
datanucleus-api-jdo-3.2.6.jar
datanucleus-core-3.2.10.jar
datanucleus-rdbms-3.2.9.jar
-derby-10.10.1.1.jar
+derby-10.11.1.1.jar
eigenbase-properties-1.1.5.jar
gson-2.2.4.jar
guava-14.0.1.jar
@@ -94,7 +94,7 @@ javassist-3.18.1-GA.jar
javax.annotation-api-1.2.jar
javax.inject-1.jar
javax.inject-2.4.0-b34.jar
-javax.servlet-3.0.0.v201112011016.jar
+javax.servlet-api-3.1.0.jar
javax.ws.rs-api-2.0.1.jar
javolution-5.5.1.jar
jaxb-api-2.2.2.jar
diff --git a/pom.xml b/pom.xml
index d71913c958..9e9aad223a 100644
--- a/pom.xml
+++ b/pom.xml
@@ -133,11 +133,11 @@
<hive.version>1.2.1.spark2</hive.version>
<!-- Version used for internal directory structure -->
<hive.version.short>1.2.1</hive.version.short>
- <derby.version>10.10.1.1</derby.version>
+ <derby.version>10.11.1.1</derby.version>
<parquet.version>1.7.0</parquet.version>
<hive.parquet.version>1.6.0</hive.parquet.version>
- <jetty.version>8.1.19.v20160209</jetty.version>
- <orbit.version>3.0.0.v201112011016</orbit.version>
+ <jetty.version>9.2.16.v20160414</jetty.version>
+ <javaxservlet.version>3.1.0</javaxservlet.version>
<chill.version>0.8.0</chill.version>
<ivy.version>2.4.0</ivy.version>
<oro.version>2.0.8</oro.version>
@@ -330,6 +330,12 @@
</dependency>
<dependency>
<groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-servlets</artifactId>
+ <version>${jetty.version}</version>
+ <scope>provided</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-util</artifactId>
<version>${jetty.version}</version>
<scope>provided</scope>
@@ -2264,6 +2270,7 @@
<include>org.eclipse.jetty:jetty-http</include>
<include>org.eclipse.jetty:jetty-continuation</include>
<include>org.eclipse.jetty:jetty-servlet</include>
+ <include>org.eclipse.jetty:jetty-servlets</include>
<include>org.eclipse.jetty:jetty-plus</include>
<include>org.eclipse.jetty:jetty-security</include>
<include>org.eclipse.jetty:jetty-util</include>
diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java
index 3b57efa38b..37e4845cce 100644
--- a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java
+++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java
@@ -37,8 +37,7 @@ import org.apache.thrift.TProcessor;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.protocol.TProtocolFactory;
import org.apache.thrift.server.TServlet;
-import org.eclipse.jetty.server.nio.SelectChannelConnector;
-import org.eclipse.jetty.server.ssl.SslSelectChannelConnector;
+import org.eclipse.jetty.server.ServerConnector;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.eclipse.jetty.util.ssl.SslContextFactory;
@@ -59,9 +58,6 @@ public class ThriftHttpCLIService extends ThriftCLIService {
@Override
public void run() {
try {
- // HTTP Server
- httpServer = new org.eclipse.jetty.server.Server();
-
// Server thread pool
// Start with minWorkerThreads, expand till maxWorkerThreads and reject subsequent requests
String threadPoolName = "HiveServer2-HttpHandler-Pool";
@@ -69,10 +65,12 @@ public class ThriftHttpCLIService extends ThriftCLIService {
workerKeepAliveTime, TimeUnit.SECONDS, new SynchronousQueue<Runnable>(),
new ThreadFactoryWithGarbageCleanup(threadPoolName));
ExecutorThreadPool threadPool = new ExecutorThreadPool(executorService);
- httpServer.setThreadPool(threadPool);
+
+ // HTTP Server
+ httpServer = new org.eclipse.jetty.server.Server(threadPool);
// Connector configs
- SelectChannelConnector connector = new SelectChannelConnector();
+ ServerConnector connector = new ServerConnector(httpServer);
boolean useSsl = hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_USE_SSL);
String schemeName = useSsl ? "https" : "http";
// Change connector if SSL is used
@@ -92,14 +90,14 @@ public class ThriftHttpCLIService extends ThriftCLIService {
Arrays.toString(sslContextFactory.getExcludeProtocols()));
sslContextFactory.setKeyStorePath(keyStorePath);
sslContextFactory.setKeyStorePassword(keyStorePassword);
- connector = new SslSelectChannelConnector(sslContextFactory);
+ connector = new ServerConnector(httpServer, sslContextFactory);
}
connector.setPort(portNum);
// Linux:yes, Windows:no
connector.setReuseAddress(!Shell.WINDOWS);
int maxIdleTime = (int) hiveConf.getTimeVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_MAX_IDLE_TIME,
TimeUnit.MILLISECONDS);
- connector.setMaxIdleTime(maxIdleTime);
+ connector.setIdleTimeout(maxIdleTime);
httpServer.addConnector(connector);
diff --git a/streaming/pom.xml b/streaming/pom.xml
index 7d409c5d3b..e7415863e3 100644
--- a/streaming/pom.xml
+++ b/streaming/pom.xml
@@ -77,6 +77,10 @@
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-servlet</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-servlets</artifactId>
+ </dependency>
<!-- End of shaded deps. -->
<dependency>
diff --git a/yarn/pom.xml b/yarn/pom.xml
index db7f3e51d3..11df2b3f4f 100644
--- a/yarn/pom.xml
+++ b/yarn/pom.xml
@@ -102,6 +102,10 @@
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-servlet</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-servlets</artifactId>
+ </dependency>
<!-- End of shaded deps. -->
<!--