From: virajith@apache.org
To: common-commits@hadoop.apache.org
Date: Fri, 28 Oct 2016 18:04:37 -0000
Subject: [39/50] [abbrv] hadoop git commit: HADOOP-10075. Update jetty dependency to version 9 (rkanter)

HADOOP-10075.
Update jetty dependency to version 9 (rkanter) Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/5877f20f Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/5877f20f Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/5877f20f Branch: refs/heads/HDFS-9806 Commit: 5877f20f9c3f6f0afa505715e9a2ee312475af17 Parents: 9e03ee5 Author: Robert Kanter Authored: Thu Oct 27 16:01:23 2016 -0700 Committer: Robert Kanter Committed: Thu Oct 27 16:09:00 2016 -0700 ---------------------------------------------------------------------- hadoop-client/pom.xml | 20 +- .../hadoop-auth-examples/pom.xml | 2 +- .../examples/RequestLoggerFilter.java | 12 + hadoop-common-project/hadoop-auth/pom.xml | 13 +- .../client/AuthenticatorTestCase.java | 29 +- hadoop-common-project/hadoop-common/pom.xml | 34 +- .../hadoop/http/AdminAuthorizedServlet.java | 2 +- .../org/apache/hadoop/http/HttpRequestLog.java | 4 +- .../org/apache/hadoop/http/HttpServer2.java | 305 +- .../java/org/apache/hadoop/http/JettyUtils.java | 35 + .../ssl/SslSelectChannelConnectorSecure.java | 58 - .../org/apache/hadoop/conf/TestConfServlet.java | 2 +- .../hadoop/fs/FSMainOperationsBaseTest.java | 4 +- .../fs/viewfs/ViewFileSystemTestSetup.java | 10 +- .../hadoop/fs/viewfs/ViewFsTestSetup.java | 10 +- .../http/TestAuthenticationSessionCookie.java | 11 +- .../apache/hadoop/http/TestHttpRequestLog.java | 4 +- .../org/apache/hadoop/http/TestHttpServer.java | 22 +- .../apache/hadoop/http/TestServletFilter.java | 7 +- .../hadoop/http/resource/JerseyResource.java | 5 +- .../delegation/web/TestWebDelegationToken.java | 64 +- hadoop-common-project/hadoop-kms/pom.xml | 20 +- .../hadoop/crypto/key/kms/server/KMS.java | 21 +- .../key/kms/server/KMSAuthenticationFilter.java | 12 + .../crypto/key/kms/server/KMSJSONWriter.java | 3 +- .../hadoop/crypto/key/kms/server/MiniKMS.java | 63 +- hadoop-common-project/hadoop-nfs/pom.xml | 2 +- hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml | 26 +- .../hadoop/fs/http/server/HttpFSServer.java | 12 +- .../apache/hadoop/lib/wsrs/JSONMapProvider.java | 3 +- .../apache/hadoop/lib/wsrs/JSONProvider.java | 3 +- .../fs/http/client/BaseTestHttpFSWith.java | 6 +- .../hadoop/fs/http/server/TestHttpFSServer.java | 6 +- .../fs/http/server/TestHttpFSServerNoACLs.java | 6 +- .../http/server/TestHttpFSServerNoXAttrs.java | 6 +- .../fs/http/server/TestHttpFSWithKerberos.java | 6 +- .../org/apache/hadoop/test/TestHFSTestCase.java | 8 +- .../org/apache/hadoop/test/TestHTestCase.java | 8 +- .../org/apache/hadoop/test/TestJettyHelper.java | 56 +- hadoop-hdfs-project/hadoop-hdfs-nfs/pom.xml | 8 +- hadoop-hdfs-project/hadoop-hdfs/pom.xml | 25 +- .../hdfs/qjournal/server/JournalNode.java | 2 +- .../hadoop/hdfs/server/datanode/DataNode.java | 2 +- .../hdfs/server/namenode/FSNamesystem.java | 2 +- .../hadoop/hdfs/server/namenode/NNStorage.java | 2 +- .../hdfs/server/namenode/TransferFsImage.java | 2 +- .../web/resources/NamenodeWebHdfsMethods.java | 39 +- .../apache/hadoop/hdfs/TestDecommission.java | 2 +- .../qjournal/server/TestJournalNodeMXBean.java | 2 +- .../blockmanagement/TestBlockStatsMXBean.java | 2 +- .../server/datanode/TestDataNodeMXBean.java | 2 +- .../server/namenode/TestFSNamesystemMBean.java | 2 +- .../server/namenode/TestNameNodeMXBean.java | 2 +- .../namenode/TestStartupProgressServlet.java | 2 +- .../server/namenode/TestTransferFsImage.java | 2 +- .../hadoop/hdfs/web/TestWebHDFSForHA.java | 2 +- .../hadoop/test/MiniDFSClusterManager.java | 2 +- 
.../hadoop/mapreduce/v2/app/JobEndNotifier.java | 37 +- .../mapreduce/v2/app/webapp/AMWebServices.java | 49 +- .../v2/app/webapp/TestAMWebServices.java | 31 +- .../v2/app/webapp/TestAMWebServicesAttempt.java | 13 +- .../app/webapp/TestAMWebServicesAttempts.java | 34 +- .../v2/app/webapp/TestAMWebServicesJobConf.java | 13 +- .../v2/app/webapp/TestAMWebServicesJobs.java | 64 +- .../v2/app/webapp/TestAMWebServicesTasks.java | 61 +- .../mapreduce/v2/hs/webapp/HsWebServices.java | 40 +- .../v2/hs/webapp/TestHsWebServices.java | 25 +- .../v2/hs/webapp/TestHsWebServicesAttempts.java | 34 +- .../v2/hs/webapp/TestHsWebServicesJobConf.java | 13 +- .../v2/hs/webapp/TestHsWebServicesJobs.java | 67 +- .../hs/webapp/TestHsWebServicesJobsQuery.java | 76 +- .../v2/hs/webapp/TestHsWebServicesTasks.java | 61 +- .../hadoop/mapred/NotificationTestCase.java | 12 +- .../mapreduce/MiniHadoopClusterManager.java | 2 +- .../apache/hadoop/mapred/ShuffleHandler.java | 14 +- .../hadoop/mapred/TestShuffleHandler.java | 25 +- .../hadoop-mapreduce-client/pom.xml | 4 +- hadoop-mapreduce-project/pom.xml | 4 +- hadoop-maven-plugins/pom.xml | 4 + .../maven/plugin/resourcegz/ResourceGzMojo.java | 125 + hadoop-project/pom.xml | 52 +- hadoop-tools/hadoop-azure-datalake/pom.xml | 2 +- hadoop-tools/hadoop-azure/pom.xml | 7 +- .../fs/azure/AzureNativeFileSystemStore.java | 2 +- .../hadoop/fs/azure/BlockBlobAppendStream.java | 6 +- hadoop-tools/hadoop-sls/pom.xml | 10 +- .../apache/hadoop/yarn/sls/web/SLSWebApp.java | 19 +- .../yarn/client/api/impl/TestAMRMClient.java | 8 +- .../hadoop/yarn/client/cli/TestYarnCLI.java | 10 +- .../hadoop-yarn/hadoop-yarn-common/pom.xml | 17 +- .../static/dt-1.9.4/js/jquery.dataTables.min.js | 157 + .../dt-1.9.4/js/jquery.dataTables.min.js.gz | Bin 21726 -> 0 bytes .../webapps/static/jquery/jquery-1.8.2.min.js | 2 + .../static/jquery/jquery-1.8.2.min.js.gz | Bin 33430 -> 0 bytes .../static/jquery/jquery-ui-1.9.1.custom.min.js | 6 + .../jquery/jquery-ui-1.9.1.custom.min.js.gz | Bin 62855 -> 0 bytes .../webapps/static/jt/jquery.jstree.js | 4544 ++++++++++++++++++ .../webapps/static/jt/jquery.jstree.js.gz | Bin 37540 -> 0 bytes .../hadoop/yarn/webapp/MyTestWebService.java | 3 +- .../pom.xml | 2 +- .../ApplicationHistoryServer.java | 50 +- .../webapp/AHSWebServices.java | 27 +- .../timeline/webapp/TimelineWebServices.java | 23 +- .../webapp/TestAHSWebServices.java | 22 +- .../TestRollingLevelDBTimelineStore.java | 6 +- .../webapp/TestTimelineWebServices.java | 141 +- .../hadoop-yarn-server-nodemanager/pom.xml | 4 +- .../nodemanager/webapp/NMWebServices.java | 28 +- .../amrmproxy/MockResourceManagerFacade.java | 11 +- .../TestLogAggregationService.java | 2 +- .../nodemanager/webapp/TestNMWebServices.java | 27 +- .../webapp/TestNMWebServicesApps.java | 49 +- .../webapp/TestNMWebServicesContainers.java | 25 +- .../hadoop-yarn-server-resourcemanager/pom.xml | 8 +- .../yarn/server/resourcemanager/RMNMInfo.java | 2 +- .../resourcemanager/webapp/RMWebServices.java | 121 +- .../yarn/server/resourcemanager/MockNM.java | 4 +- .../yarn/server/resourcemanager/TestRMHA.java | 4 +- .../planning/TestAlignedPlanner.java | 4 +- .../planning/TestGreedyReservationAgent.java | 4 +- .../webapp/TestRMWebServices.java | 46 +- .../webapp/TestRMWebServicesApps.java | 151 +- .../TestRMWebServicesAppsModification.java | 40 +- .../webapp/TestRMWebServicesCapacitySched.java | 19 +- .../TestRMWebServicesDelegationTokens.java | 4 +- .../webapp/TestRMWebServicesFairScheduler.java | 10 +- .../TestRMWebServicesForCSWithPartitions.java | 
13 +- .../webapp/TestRMWebServicesNodeLabels.java | 55 +- .../webapp/TestRMWebServicesNodes.java | 58 +- .../webapp/TestRMWebServicesReservation.java | 13 +- .../TestRMWebServicesSchedulerActivities.java | 91 +- .../yarn/webapp/TestRMWithCSRFFilter.java | 7 +- .../pom.xml | 2 +- .../pom.xml | 16 + ...stTimelineReaderWebServicesHBaseStorage.java | 50 +- .../hadoop-yarn-server-timelineservice/pom.xml | 18 +- .../collector/TimelineCollectorWebService.java | 4 +- .../reader/TimelineReaderServer.java | 25 +- .../reader/TimelineReaderWebServices.java | 47 +- .../reader/TestTimelineReaderWebServices.java | 73 +- .../hadoop-yarn-server-web-proxy/pom.xml | 6 +- .../server/webproxy/TestWebAppProxyServlet.java | 39 +- pom.xml | 3 +- 143 files changed, 6868 insertions(+), 1161 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-client/pom.xml ---------------------------------------------------------------------- diff --git a/hadoop-client/pom.xml b/hadoop-client/pom.xml index b330a3d..988dd03 100644 --- a/hadoop-client/pom.xml +++ b/hadoop-client/pom.xml @@ -41,7 +41,7 @@ javax.servlet - servlet-api + javax.servlet-api commons-logging @@ -49,18 +49,18 @@ jetty - org.mortbay.jetty + org.eclipse.jetty - org.mortbay.jetty - jetty + org.eclipse.jetty + jetty-server - org.mortbay.jetty + org.eclipse.jetty jetty-util - org.mortbay.jetty + org.eclipse.jetty servlet-api-2.5 @@ -112,8 +112,8 @@ avro - org.mortbay.jetty - jetty + org.eclipse.jetty + jetty-server com.sun.jersey @@ -125,7 +125,7 @@ javax.servlet - servlet-api + javax.servlet-api @@ -137,7 +137,7 @@ javax.servlet - servlet-api + javax.servlet-api org.apache.hadoop http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-common-project/hadoop-auth-examples/pom.xml ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-auth-examples/pom.xml b/hadoop-common-project/hadoop-auth-examples/pom.xml index c36c157..d841ace 100644 --- a/hadoop-common-project/hadoop-auth-examples/pom.xml +++ b/hadoop-common-project/hadoop-auth-examples/pom.xml @@ -34,7 +34,7 @@ javax.servlet - servlet-api + javax.servlet-api provided http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-common-project/hadoop-auth-examples/src/main/java/org/apache/hadoop/security/authentication/examples/RequestLoggerFilter.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-auth-examples/src/main/java/org/apache/hadoop/security/authentication/examples/RequestLoggerFilter.java b/hadoop-common-project/hadoop-auth-examples/src/main/java/org/apache/hadoop/security/authentication/examples/RequestLoggerFilter.java index a9721c9..8048991 100644 --- a/hadoop-common-project/hadoop-auth-examples/src/main/java/org/apache/hadoop/security/authentication/examples/RequestLoggerFilter.java +++ b/hadoop-common-project/hadoop-auth-examples/src/main/java/org/apache/hadoop/security/authentication/examples/RequestLoggerFilter.java @@ -139,7 +139,19 @@ public class RequestLoggerFilter implements Filter { status = sc; } + /** + * Calls setStatus(int sc, String msg) on the wrapped + * {@link HttpServletResponseWrapper} object. + * + * @param sc the status code + * @param msg the status message + * @deprecated {@link HttpServletResponseWrapper#setStatus(int, String)} is + * deprecated. 
To set a status code use {@link #setStatus(int)}, to send an + * error with a description use {@link #sendError(int, String)} + */ @Override + @Deprecated + @SuppressWarnings("deprecation") public void setStatus(int sc, String msg) { super.setStatus(sc, msg); status = sc; http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-common-project/hadoop-auth/pom.xml ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-auth/pom.xml b/hadoop-common-project/hadoop-auth/pom.xml index 0b37715..5df4bdb 100644 --- a/hadoop-common-project/hadoop-auth/pom.xml +++ b/hadoop-common-project/hadoop-auth/pom.xml @@ -53,13 +53,18 @@ test - org.mortbay.jetty + org.eclipse.jetty jetty-util test - org.mortbay.jetty - jetty + org.eclipse.jetty + jetty-server + test + + + org.eclipse.jetty + jetty-servlet test @@ -74,7 +79,7 @@ javax.servlet - servlet-api + javax.servlet-api provided http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/AuthenticatorTestCase.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/AuthenticatorTestCase.java b/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/AuthenticatorTestCase.java index 35e40d8..8b9d45e 100644 --- a/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/AuthenticatorTestCase.java +++ b/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/AuthenticatorTestCase.java @@ -30,11 +30,14 @@ import org.apache.http.impl.auth.SPNegoScheme; import org.apache.http.impl.client.BasicCredentialsProvider; import org.apache.http.impl.client.HttpClientBuilder; import org.apache.http.util.EntityUtils; -import org.mortbay.jetty.Server; -import org.mortbay.jetty.servlet.Context; -import org.mortbay.jetty.servlet.FilterHolder; -import org.mortbay.jetty.servlet.ServletHolder; - +import org.eclipse.jetty.server.Connector; +import org.eclipse.jetty.server.Server; +import org.eclipse.jetty.server.ServerConnector; +import org.eclipse.jetty.servlet.FilterHolder; +import org.eclipse.jetty.servlet.ServletContextHandler; +import org.eclipse.jetty.servlet.ServletHolder; + +import javax.servlet.DispatcherType; import javax.servlet.FilterConfig; import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; @@ -53,6 +56,7 @@ import java.net.HttpURLConnection; import java.net.ServerSocket; import java.net.URL; import java.security.Principal; +import java.util.EnumSet; import java.util.Properties; import org.junit.Assert; @@ -63,7 +67,7 @@ public class AuthenticatorTestCase { private int port = -1; private boolean useTomcat = false; private Tomcat tomcat = null; - Context context; + ServletContextHandler context; private static Properties authenticatorConfig; @@ -121,16 +125,19 @@ public class AuthenticatorTestCase { } protected void startJetty() throws Exception { - server = new Server(0); - context = new Context(); + server = new Server(); + context = new ServletContextHandler(); context.setContextPath("/foo"); server.setHandler(context); - context.addFilter(new FilterHolder(TestFilter.class), "/*", 0); + context.addFilter(new FilterHolder(TestFilter.class), "/*", + EnumSet.of(DispatcherType.REQUEST)); context.addServlet(new 
ServletHolder(TestServlet.class), "/bar"); host = "localhost"; port = getLocalPort(); - server.getConnectors()[0].setHost(host); - server.getConnectors()[0].setPort(port); + ServerConnector connector = new ServerConnector(server); + connector.setHost(host); + connector.setPort(port); + server.setConnectors(new Connector[] {connector}); server.start(); System.out.println("Running embedded servlet container at: http://" + host + ":" + port); } http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-common-project/hadoop-common/pom.xml ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/pom.xml b/hadoop-common-project/hadoop-common/pom.xml index fd9b7cd..645d495 100644 --- a/hadoop-common-project/hadoop-common/pom.xml +++ b/hadoop-common-project/hadoop-common/pom.xml @@ -93,25 +93,35 @@ javax.servlet - servlet-api + javax.servlet-api compile - org.mortbay.jetty - jetty + org.eclipse.jetty + jetty-server compile - org.mortbay.jetty + org.eclipse.jetty jetty-util compile - org.mortbay.jetty - jetty-sslengine + org.eclipse.jetty + jetty-servlet compile + org.eclipse.jetty + jetty-webapp + compile + + + org.eclipse.jetty + jetty-util-ajax + test + + javax.servlet.jsp jsp-api runtime @@ -412,6 +422,18 @@ ${project.build.directory}/generated-test-sources/java + + resource-gz + generate-resources + + resource-gz + + + ${basedir}/src/main/webapps/static + ${basedir}/target/webapps/static + js,css + + http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/AdminAuthorizedServlet.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/AdminAuthorizedServlet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/AdminAuthorizedServlet.java index ef562b4..a4b05a1 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/AdminAuthorizedServlet.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/AdminAuthorizedServlet.java @@ -23,7 +23,7 @@ import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; -import org.mortbay.jetty.servlet.DefaultServlet; +import org.eclipse.jetty.servlet.DefaultServlet; /** * General servlet which is admin-authorized. 
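
The startJetty() change above is the embedded-server pattern this patch applies throughout: Jetty 9 replaces org.mortbay.jetty.Server and org.mortbay.jetty.servlet.Context with org.eclipse.jetty.server.Server plus an explicit ServerConnector and a ServletContextHandler, and filter registration now takes an EnumSet of DispatcherType instead of an int dispatch mask. The following is a minimal, self-contained sketch of that pattern and is not part of the patch; the names EmbeddedJetty9Example, HelloServlet, LoggingFilter and the port 8080 are placeholders chosen for illustration, assuming Jetty 9.x and the Servlet 3.x API are on the classpath.

import java.io.IOException;
import java.util.EnumSet;
import javax.servlet.DispatcherType;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.eclipse.jetty.server.Connector;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.ServerConnector;
import org.eclipse.jetty.servlet.FilterHolder;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;

public class EmbeddedJetty9Example {

  /** Placeholder servlet, standing in for TestServlet above. */
  public static class HelloServlet extends HttpServlet {
    @Override
    protected void doGet(HttpServletRequest req, HttpServletResponse resp)
        throws ServletException, IOException {
      resp.setContentType("text/plain; charset=utf-8");
      resp.getWriter().println("hello");
    }
  }

  /** Placeholder filter, standing in for TestFilter above. */
  public static class LoggingFilter implements Filter {
    @Override
    public void init(FilterConfig filterConfig) {
    }
    @Override
    public void doFilter(ServletRequest req, ServletResponse resp,
        FilterChain chain) throws IOException, ServletException {
      System.out.println("request: " + ((HttpServletRequest) req).getRequestURI());
      chain.doFilter(req, resp);
    }
    @Override
    public void destroy() {
    }
  }

  public static void main(String[] args) throws Exception {
    // Jetty 9: the connector is created against the Server rather than
    // passing a port to the Server constructor.
    Server server = new Server();
    ServerConnector connector = new ServerConnector(server);
    connector.setHost("localhost");
    connector.setPort(8080);  // placeholder port
    server.setConnectors(new Connector[] { connector });

    // ServletContextHandler replaces the old org.mortbay.jetty.servlet.Context.
    ServletContextHandler context = new ServletContextHandler();
    context.setContextPath("/foo");
    server.setHandler(context);

    // Filters are now mapped with an EnumSet of DispatcherType rather than
    // the Jetty 6 integer dispatch constant.
    context.addFilter(new FilterHolder(LoggingFilter.class), "/*",
        EnumSet.of(DispatcherType.REQUEST));
    context.addServlet(new ServletHolder(HelloServlet.class), "/bar");

    server.start();
    server.join();
  }
}

For an HTTPS endpoint, the same patch pairs the ServerConnector with an SslConnectionFactory built from an SslContextFactory, as shown in the HttpServer2 hunk below.
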
http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpRequestLog.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpRequestLog.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpRequestLog.java index 52d9850..a7c23b9 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpRequestLog.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpRequestLog.java @@ -25,8 +25,8 @@ import org.apache.commons.logging.LogConfigurationException; import org.apache.commons.logging.LogFactory; import org.apache.log4j.Appender; import org.apache.log4j.Logger; -import org.mortbay.jetty.NCSARequestLog; -import org.mortbay.jetty.RequestLog; +import org.eclipse.jetty.server.NCSARequestLog; +import org.eclipse.jetty.server.RequestLog; /** * RequestLog object for use with Http http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java index a2bb18f..49ec90a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java @@ -56,7 +56,6 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.security.AuthenticationFilterInitializer; import org.apache.hadoop.security.authentication.util.SignerSecretProvider; -import org.apache.hadoop.security.ssl.SslSelectChannelConnectorSecure; import org.apache.hadoop.jmx.JMXJsonServlet; import org.apache.hadoop.log.LogLevel; import org.apache.hadoop.security.SecurityUtil; @@ -65,34 +64,39 @@ import org.apache.hadoop.security.authentication.server.AuthenticationFilter; import org.apache.hadoop.security.authorize.AccessControlList; import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.util.Shell; -import org.mortbay.io.Buffer; -import org.mortbay.jetty.Connector; -import org.mortbay.jetty.Handler; -import org.mortbay.jetty.MimeTypes; -import org.mortbay.jetty.RequestLog; -import org.mortbay.jetty.Server; -import org.mortbay.jetty.SessionManager; -import org.mortbay.jetty.handler.ContextHandler; -import org.mortbay.jetty.handler.ContextHandlerCollection; -import org.mortbay.jetty.handler.HandlerCollection; -import org.mortbay.jetty.handler.RequestLogHandler; -import org.mortbay.jetty.nio.SelectChannelConnector; -import org.mortbay.jetty.security.SslSelectChannelConnector; -import org.mortbay.jetty.servlet.AbstractSessionManager; -import org.mortbay.jetty.servlet.Context; -import org.mortbay.jetty.servlet.DefaultServlet; -import org.mortbay.jetty.servlet.FilterHolder; -import org.mortbay.jetty.servlet.SessionHandler; -import org.mortbay.jetty.servlet.FilterMapping; -import org.mortbay.jetty.servlet.ServletHandler; -import org.mortbay.jetty.servlet.ServletHolder; -import org.mortbay.jetty.webapp.WebAppContext; -import org.mortbay.thread.QueuedThreadPool; -import org.mortbay.util.MultiException; +import 
org.eclipse.jetty.http.HttpVersion; +import org.eclipse.jetty.server.ConnectionFactory; +import org.eclipse.jetty.server.Connector; +import org.eclipse.jetty.server.Handler; +import org.eclipse.jetty.server.HttpConfiguration; +import org.eclipse.jetty.server.HttpConnectionFactory; +import org.eclipse.jetty.server.RequestLog; +import org.eclipse.jetty.server.SecureRequestCustomizer; +import org.eclipse.jetty.server.Server; +import org.eclipse.jetty.server.ServerConnector; +import org.eclipse.jetty.server.SessionManager; +import org.eclipse.jetty.server.SslConnectionFactory; +import org.eclipse.jetty.server.handler.ContextHandlerCollection; +import org.eclipse.jetty.server.handler.HandlerCollection; +import org.eclipse.jetty.server.handler.RequestLogHandler; +import org.eclipse.jetty.server.session.AbstractSessionManager; +import org.eclipse.jetty.server.session.SessionHandler; +import org.eclipse.jetty.servlet.DefaultServlet; +import org.eclipse.jetty.servlet.FilterHolder; +import org.eclipse.jetty.servlet.FilterMapping; +import org.eclipse.jetty.servlet.ServletContextHandler; +import org.eclipse.jetty.servlet.ServletHandler; +import org.eclipse.jetty.servlet.ServletHolder; +import org.eclipse.jetty.servlet.ServletMapping; +import org.eclipse.jetty.util.ArrayUtil; +import org.eclipse.jetty.util.MultiException; +import org.eclipse.jetty.webapp.WebAppContext; +import org.eclipse.jetty.util.thread.QueuedThreadPool; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import com.sun.jersey.spi.container.servlet.ServletContainer; +import org.eclipse.jetty.util.ssl.SslContextFactory; /** * Create a Jetty embedded server to answer http requests. The primary goal is @@ -126,11 +130,13 @@ public final class HttpServer2 implements FilterContainer { protected final Server webServer; - private final List listeners = Lists.newArrayList(); + private final HandlerCollection handlers; + + private final List listeners = Lists.newArrayList(); protected final WebAppContext webAppContext; protected final boolean findPort; - protected final Map defaultContexts = + private final Map defaultContexts = new HashMap<>(); protected final List filterNames = new ArrayList<>(); static final String STATE_DESCRIPTION_ALIVE = " - alive"; @@ -327,49 +333,59 @@ public final class HttpServer2 implements FilterContainer { } for (URI ep : endpoints) { - final Connector listener; + final ServerConnector connector; String scheme = ep.getScheme(); if ("http".equals(scheme)) { - listener = HttpServer2.createDefaultChannelConnector(); + connector = + HttpServer2.createDefaultChannelConnector(server.webServer); } else if ("https".equals(scheme)) { - listener = createHttpsChannelConnector(); + connector = createHttpsChannelConnector(server.webServer); } else { throw new HadoopIllegalArgumentException( "unknown scheme for endpoint:" + ep); } - listener.setHost(ep.getHost()); - listener.setPort(ep.getPort() == -1 ? 0 : ep.getPort()); - server.addListener(listener); + connector.setHost(ep.getHost()); + connector.setPort(ep.getPort() == -1 ? 
0 : ep.getPort()); + server.addListener(connector); } server.loadListeners(); return server; } - private Connector createHttpsChannelConnector() { - SslSelectChannelConnector c = new SslSelectChannelConnectorSecure(); - configureChannelConnector(c); - - c.setNeedClientAuth(needsClientAuth); - c.setKeyPassword(keyPassword); - + private ServerConnector createHttpsChannelConnector(Server server) { + ServerConnector conn = new ServerConnector(server); + HttpConfiguration httpConfig = new HttpConfiguration(); + httpConfig.setRequestHeaderSize(JettyUtils.HEADER_SIZE); + httpConfig.setResponseHeaderSize(JettyUtils.HEADER_SIZE); + httpConfig.setSecureScheme("https"); + httpConfig.addCustomizer(new SecureRequestCustomizer()); + ConnectionFactory connFactory = new HttpConnectionFactory(httpConfig); + conn.addConnectionFactory(connFactory); + configureChannelConnector(conn); + + SslContextFactory sslContextFactory = new SslContextFactory(); + sslContextFactory.setNeedClientAuth(needsClientAuth); + sslContextFactory.setKeyManagerPassword(keyPassword); if (keyStore != null) { - c.setKeystore(keyStore); - c.setKeystoreType(keyStoreType); - c.setPassword(keyStorePassword); + sslContextFactory.setKeyStorePath(keyStore); + sslContextFactory.setKeyStoreType(keyStoreType); + sslContextFactory.setKeyStorePassword(keyStorePassword); } - if (trustStore != null) { - c.setTruststore(trustStore); - c.setTruststoreType(trustStoreType); - c.setTrustPassword(trustStorePassword); + sslContextFactory.setTrustStorePath(trustStore); + sslContextFactory.setTrustStoreType(trustStoreType); + sslContextFactory.setTrustStorePassword(trustStorePassword); } - if(null != excludeCiphers && !excludeCiphers.isEmpty()) { - c.setExcludeCipherSuites(excludeCiphers.split(",")); + sslContextFactory.setExcludeCipherSuites(excludeCiphers.split(",")); LOG.info("Excluded Cipher List:" + excludeCiphers); } - return c; + + conn.addFirstConnectionFactory(new SslConnectionFactory(sslContextFactory, + HttpVersion.HTTP_1_1.asString())); + + return conn; } } @@ -377,6 +393,7 @@ public final class HttpServer2 implements FilterContainer { final String appDir = getWebAppsPath(b.name); this.webServer = new Server(); this.adminsAcl = b.adminsAcl; + this.handlers = new HandlerCollection(); this.webAppContext = createWebAppContext(b.name, b.conf, adminsAcl, appDir); this.xFrameOptionIsEnabled = b.xFrameEnabled; this.xFrameOption = b.xFrameOption; @@ -406,36 +423,33 @@ public final class HttpServer2 implements FilterContainer { int maxThreads = conf.getInt(HTTP_MAX_THREADS, -1); // If HTTP_MAX_THREADS is not configured, QueueThreadPool() will use the // default value (currently 250). - QueuedThreadPool threadPool = maxThreads == -1 ? 
new QueuedThreadPool() - : new QueuedThreadPool(maxThreads); + + QueuedThreadPool threadPool = (QueuedThreadPool) webServer.getThreadPool(); threadPool.setDaemon(true); - webServer.setThreadPool(threadPool); + if (maxThreads != -1) { + threadPool.setMaxThreads(maxThreads); + } SessionManager sm = webAppContext.getSessionHandler().getSessionManager(); if (sm instanceof AbstractSessionManager) { AbstractSessionManager asm = (AbstractSessionManager)sm; asm.setHttpOnly(true); - asm.setSecureCookies(true); + asm.getSessionCookieConfig().setSecure(true); } ContextHandlerCollection contexts = new ContextHandlerCollection(); RequestLog requestLog = HttpRequestLog.getRequestLog(name); + handlers.addHandler(contexts); if (requestLog != null) { RequestLogHandler requestLogHandler = new RequestLogHandler(); requestLogHandler.setRequestLog(requestLog); - HandlerCollection handlers = new HandlerCollection(); - handlers.setHandlers(new Handler[] {contexts, requestLogHandler}); - webServer.setHandler(handlers); - } else { - webServer.setHandler(contexts); + handlers.addHandler(requestLogHandler); } - + handlers.addHandler(webAppContext); final String appDir = getWebAppsPath(name); - - webServer.addHandler(webAppContext); - addDefaultApps(contexts, appDir, conf); + webServer.setHandler(handlers); Map xFrameParams = new HashMap<>(); xFrameParams.put(X_FRAME_ENABLED, @@ -461,7 +475,7 @@ public final class HttpServer2 implements FilterContainer { } } - private void addListener(Connector connector) { + private void addListener(ServerConnector connector) { listeners.add(connector); } @@ -507,16 +521,14 @@ public final class HttpServer2 implements FilterContainer { return prop; } - private static void addNoCacheFilter(WebAppContext ctxt) { + private static void addNoCacheFilter(ServletContextHandler ctxt) { defineFilter(ctxt, NO_CACHE_FILTER, NoCacheFilter.class.getName(), Collections. 
emptyMap(), new String[] { "/*" }); } - private static void configureChannelConnector(SelectChannelConnector c) { - c.setLowResourceMaxIdleTime(10000); + private static void configureChannelConnector(ServerConnector c) { + c.setIdleTimeout(10000); c.setAcceptQueueSize(128); - c.setResolveNames(false); - c.setUseDirectBuffers(false); if(Shell.WINDOWS) { // result of setting the SO_REUSEADDR flag is different on Windows // http://msdn.microsoft.com/en-us/library/ms740621(v=vs.85).aspx @@ -524,14 +536,18 @@ public final class HttpServer2 implements FilterContainer { // the same port with indeterminate routing of incoming requests to them c.setReuseAddress(false); } - c.setHeaderBufferSize(1024*64); } @InterfaceAudience.Private - public static Connector createDefaultChannelConnector() { - SelectChannelConnector ret = new SelectChannelConnector(); - configureChannelConnector(ret); - return ret; + public static ServerConnector createDefaultChannelConnector(Server server) { + ServerConnector conn = new ServerConnector(server); + HttpConfiguration httpConfig = new HttpConfiguration(); + httpConfig.setRequestHeaderSize(JettyUtils.HEADER_SIZE); + httpConfig.setResponseHeaderSize(JettyUtils.HEADER_SIZE); + ConnectionFactory connFactory = new HttpConnectionFactory(httpConfig); + conn.addConnectionFactory(connFactory); + configureChannelConnector(conn); + return conn; } /** Get an array of FilterConfiguration specified in the conf */ @@ -567,7 +583,8 @@ public final class HttpServer2 implements FilterContainer { CommonConfigurationKeys.HADOOP_HTTP_LOGS_ENABLED, CommonConfigurationKeys.HADOOP_HTTP_LOGS_ENABLED_DEFAULT); if (logDir != null && logsEnabled) { - Context logContext = new Context(parent, "/logs"); + ServletContextHandler logContext = + new ServletContextHandler(parent, "/logs"); logContext.setResourceBase(logDir); logContext.addServlet(AdminAuthorizedServlet.class, "/*"); if (conf.getBoolean( @@ -575,8 +592,7 @@ public final class HttpServer2 implements FilterContainer { CommonConfigurationKeys.DEFAULT_HADOOP_JETTY_LOGS_SERVE_ALIASES)) { @SuppressWarnings("unchecked") Map params = logContext.getInitParams(); - params.put( - "org.mortbay.jetty.servlet.Default.aliases", "true"); + params.put("org.eclipse.jetty.servlet.Default.aliases", "true"); } logContext.setDisplayName("logs"); SessionHandler handler = new SessionHandler(); @@ -584,34 +600,37 @@ public final class HttpServer2 implements FilterContainer { if (sm instanceof AbstractSessionManager) { AbstractSessionManager asm = (AbstractSessionManager) sm; asm.setHttpOnly(true); - asm.setSecureCookies(true); + asm.getSessionCookieConfig().setSecure(true); } logContext.setSessionHandler(handler); setContextAttributes(logContext, conf); - addNoCacheFilter(webAppContext); + addNoCacheFilter(logContext); defaultContexts.put(logContext, true); } // set up the context for "/static/*" - Context staticContext = new Context(parent, "/static"); + ServletContextHandler staticContext = + new ServletContextHandler(parent, "/static"); staticContext.setResourceBase(appDir + "/static"); staticContext.addServlet(DefaultServlet.class, "/*"); staticContext.setDisplayName("static"); @SuppressWarnings("unchecked") Map params = staticContext.getInitParams(); - params.put("org.mortbay.jetty.servlet.Default.dirAllowed", "false"); + params.put("org.eclipse.jetty.servlet.Default.dirAllowed", "false"); + params.put("org.eclipse.jetty.servlet.Default.gzip", "true"); SessionHandler handler = new SessionHandler(); SessionManager sm = handler.getSessionManager(); if (sm 
instanceof AbstractSessionManager) { AbstractSessionManager asm = (AbstractSessionManager) sm; asm.setHttpOnly(true); - asm.setSecureCookies(true); + asm.getSessionCookieConfig().setSecure(true); } staticContext.setSessionHandler(handler); setContextAttributes(staticContext, conf); defaultContexts.put(staticContext, true); } - private void setContextAttributes(Context context, Configuration conf) { + private void setContextAttributes(ServletContextHandler context, + Configuration conf) { context.getServletContext().setAttribute(CONF_CONTEXT_ATTRIBUTE, conf); context.getServletContext().setAttribute(ADMINS_ACL, adminsAcl); } @@ -627,9 +646,9 @@ public final class HttpServer2 implements FilterContainer { addServlet("conf", "/conf", ConfServlet.class); } - public void addContext(Context ctxt, boolean isFiltered) { - webServer.addHandler(ctxt); - addNoCacheFilter(webAppContext); + public void addContext(ServletContextHandler ctxt, boolean isFiltered) { + handlers.addHandler(ctxt); + addNoCacheFilter(ctxt); defaultContexts.put(ctxt, isFiltered); } @@ -691,7 +710,7 @@ public final class HttpServer2 implements FilterContainer { * protect with Kerberos authentication. * Note: This method is to be used for adding servlets that facilitate * internal communication and not for user facing functionality. For - + * servlets added using this method, filters (except internal Kerberos + * servlets added using this method, filters (except internal Kerberos * filters) are not enabled. * * @param name The name of the servlet (can be passed as null) @@ -705,19 +724,58 @@ public final class HttpServer2 implements FilterContainer { if (name != null) { holder.setName(name); } + // Jetty doesn't like the same path spec mapping to different servlets, so + // if there's already a mapping for this pathSpec, remove it and assume that + // the newest one is the one we want + final ServletMapping[] servletMappings = + webAppContext.getServletHandler().getServletMappings(); + for (int i = 0; i < servletMappings.length; i++) { + if (servletMappings[i].containsPathSpec(pathSpec)) { + if (LOG.isDebugEnabled()) { + LOG.debug("Found existing " + servletMappings[i].getServletName() + + " servlet at path " + pathSpec + "; will replace mapping" + + " with " + holder.getName() + " servlet"); + } + ServletMapping[] newServletMappings = + ArrayUtil.removeFromArray(servletMappings, servletMappings[i]); + webAppContext.getServletHandler() + .setServletMappings(newServletMappings); + break; + } + } webAppContext.addServlet(holder, pathSpec); if(requireAuth && UserGroupInformation.isSecurityEnabled()) { - LOG.info("Adding Kerberos (SPNEGO) filter to " + name); - ServletHandler handler = webAppContext.getServletHandler(); - FilterMapping fmap = new FilterMapping(); - fmap.setPathSpec(pathSpec); - fmap.setFilterName(SPNEGO_FILTER); - fmap.setDispatches(Handler.ALL); - handler.addFilterMapping(fmap); + LOG.info("Adding Kerberos (SPNEGO) filter to " + name); + ServletHandler handler = webAppContext.getServletHandler(); + FilterMapping fmap = new FilterMapping(); + fmap.setPathSpec(pathSpec); + fmap.setFilterName(SPNEGO_FILTER); + fmap.setDispatches(FilterMapping.ALL); + handler.addFilterMapping(fmap); } } + /** + * Add the given handler to the front of the list of handlers. 
+ * + * @param handler The handler to add + */ + public void addHandlerAtFront(Handler handler) { + Handler[] h = ArrayUtil.prependToArray( + handler, this.handlers.getHandlers(), Handler.class); + handlers.setHandlers(h); + } + + /** + * Add the given handler to the end of the list of handlers. + * + * @param handler The handler to add + */ + public void addHandlerAtEnd(Handler handler) { + handlers.addHandler(handler); + } + @Override public void addFilter(String name, String classname, Map parameters) { @@ -727,12 +785,14 @@ public final class HttpServer2 implements FilterContainer { FilterMapping fmap = getFilterMapping(name, USER_FACING_URLS); defineFilter(webAppContext, filterHolder, fmap); LOG.info( - "Added filter " + name + " (class=" + classname + ") to context " + webAppContext.getDisplayName()); + "Added filter " + name + " (class=" + classname + ") to context " + + webAppContext.getDisplayName()); final String[] ALL_URLS = { "/*" }; fmap = getFilterMapping(name, ALL_URLS); - for (Map.Entry e : defaultContexts.entrySet()) { + for (Map.Entry e + : defaultContexts.entrySet()) { if (e.getValue()) { - Context ctx = e.getKey(); + ServletContextHandler ctx = e.getKey(); defineFilter(ctx, filterHolder, fmap); LOG.info("Added filter " + name + " (class=" + classname + ") to context " + ctx.getDisplayName()); @@ -748,7 +808,7 @@ public final class HttpServer2 implements FilterContainer { FilterHolder filterHolder = getFilterHolder(name, classname, parameters); FilterMapping fmap = getFilterMapping(name, ALL_URLS); defineFilter(webAppContext, filterHolder, fmap); - for (Context ctx : defaultContexts.keySet()) { + for (ServletContextHandler ctx : defaultContexts.keySet()) { defineFilter(ctx, filterHolder, fmap); } LOG.info("Added global filter '" + name + "' (class=" + classname + ")"); @@ -757,7 +817,7 @@ public final class HttpServer2 implements FilterContainer { /** * Define a filter for a context and set up default url mappings. */ - public static void defineFilter(Context ctx, String name, + public static void defineFilter(ServletContextHandler ctx, String name, String classname, Map parameters, String[] urls) { FilterHolder filterHolder = getFilterHolder(name, classname, parameters); FilterMapping fmap = getFilterMapping(name, urls); @@ -767,8 +827,8 @@ public final class HttpServer2 implements FilterContainer { /** * Define a filter for a context and set up default url mappings. 
*/ - private static void defineFilter(Context ctx, FilterHolder holder, - FilterMapping fmap) { + private static void defineFilter(ServletContextHandler ctx, + FilterHolder holder, FilterMapping fmap) { ServletHandler handler = ctx.getServletHandler(); handler.addFilter(holder, fmap); } @@ -776,7 +836,7 @@ public final class HttpServer2 implements FilterContainer { private static FilterMapping getFilterMapping(String name, String[] urls) { FilterMapping fmap = new FilterMapping(); fmap.setPathSpecs(urls); - fmap.setDispatches(Handler.ALL); + fmap.setDispatches(FilterMapping.ALL); fmap.setFilterName(name); return fmap; } @@ -786,7 +846,9 @@ public final class HttpServer2 implements FilterContainer { FilterHolder holder = new FilterHolder(); holder.setName(name); holder.setClassName(classname); - holder.setInitParameters(parameters); + if (parameters != null) { + holder.setInitParameters(parameters); + } return holder; } @@ -796,13 +858,13 @@ public final class HttpServer2 implements FilterContainer { * @param webAppCtx The WebApplicationContext to add to */ protected void addFilterPathMapping(String pathSpec, - Context webAppCtx) { + ServletContextHandler webAppCtx) { ServletHandler handler = webAppCtx.getServletHandler(); for(String name : filterNames) { FilterMapping fmap = new FilterMapping(); fmap.setPathSpec(pathSpec); fmap.setFilterName(name); - fmap.setDispatches(Handler.ALL); + fmap.setDispatches(FilterMapping.ALL); handler.addFilterMapping(fmap); } } @@ -841,23 +903,23 @@ public final class HttpServer2 implements FilterContainer { */ @Deprecated public int getPort() { - return webServer.getConnectors()[0].getLocalPort(); + return ((ServerConnector)webServer.getConnectors()[0]).getLocalPort(); } /** * Get the address that corresponds to a particular connector. * * @return the corresponding address for the connector, or null if there's no - * such connector or the connector is not bounded. + * such connector or the connector is not bounded or was closed. */ public InetSocketAddress getConnectorAddress(int index) { Preconditions.checkArgument(index >= 0); if (index > webServer.getConnectors().length) return null; - Connector c = webServer.getConnectors()[index]; - if (c.getLocalPort() == -1) { - // The connector is not bounded + ServerConnector c = (ServerConnector)webServer.getConnectors()[index]; + if (c.getLocalPort() == -1 || c.getLocalPort() == -2) { + // The connector is not bounded or was closed return null; } @@ -907,8 +969,8 @@ public final class HttpServer2 implements FilterContainer { throw ex; } // Make sure there is no handler failures. - Handler[] handlers = webServer.getHandlers(); - for (Handler handler : handlers) { + Handler[] hs = webServer.getHandlers(); + for (Handler handler : hs) { if (handler.isFailed()) { throw new IOException( "Problem in starting http server. 
Server handlers failed"); @@ -944,9 +1006,10 @@ public final class HttpServer2 implements FilterContainer { * @throws Exception */ void openListeners() throws Exception { - for (Connector listener : listeners) { - if (listener.getLocalPort() != -1) { - // This listener is either started externally or has been bound + for (ServerConnector listener : listeners) { + if (listener.getLocalPort() != -1 && listener.getLocalPort() != -2) { + // This listener is either started externally or has been bound or was + // closed continue; } int port = listener.getPort(); @@ -978,7 +1041,7 @@ public final class HttpServer2 implements FilterContainer { */ public void stop() throws Exception { MultiException exception = null; - for (Connector c : listeners) { + for (ServerConnector c : listeners) { try { c.close(); } catch (Exception e) { @@ -1042,7 +1105,7 @@ public final class HttpServer2 implements FilterContainer { .append(isAlive() ? STATE_DESCRIPTION_ALIVE : STATE_DESCRIPTION_NOT_LIVE) .append("), listening at:"); - for (Connector l : listeners) { + for (ServerConnector l : listeners) { sb.append(l.getHost()).append(":").append(l.getPort()).append("/,"); } return sb.toString(); @@ -1300,10 +1363,10 @@ public final class HttpServer2 implements FilterContainer { */ private String inferMimeType(ServletRequest request) { String path = ((HttpServletRequest)request).getRequestURI(); - ContextHandler.SContext sContext = (ContextHandler.SContext)config.getServletContext(); - MimeTypes mimes = sContext.getContextHandler().getMimeTypes(); - Buffer mimeBuffer = mimes.getMimeByExtension(path); - return (mimeBuffer == null) ? null : mimeBuffer.toString(); + ServletContextHandler.Context sContext = + (ServletContextHandler.Context)config.getServletContext(); + String mime = sContext.getMimeType(path); + return (mime == null) ? null : mime; } } http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/JettyUtils.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/JettyUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/JettyUtils.java new file mode 100644 index 0000000..29c0930 --- /dev/null +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/JettyUtils.java @@ -0,0 +1,35 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.http; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; + +/** + * Contains utility methods and constants relating to Jetty. 
+ */ +@InterfaceAudience.Public +@InterfaceStability.Evolving +public final class JettyUtils { + public static final String UTF_8 = "charset=utf-8"; + public static final int HEADER_SIZE = 1024 * 64; + + private JettyUtils() { + } +} http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SslSelectChannelConnectorSecure.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SslSelectChannelConnectorSecure.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SslSelectChannelConnectorSecure.java deleted file mode 100644 index 7de689b..0000000 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SslSelectChannelConnectorSecure.java +++ /dev/null @@ -1,58 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.security.ssl; - -import java.io.IOException; -import java.util.ArrayList; - -import javax.net.ssl.SSLEngine; - -import org.apache.hadoop.classification.InterfaceAudience; -import org.mortbay.jetty.security.SslSelectChannelConnector; - -/** - * This subclass of the Jetty SslSelectChannelConnector exists solely to - * control the TLS protocol versions allowed. This is fallout from the - * POODLE vulnerability (CVE-2014-3566), which requires that SSLv3 be disabled. - * Only TLS 1.0 and later protocols are allowed. - */ -@InterfaceAudience.Private -public class SslSelectChannelConnectorSecure extends SslSelectChannelConnector { - - public SslSelectChannelConnectorSecure() { - super(); - } - - /** - * Disable SSLv3 protocol. 
- */ - @Override - protected SSLEngine createSSLEngine() throws IOException { - SSLEngine engine = super.createSSLEngine(); - ArrayList nonSSLProtocols = new ArrayList(); - for (String p : engine.getEnabledProtocols()) { - if (!p.contains("SSLv3")) { - nonSSLProtocols.add(p); - } - } - engine.setEnabledProtocols(nonSSLProtocols.toArray( - new String[nonSSLProtocols.size()])); - return engine; - } -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfServlet.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfServlet.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfServlet.java index 60035be..53089ed 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfServlet.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfServlet.java @@ -31,7 +31,7 @@ import javax.ws.rs.core.HttpHeaders; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; -import org.mortbay.util.ajax.JSON; +import org.eclipse.jetty.util.ajax.JSON; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.Node; http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSMainOperationsBaseTest.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSMainOperationsBaseTest.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSMainOperationsBaseTest.java index 35fd9be..f0c00c4 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSMainOperationsBaseTest.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSMainOperationsBaseTest.java @@ -32,7 +32,7 @@ import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Test; -import org.mortbay.log.Log; +import org.eclipse.jetty.util.log.Log; /** *

@@ -797,7 +797,7 @@ public abstract class FSMainOperationsBaseTest extends FileSystemTestHelper { rename(src, dst, false, false, false, Rename.NONE); Assert.fail("Should throw FileNotFoundException"); } catch (IOException e) { - Log.info("XXX", e); + Log.getLog().info("XXX", e); Assert.assertTrue(unwrapException(e) instanceof FileNotFoundException); } http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemTestSetup.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemTestSetup.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemTestSetup.java index 81ca210..866c03e 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemTestSetup.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemTestSetup.java @@ -27,7 +27,7 @@ import org.apache.hadoop.fs.FsConstants; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.viewfs.ConfigUtil; import org.apache.hadoop.util.Shell; -import org.mortbay.log.Log; +import org.eclipse.jetty.util.log.Log; /** @@ -84,7 +84,7 @@ public class ViewFileSystemTestSetup { FileSystem fsView = FileSystem.get(FsConstants.VIEWFS_URI, conf); fsView.setWorkingDirectory(new Path(wdDir)); // in case testdir relative to wd. - Log.info("Working dir is: " + fsView.getWorkingDirectory()); + Log.getLog().info("Working dir is: " + fsView.getWorkingDirectory()); return fsView; } @@ -118,12 +118,12 @@ public class ViewFileSystemTestSetup { } else { // home dir is at root. Just link the home dir itse URI linkTarget = fsTarget.makeQualified(new Path(homeDir)).toUri(); ConfigUtil.addLink(conf, homeDir, linkTarget); - Log.info("Added link for home dir " + homeDir + "->" + linkTarget); + Log.getLog().info("Added link for home dir " + homeDir + "->" + linkTarget); } // Now set the root of the home dir for viewfs String homeDirRoot = fsTarget.getHomeDirectory().getParent().toUri().getPath(); ConfigUtil.setHomeDirConf(conf, homeDirRoot); - Log.info("Home dir base for viewfs" + homeDirRoot); + Log.getLog().info("Home dir base for viewfs" + homeDirRoot); } /* @@ -138,7 +138,7 @@ public class ViewFileSystemTestSetup { String firstComponent = path.substring(0, indexOfEnd); URI linkTarget = fsTarget.makeQualified(new Path(firstComponent)).toUri(); ConfigUtil.addLink(conf, firstComponent, linkTarget); - Log.info("Added link for " + info + " " + Log.getLog().info("Added link for " + info + " " + firstComponent + "->" + linkTarget); } } http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsTestSetup.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsTestSetup.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsTestSetup.java index 92bcbc3..9b7e17f 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsTestSetup.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsTestSetup.java @@ -26,7 +26,7 @@ import org.apache.hadoop.fs.FsConstants; import org.apache.hadoop.fs.Path; import 
org.apache.hadoop.fs.viewfs.ConfigUtil; import org.apache.hadoop.util.Shell; -import org.mortbay.log.Log; +import org.eclipse.jetty.util.log.Log; /** @@ -82,7 +82,7 @@ public class ViewFsTestSetup { FileContext fc = FileContext.getFileContext(FsConstants.VIEWFS_URI, conf); fc.setWorkingDirectory(new Path(wdDir)); // in case testdir relative to wd. - Log.info("Working dir is: " + fc.getWorkingDirectory()); + Log.getLog().info("Working dir is: " + fc.getWorkingDirectory()); //System.out.println("SRCOfTests = "+ getTestRootPath(fc, "test")); //System.out.println("TargetOfTests = "+ targetOfTests.toUri()); return fc; @@ -107,12 +107,12 @@ public class ViewFsTestSetup { } else { // home dir is at root. Just link the home dir itse URI linkTarget = fsTarget.makeQualified(new Path(homeDir)).toUri(); ConfigUtil.addLink(conf, homeDir, linkTarget); - Log.info("Added link for home dir " + homeDir + "->" + linkTarget); + Log.getLog().info("Added link for home dir " + homeDir + "->" + linkTarget); } // Now set the root of the home dir for viewfs String homeDirRoot = fsTarget.getHomeDirectory().getParent().toUri().getPath(); ConfigUtil.setHomeDirConf(conf, homeDirRoot); - Log.info("Home dir base for viewfs" + homeDirRoot); + Log.getLog().info("Home dir base for viewfs" + homeDirRoot); } /* @@ -128,7 +128,7 @@ public class ViewFsTestSetup { String firstComponent = path.substring(0, indexOfEnd); URI linkTarget = fsTarget.makeQualified(new Path(firstComponent)).toUri(); ConfigUtil.addLink(conf, firstComponent, linkTarget); - Log.info("Added link for " + info + " " + Log.getLog().info("Added link for " + info + " " + firstComponent + "->" + linkTarget); } http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestAuthenticationSessionCookie.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestAuthenticationSessionCookie.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestAuthenticationSessionCookie.java index c51f1e8..44338da 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestAuthenticationSessionCookie.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestAuthenticationSessionCookie.java @@ -22,7 +22,7 @@ import org.apache.hadoop.security.ssl.KeyStoreTestUtil; import org.apache.hadoop.test.GenericTestUtils; import org.junit.After; import org.junit.Test; -import org.mortbay.log.Log; +import org.eclipse.jetty.util.log.Log; import javax.servlet.*; import javax.servlet.http.HttpServletResponse; @@ -33,6 +33,7 @@ import java.net.HttpURLConnection; import java.net.URI; import java.net.URL; import java.net.HttpCookie; +import java.util.HashMap; import java.util.List; public class TestAuthenticationSessionCookie { @@ -71,7 +72,7 @@ public class TestAuthenticationSessionCookie { @Override public void initFilter(FilterContainer container, Configuration conf) { container.addFilter("DummyAuth", DummyAuthenticationFilter.class - .getName(), null); + .getName(), new HashMap<>()); } } @@ -93,7 +94,7 @@ public class TestAuthenticationSessionCookie { @Override public void initFilter(FilterContainer container, Configuration conf) { container.addFilter("Dummy2Auth", Dummy2AuthenticationFilter.class - .getName(), null); + .getName(), new HashMap<>()); } } @@ -149,7 +150,7 @@ public class TestAuthenticationSessionCookie { String header = 
conn.getHeaderField("Set-Cookie"); List cookies = HttpCookie.parse(header); Assert.assertTrue(!cookies.isEmpty()); - Log.info(header); + Log.getLog().info(header); Assert.assertFalse(header.contains("; Expires=")); Assert.assertTrue("token".equals(cookies.get(0).getValue())); } @@ -171,7 +172,7 @@ public class TestAuthenticationSessionCookie { String header = conn.getHeaderField("Set-Cookie"); List cookies = HttpCookie.parse(header); Assert.assertTrue(!cookies.isEmpty()); - Log.info(header); + Log.getLog().info(header); Assert.assertTrue(header.contains("; Expires=")); Assert.assertTrue("token".equals(cookies.get(0).getValue())); } http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpRequestLog.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpRequestLog.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpRequestLog.java index 23e0d3e..212807f 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpRequestLog.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpRequestLog.java @@ -18,9 +18,9 @@ package org.apache.hadoop.http; import org.apache.log4j.Logger; +import org.eclipse.jetty.server.NCSARequestLog; +import org.eclipse.jetty.server.RequestLog; import org.junit.Test; -import org.mortbay.jetty.NCSARequestLog; -import org.mortbay.jetty.RequestLog; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java index 98f0a0e..a36e8ca 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java @@ -28,6 +28,8 @@ import org.apache.hadoop.security.Groups; import org.apache.hadoop.security.ShellBasedUnixGroupsMapping; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.authorize.AccessControlList; +import org.eclipse.jetty.server.ServerConnector; +import org.eclipse.jetty.util.ajax.JSON; import org.junit.AfterClass; import org.junit.Assert; import org.junit.BeforeClass; @@ -36,8 +38,6 @@ import org.junit.Test; import org.junit.rules.ExpectedException; import org.mockito.Mockito; import org.mockito.internal.util.reflection.Whitebox; -import org.mortbay.jetty.Connector; -import org.mortbay.util.ajax.JSON; import javax.servlet.Filter; import javax.servlet.FilterChain; @@ -50,6 +50,7 @@ import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletRequestWrapper; import javax.servlet.http.HttpServletResponse; +import javax.ws.rs.core.MediaType; import java.io.IOException; import java.io.PrintWriter; import java.net.HttpURLConnection; @@ -81,6 +82,7 @@ public class TestHttpServer extends HttpServerFunctionalTest { public void doGet(HttpServletRequest request, HttpServletResponse response ) throws 
ServletException, IOException { + response.setContentType(MediaType.TEXT_PLAIN + "; " + JettyUtils.UTF_8); PrintWriter out = response.getWriter(); Map params = request.getParameterMap(); SortedSet keys = new TreeSet(params.keySet()); @@ -108,6 +110,7 @@ public class TestHttpServer extends HttpServerFunctionalTest { public void doGet(HttpServletRequest request, HttpServletResponse response ) throws ServletException, IOException { + response.setContentType(MediaType.TEXT_PLAIN + "; " + JettyUtils.UTF_8); PrintWriter out = response.getWriter(); SortedSet sortedKeys = new TreeSet(); Enumeration keys = request.getParameterNames(); @@ -130,7 +133,7 @@ public class TestHttpServer extends HttpServerFunctionalTest { public void doGet(HttpServletRequest request, HttpServletResponse response ) throws ServletException, IOException { - response.setContentType("text/html"); + response.setContentType(MediaType.TEXT_HTML + "; " + JettyUtils.UTF_8); PrintWriter out = response.getWriter(); out.print("hello world"); out.close(); @@ -222,7 +225,8 @@ public class TestHttpServer extends HttpServerFunctionalTest { conn = (HttpURLConnection)servletUrl.openConnection(); conn.connect(); assertEquals(200, conn.getResponseCode()); - assertEquals("text/plain; charset=utf-8", conn.getContentType()); + assertEquals(MediaType.TEXT_PLAIN + ";" + JettyUtils.UTF_8, + conn.getContentType()); // We should ignore parameters for mime types - ie a parameter // ending in .css should not change mime type @@ -230,14 +234,16 @@ public class TestHttpServer extends HttpServerFunctionalTest { conn = (HttpURLConnection)servletUrl.openConnection(); conn.connect(); assertEquals(200, conn.getResponseCode()); - assertEquals("text/plain; charset=utf-8", conn.getContentType()); + assertEquals(MediaType.TEXT_PLAIN + ";" + JettyUtils.UTF_8, + conn.getContentType()); // Servlets that specify text/html should get that content type servletUrl = new URL(baseUrl, "/htmlcontent"); conn = (HttpURLConnection)servletUrl.openConnection(); conn.connect(); assertEquals(200, conn.getResponseCode()); - assertEquals("text/html; charset=utf-8", conn.getContentType()); + assertEquals(MediaType.TEXT_HTML + ";" + JettyUtils.UTF_8, + conn.getContentType()); } @Test @@ -488,7 +494,7 @@ public class TestHttpServer extends HttpServerFunctionalTest { @SuppressWarnings("unchecked") private static Map parse(String jsonString) { - return (Map)JSON.parse(jsonString); + return (Map) JSON.parse(jsonString); } @Test public void testJersey() throws Exception { @@ -592,7 +598,7 @@ public class TestHttpServer extends HttpServerFunctionalTest { // not bound, ephemeral should return requested port (0 for ephemeral) List listeners = (List) Whitebox.getInternalState(server, "listeners"); - Connector listener = (Connector) listeners.get(0); + ServerConnector listener = (ServerConnector)listeners.get(0); assertEquals(port, listener.getPort()); // verify hostname is what was given http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java index b0fadcf..f58c230 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java +++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java @@ -146,9 +146,11 @@ public class TestServletFilter extends HttpServerFunctionalTest { } static public class ErrorFilter extends SimpleFilter { + static final String EXCEPTION_MESSAGE = + "Throwing the exception from Filter init"; @Override public void init(FilterConfig arg0) throws ServletException { - throw new ServletException("Throwing the exception from Filter init"); + throw new ServletException(EXCEPTION_MESSAGE); } /** Configuration for the filter */ @@ -174,7 +176,8 @@ public class TestServletFilter extends HttpServerFunctionalTest { http.start(); fail("expecting exception"); } catch (IOException e) { - assertTrue( e.getMessage().contains("Problem in starting http server. Server handlers failed")); + assertEquals("Problem starting http server", e.getMessage()); + assertEquals(ErrorFilter.EXCEPTION_MESSAGE, e.getCause().getMessage()); } } http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/resource/JerseyResource.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/resource/JerseyResource.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/resource/JerseyResource.java index f1313e2..607d17f 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/resource/JerseyResource.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/resource/JerseyResource.java @@ -32,7 +32,8 @@ import javax.ws.rs.core.Response; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.mortbay.util.ajax.JSON; +import org.apache.hadoop.http.JettyUtils; +import org.eclipse.jetty.util.ajax.JSON; /** * A simple Jersey resource class TestHttpServer. 
@@ -48,7 +49,7 @@ public class JerseyResource { @GET @Path("{" + PATH + ":.*}") - @Produces({MediaType.APPLICATION_JSON}) + @Produces({MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8}) public Response get( @PathParam(PATH) @DefaultValue("UNKNOWN_" + PATH) final String path, @QueryParam(OP) @DefaultValue("UNKNOWN_" + OP) final String op http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/web/TestWebDelegationToken.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/web/TestWebDelegationToken.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/web/TestWebDelegationToken.java index 73562b5..44241de 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/web/TestWebDelegationToken.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/web/TestWebDelegationToken.java @@ -31,22 +31,22 @@ import org.apache.hadoop.security.authentication.server.PseudoAuthenticationHand import org.apache.hadoop.security.authentication.util.KerberosUtil; import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager; import org.codehaus.jackson.map.ObjectMapper; +import org.eclipse.jetty.server.Server; +import org.eclipse.jetty.server.ServerConnector; +import org.eclipse.jetty.servlet.ServletContextHandler; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Test; -import org.mortbay.jetty.AbstractConnector; -import org.mortbay.jetty.Connector; -import org.mortbay.jetty.Server; -import org.mortbay.jetty.servlet.Context; -import org.mortbay.jetty.servlet.FilterHolder; -import org.mortbay.jetty.servlet.ServletHolder; +import org.eclipse.jetty.servlet.FilterHolder; +import org.eclipse.jetty.servlet.ServletHolder; import javax.security.auth.Subject; import javax.security.auth.kerberos.KerberosPrincipal; import javax.security.auth.login.AppConfigurationEntry; import javax.security.auth.login.Configuration; import javax.security.auth.login.LoginContext; +import javax.servlet.DispatcherType; import javax.servlet.Filter; import javax.servlet.FilterConfig; import javax.servlet.ServletException; @@ -66,6 +66,7 @@ import java.net.URL; import java.security.Principal; import java.security.PrivilegedActionException; import java.security.PrivilegedExceptionAction; +import java.util.EnumSet; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -177,7 +178,7 @@ public class TestWebDelegationToken { protected Server createJettyServer() { try { jetty = new Server(0); - jetty.getConnectors()[0].setHost("localhost"); + ((ServerConnector)jetty.getConnectors()[0]).setHost("localhost"); return jetty; } catch (Exception ex) { throw new RuntimeException("Could not setup Jetty: " + ex.getMessage(), @@ -186,7 +187,7 @@ public class TestWebDelegationToken { } protected String getJettyURL() { - Connector c = jetty.getConnectors()[0]; + ServerConnector c = (ServerConnector)jetty.getConnectors()[0]; return "http://" + c.getHost() + ":" + c.getLocalPort(); } @@ -217,10 +218,11 @@ public class TestWebDelegationToken { @Test public void testRawHttpCalls() throws Exception { final Server jetty = createJettyServer(); - Context context = new Context(); + ServletContextHandler context = 
new ServletContextHandler(); context.setContextPath("/foo"); jetty.setHandler(context); - context.addFilter(new FilterHolder(AFilter.class), "/*", 0); + context.addFilter(new FilterHolder(AFilter.class), "/*", + EnumSet.of(DispatcherType.REQUEST)); context.addServlet(new ServletHolder(PingServlet.class), "/bar"); try { jetty.start(); @@ -337,10 +339,11 @@ public class TestWebDelegationToken { private void testDelegationTokenAuthenticatorCalls(final boolean useQS) throws Exception { final Server jetty = createJettyServer(); - Context context = new Context(); + ServletContextHandler context = new ServletContextHandler(); context.setContextPath("/foo"); jetty.setHandler(context); - context.addFilter(new FilterHolder(AFilter.class), "/*", 0); + context.addFilter(new FilterHolder(AFilter.class), "/*", + EnumSet.of(DispatcherType.REQUEST)); context.addServlet(new ServletHolder(PingServlet.class), "/bar"); try { @@ -446,10 +449,11 @@ public class TestWebDelegationToken { DummyDelegationTokenSecretManager secretMgr = new DummyDelegationTokenSecretManager(); final Server jetty = createJettyServer(); - Context context = new Context(); + ServletContextHandler context = new ServletContextHandler(); context.setContextPath("/foo"); jetty.setHandler(context); - context.addFilter(new FilterHolder(AFilter.class), "/*", 0); + context.addFilter(new FilterHolder(AFilter.class), "/*", + EnumSet.of(DispatcherType.REQUEST)); context.addServlet(new ServletHolder(PingServlet.class), "/bar"); try { secretMgr.startThreads(); @@ -525,10 +529,11 @@ public class TestWebDelegationToken { private void testDelegationTokenAuthenticatedURLWithNoDT( Class filterClass) throws Exception { final Server jetty = createJettyServer(); - Context context = new Context(); + ServletContextHandler context = new ServletContextHandler(); context.setContextPath("/foo"); jetty.setHandler(context); - context.addFilter(new FilterHolder(filterClass), "/*", 0); + context.addFilter(new FilterHolder(filterClass), "/*", + EnumSet.of(DispatcherType.REQUEST)); context.addServlet(new ServletHolder(UserServlet.class), "/bar"); try { @@ -594,10 +599,11 @@ public class TestWebDelegationToken { public void testFallbackToPseudoDelegationTokenAuthenticator() throws Exception { final Server jetty = createJettyServer(); - Context context = new Context(); + ServletContextHandler context = new ServletContextHandler(); context.setContextPath("/foo"); jetty.setHandler(context); - context.addFilter(new FilterHolder(PseudoDTAFilter.class), "/*", 0); + context.addFilter(new FilterHolder(PseudoDTAFilter.class), "/*", + EnumSet.of(DispatcherType.REQUEST)); context.addServlet(new ServletHolder(UserServlet.class), "/bar"); try { @@ -745,11 +751,11 @@ public class TestWebDelegationToken { Assert.assertTrue(testDir.mkdirs()); MiniKdc kdc = new MiniKdc(MiniKdc.createConf(), testDir); final Server jetty = createJettyServer(); - Context context = new Context(); + ServletContextHandler context = new ServletContextHandler(); context.setContextPath("/foo"); jetty.setHandler(context); - ((AbstractConnector)jetty.getConnectors()[0]).setResolveNames(true); - context.addFilter(new FilterHolder(KDTAFilter.class), "/*", 0); + context.addFilter(new FilterHolder(KDTAFilter.class), "/*", + EnumSet.of(DispatcherType.REQUEST)); context.addServlet(new ServletHolder(UserServlet.class), "/bar"); try { kdc.start(); @@ -824,10 +830,11 @@ public class TestWebDelegationToken { @Test public void testProxyUser() throws Exception { final Server jetty = createJettyServer(); - Context context = new 
Context(); + ServletContextHandler context = new ServletContextHandler(); context.setContextPath("/foo"); jetty.setHandler(context); - context.addFilter(new FilterHolder(PseudoDTAFilter.class), "/*", 0); + context.addFilter(new FilterHolder(PseudoDTAFilter.class), "/*", + EnumSet.of(DispatcherType.REQUEST)); context.addServlet(new ServletHolder(UserServlet.class), "/bar"); try { @@ -921,10 +928,11 @@ public class TestWebDelegationToken { @Test public void testHttpUGI() throws Exception { final Server jetty = createJettyServer(); - Context context = new Context(); + ServletContextHandler context = new ServletContextHandler(); context.setContextPath("/foo"); jetty.setHandler(context); - context.addFilter(new FilterHolder(PseudoDTAFilter.class), "/*", 0); + context.addFilter(new FilterHolder(PseudoDTAFilter.class), "/*", + EnumSet.of(DispatcherType.REQUEST)); context.addServlet(new ServletHolder(UGIServlet.class), "/bar"); try { @@ -980,12 +988,12 @@ public class TestWebDelegationToken { @Test public void testIpaddressCheck() throws Exception { final Server jetty = createJettyServer(); - ((AbstractConnector)jetty.getConnectors()[0]).setResolveNames(true); - Context context = new Context(); + ServletContextHandler context = new ServletContextHandler(); context.setContextPath("/foo"); jetty.setHandler(context); - context.addFilter(new FilterHolder(IpAddressBasedPseudoDTAFilter.class), "/*", 0); + context.addFilter(new FilterHolder(IpAddressBasedPseudoDTAFilter.class), "/*", + EnumSet.of(DispatcherType.REQUEST)); context.addServlet(new ServletHolder(UGIServlet.class), "/bar"); try { http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-common-project/hadoop-kms/pom.xml ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-kms/pom.xml b/hadoop-common-project/hadoop-kms/pom.xml index f65e94a..3eaaf52 100644 --- a/hadoop-common-project/hadoop-kms/pom.xml +++ b/hadoop-common-project/hadoop-kms/pom.xml @@ -80,12 +80,12 @@ javax.servlet - servlet-api + javax.servlet-api provided - org.mortbay.jetty - jetty + org.eclipse.jetty + jetty-server test @@ -111,7 +111,7 @@ javax.servlet - servlet-api + javax.servlet-api javax.servlet @@ -122,19 +122,19 @@ jsp-api - org.mortbay.jetty - jetty + org.eclipse.jetty + jetty-server - org.mortbay.jetty + org.eclipse.jetty jetty-util - org.mortbay.jetty + org.eclipse.jetty jsp-api-2.1 - org.mortbay.jetty + org.eclipse.jetty servlet-api-2.5 @@ -178,7 +178,7 @@ compile - org.mortbay.jetty + org.eclipse.jetty jetty-util compile --------------------------------------------------------------------- To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org For additional commands, e-mail: common-commits-help@hadoop.apache.org
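
For readers adapting code outside this patch, the recurring change above from org.mortbay.log.Log to org.eclipse.jetty.util.log.Log also changes the call shape: Jetty 9 no longer exposes static info(...) helpers, so the tests now go through Log.getLog(). A minimal sketch of that pattern, assuming only jetty-util 9.x on the classpath (the class name JettyLogSketch is illustrative and not part of the patch):

import org.eclipse.jetty.util.log.Log;
import org.eclipse.jetty.util.log.Logger;

public class JettyLogSketch {
  public static void main(String[] args) {
    // Jetty 6 offered static helpers on org.mortbay.log.Log; Jetty 9 hands out
    // a Logger instance via Log.getLog(), which the test changes above call as
    // Log.getLog().info(...).
    Logger log = Log.getLog();
    log.info("Working dir is: " + System.getProperty("user.dir"));
  }
}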
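
The test changes in TestWebDelegationToken and TestHttpServer repeat a second migration pattern: org.mortbay.jetty.servlet.Context becomes org.eclipse.jetty.servlet.ServletContextHandler, addFilter takes an EnumSet of DispatcherType instead of an int dispatcher mask, and host/port access moves from Connector to ServerConnector. A self-contained sketch of that Jetty 9 setup, assuming jetty-server, jetty-servlet and the Servlet 3.x API are on the classpath (EmbeddedJettySketch, SomeFilter and SomeServlet are illustrative names, not code from the patch):

import java.io.IOException;
import java.util.EnumSet;
import javax.servlet.DispatcherType;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServlet;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.ServerConnector;
import org.eclipse.jetty.servlet.FilterHolder;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;

public class EmbeddedJettySketch {

  /** Pass-through filter, standing in for the patch's test filters. */
  public static class SomeFilter implements Filter {
    @Override public void init(FilterConfig conf) {}
    @Override public void doFilter(ServletRequest req, ServletResponse resp,
        FilterChain chain) throws IOException, ServletException {
      chain.doFilter(req, resp);
    }
    @Override public void destroy() {}
  }

  /** Placeholder servlet, standing in for the patch's test servlets. */
  public static class SomeServlet extends HttpServlet {
  }

  public static void main(String[] args) throws Exception {
    Server jetty = new Server(0);
    // Jetty 9 exposes host and port on ServerConnector; the generic Connector
    // handle used by the Jetty 6 version of these tests no longer does.
    ServerConnector connector = (ServerConnector) jetty.getConnectors()[0];
    connector.setHost("localhost");

    // Context -> ServletContextHandler, and the dispatcher-mask argument of
    // addFilter becomes an EnumSet<DispatcherType>.
    ServletContextHandler context = new ServletContextHandler();
    context.setContextPath("/foo");
    jetty.setHandler(context);
    context.addFilter(new FilterHolder(SomeFilter.class), "/*",
        EnumSet.of(DispatcherType.REQUEST));
    context.addServlet(new ServletHolder(SomeServlet.class), "/bar");

    jetty.start();
    System.out.println("http://" + connector.getHost() + ":"
        + connector.getLocalPort() + "/foo/bar");
    jetty.stop();
  }
}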
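
Finally, the servlet and Jersey hunks attach an explicit charset to responses (MediaType.TEXT_PLAIN + "; " + JettyUtils.UTF_8) rather than relying on container defaults, since the returned Content-Type header is what TestHttpServer asserts on. A hedged sketch of that idea, assuming the new org.apache.hadoop.http.JettyUtils.UTF_8 constant expands to a charset=utf-8 parameter; the literal below stands in for that constant, and PlainTextServlet is an illustrative name:

import java.io.IOException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.core.MediaType;

public class PlainTextServlet extends HttpServlet {
  @Override
  protected void doGet(HttpServletRequest req, HttpServletResponse resp)
      throws IOException {
    // Set the media type and charset explicitly, mirroring the pattern the
    // patch applies to the test servlets; "charset=utf-8" stands in for the
    // JettyUtils.UTF_8 constant added by this change.
    resp.setContentType(MediaType.TEXT_PLAIN + "; charset=utf-8");
    resp.getWriter().println("hello");
  }
}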