hbase-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From els...@apache.org
Subject [hbase] branch branch-2.1 updated: Revert "Revert "HBASE-17115 Define UI admins via an ACL""
Date Wed, 29 Jan 2020 22:12:17 GMT
This is an automated email from the ASF dual-hosted git repository.

elserj pushed a commit to branch branch-2.1
in repository https://gitbox.apache.org/repos/asf/hbase.git


The following commit(s) were added to refs/heads/branch-2.1 by this push:
     new 0bf92e5  Revert "Revert "HBASE-17115 Define UI admins via an ACL""
0bf92e5 is described below

commit 0bf92e5b9494f6822b6037a04b1e350aea5c6ca3
Author: Josh Elser <elserj@apache.org>
AuthorDate: Wed Jan 29 14:15:44 2020 -0500

    Revert "Revert "HBASE-17115 Define UI admins via an ACL""
    
    This reverts commit b46ad37ad80e7804ed441dc3fd984622bb1436c6.
    
    This re-applies HBASE-17115 which I accidentally pushed before it was ready.
---
 .../hadoop/hbase/http/AdminAuthorizedFilter.java   |  65 +++
 .../org/apache/hadoop/hbase/http/HttpServer.java   | 137 ++++---
 .../org/apache/hadoop/hbase/http/InfoServer.java   |  73 +++-
 .../org/apache/hadoop/hbase/http/log/LogLevel.java |   9 +
 .../apache/hadoop/hbase/http/TestHttpServer.java   |  13 +-
 .../hadoop/hbase/http/TestSSLHttpServer.java       |   2 +-
 .../hadoop/hbase/http/TestSpnegoHttpServer.java    |   4 +-
 .../org/apache/hadoop/hbase/master/HMaster.java    |   2 +-
 .../hadoop/hbase/regionserver/HRegionServer.java   |   4 +-
 .../resources/hbase-webapps/master/snapshot.jsp    |   3 +-
 .../main/resources/hbase-webapps/master/table.jsp  |  14 +-
 .../hadoop/hbase/http/TestInfoServersACL.java      | 438 +++++++++++++++++++++
 src/main/asciidoc/_chapters/security.adoc          |  85 +++-
 13 files changed, 775 insertions(+), 74 deletions(-)

diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/AdminAuthorizedFilter.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/AdminAuthorizedFilter.java
new file mode 100644
index 0000000..215ff37
--- /dev/null
+++ b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/AdminAuthorizedFilter.java
@@ -0,0 +1,65 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http;
+
+import java.io.IOException;
+
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.security.authorize.AccessControlList;
+import org.apache.yetus.audience.InterfaceAudience;
+
+@InterfaceAudience.Private
+public class AdminAuthorizedFilter implements Filter {
+
+  private Configuration conf;
+  private AccessControlList adminsAcl;
+
+  @Override public void init(FilterConfig filterConfig) throws ServletException {
+    adminsAcl = (AccessControlList) filterConfig.getServletContext().getAttribute(
+        HttpServer.ADMINS_ACL);
+    conf = (Configuration) filterConfig.getServletContext().getAttribute(
+        HttpServer.CONF_CONTEXT_ATTRIBUTE);
+  }
+
+  @Override
+  public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
+      throws IOException, ServletException {
+    if (!(request instanceof HttpServletRequest) || !(response instanceof HttpServletResponse)) {
+      throw new UnsupportedOperationException("Only accepts HTTP");
+    }
+    HttpServletRequest httpReq = (HttpServletRequest) request;
+    HttpServletResponse httpResp = (HttpServletResponse) response;
+
+    if (!HttpServer.hasAdministratorAccess(conf, adminsAcl, httpReq, httpResp)) {
+      return;
+    }
+
+    chain.doFilter(request, response);
+  }
+
+  @Override public void destroy() {}
+}
diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java
index 7fba6a6..fb98700 100644
--- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java
+++ b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java
@@ -36,6 +36,7 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.stream.Collectors;
+
 import javax.servlet.Filter;
 import javax.servlet.FilterChain;
 import javax.servlet.FilterConfig;
@@ -79,7 +80,6 @@ import org.eclipse.jetty.servlet.DefaultServlet;
 import org.eclipse.jetty.servlet.FilterHolder;
 import org.eclipse.jetty.servlet.FilterMapping;
 import org.eclipse.jetty.servlet.ServletContextHandler;
-import org.eclipse.jetty.servlet.ServletHandler;
 import org.eclipse.jetty.servlet.ServletHolder;
 import org.eclipse.jetty.util.MultiException;
 import org.eclipse.jetty.util.ssl.SslContextFactory;
@@ -131,6 +131,13 @@ public class HttpServer implements FilterContainer {
       "signature.secret.file";
   public static final String HTTP_AUTHENTICATION_SIGNATURE_SECRET_FILE_KEY =
       HTTP_AUTHENTICATION_PREFIX + HTTP_AUTHENTICATION_SIGNATURE_SECRET_FILE_SUFFIX;
+  public static final String HTTP_SPNEGO_AUTHENTICATION_ADMIN_USERS_KEY =
+      HTTP_SPNEGO_AUTHENTICATION_PREFIX + "admin.users";
+  public static final String HTTP_SPNEGO_AUTHENTICATION_ADMIN_GROUPS_KEY =
+      HTTP_SPNEGO_AUTHENTICATION_PREFIX + "admin.groups";
+  public static final String HTTP_PRIVILEGED_CONF_KEY =
+      "hbase.security.authentication.ui.config.protected";
+  public static final boolean HTTP_PRIVILEGED_CONF_DEFAULT = false;
 
   // The ServletContext attribute where the daemon Configuration
   // gets stored.
@@ -171,6 +178,7 @@ public class HttpServer implements FilterContainer {
   protected final boolean findPort;
   protected final Map<ServletContextHandler, Boolean> defaultContexts = new HashMap<>();
   protected final List<String> filterNames = new ArrayList<>();
+  protected final boolean authenticationEnabled;
   static final String STATE_DESCRIPTION_ALIVE = " - alive";
   static final String STATE_DESCRIPTION_NOT_LIVE = " - not live";
 
@@ -377,11 +385,6 @@ public class HttpServer implements FilterContainer {
 
       HttpServer server = new HttpServer(this);
 
-      if (this.securityEnabled) {
-        server.initSpnego(conf, hostName, usernameConfKey, keytabConfKey, kerberosNameRulesKey,
-            signatureSecretFileKey);
-      }
-
       for (URI ep : endpoints) {
         ServerConnector listener = null;
         String scheme = ep.getScheme();
@@ -530,11 +533,12 @@ public class HttpServer implements FilterContainer {
     this.adminsAcl = b.adminsAcl;
     this.webAppContext = createWebAppContext(b.name, b.conf, adminsAcl, appDir);
     this.findPort = b.findPort;
-    initializeWebServer(b.name, b.hostName, b.conf, b.pathSpecs);
+    this.authenticationEnabled = b.securityEnabled;
+    initializeWebServer(b.name, b.hostName, b.conf, b.pathSpecs, b);
   }
 
   private void initializeWebServer(String name, String hostName,
-      Configuration conf, String[] pathSpecs)
+      Configuration conf, String[] pathSpecs, HttpServer.Builder b)
       throws FileNotFoundException, IOException {
 
     Preconditions.checkNotNull(webAppContext);
@@ -557,6 +561,11 @@ public class HttpServer implements FilterContainer {
 
     webServer.setHandler(handlerCollection);
 
+    webAppContext.setAttribute(ADMINS_ACL, adminsAcl);
+
+    // Default apps need to be set first, so that all filters are applied to them.
+    // Because they're added to defaultContexts, we need them there before we start
+    // adding filters
     addDefaultApps(contexts, appDir, conf);
 
     addGlobalFilter("safety", QuotingInputFilter.class.getName(), null);
@@ -569,6 +578,12 @@ public class HttpServer implements FilterContainer {
         SecurityHeadersFilter.class.getName(),
         SecurityHeadersFilter.getDefaultParameters(conf));
 
+    // But security needs to be enabled prior to adding the other servlets
+    if (authenticationEnabled) {
+      initSpnego(conf, hostName, b.usernameConfKey, b.keytabConfKey, b.kerberosNameRulesKey,
+          b.signatureSecretFileKey);
+    }
+
     final FilterInitializer[] initializers = getFilterInitializers(conf);
     if (initializers != null) {
       conf = new Configuration(conf);
@@ -578,7 +593,7 @@ public class HttpServer implements FilterContainer {
       }
     }
 
-    addDefaultServlets(contexts);
+    addDefaultServlets(contexts, conf);
 
     if (pathSpecs != null) {
       for (String path : pathSpecs) {
@@ -656,7 +671,6 @@ public class HttpServer implements FilterContainer {
       }
       logContext.setDisplayName("logs");
       setContextAttributes(logContext, conf);
-      addNoCacheFilter(webAppContext);
       defaultContexts.put(logContext, true);
     }
     // set up the context for "/static/*"
@@ -676,24 +690,31 @@ public class HttpServer implements FilterContainer {
   /**
    * Add default servlets.
    */
-  protected void addDefaultServlets(ContextHandlerCollection contexts) throws IOException {
+  protected void addDefaultServlets(
+      ContextHandlerCollection contexts, Configuration conf) throws IOException {
     // set up default servlets
-    addServlet("stacks", "/stacks", StackServlet.class);
-    addServlet("logLevel", "/logLevel", LogLevel.Servlet.class);
+    addPrivilegedServlet("stacks", "/stacks", StackServlet.class);
+    addPrivilegedServlet("logLevel", "/logLevel", LogLevel.Servlet.class);
     // Hadoop3 has moved completely to metrics2, and  dropped support for Metrics v1's
     // MetricsServlet (see HADOOP-12504).  We'll using reflection to load if against hadoop2.
     // Remove when we drop support for hbase on hadoop2.x.
     try {
-      Class clz = Class.forName("org.apache.hadoop.metrics.MetricsServlet");
-      addServlet("metrics", "/metrics", clz);
+      Class<?> clz = Class.forName("org.apache.hadoop.metrics.MetricsServlet");
+      addPrivilegedServlet("metrics", "/metrics", clz.asSubclass(HttpServlet.class));
     } catch (Exception e) {
       // do nothing
     }
-    addServlet("jmx", "/jmx", JMXJsonServlet.class);
-    addServlet("conf", "/conf", ConfServlet.class);
+    addPrivilegedServlet("jmx", "/jmx", JMXJsonServlet.class);
+    // While we don't expect users to have sensitive information in their configuration, they
+    // might. Give them an option to not expose the service configuration to all users.
+    if (conf.getBoolean(HTTP_PRIVILEGED_CONF_KEY, HTTP_PRIVILEGED_CONF_DEFAULT)) {
+      addPrivilegedServlet("conf", "/conf", ConfServlet.class);
+    } else {
+      addUnprivilegedServlet("conf", "/conf", ConfServlet.class);
+    }
     final String asyncProfilerHome = ProfileServlet.getAsyncProfilerHome();
     if (asyncProfilerHome != null && !asyncProfilerHome.trim().isEmpty()) {
-      addServlet("prof", "/prof", ProfileServlet.class);
+      addPrivilegedServlet("prof", "/prof", ProfileServlet.class);
       Path tmpDir = Paths.get(ProfileServlet.OUTPUT_DIR);
       if (Files.notExists(tmpDir)) {
         Files.createDirectories(tmpDir);
@@ -703,7 +724,7 @@ public class HttpServer implements FilterContainer {
       genCtx.setResourceBase(tmpDir.toAbsolutePath().toString());
       genCtx.setDisplayName("prof-output");
     } else {
-      addServlet("prof", "/prof", ProfileServlet.DisabledServlet.class);
+      addUnprivilegedServlet("prof", "/prof", ProfileServlet.DisabledServlet.class);
       LOG.info("ASYNC_PROFILER_HOME environment variable and async.profiler.home system property " +
         "not specified. Disabling /prof endpoint.");
     }
@@ -735,30 +756,37 @@ public class HttpServer implements FilterContainer {
   }
 
   /**
-   * Add a servlet in the server.
+   * Adds a servlet in the server that any user can access. This method differs from
+   * {@link #addPrivilegedServlet(String, String, Class)} in that any authenticated user
+   * can interact with the servlet added by this method.
    * @param name The name of the servlet (can be passed as null)
    * @param pathSpec The path spec for the servlet
    * @param clazz The servlet class
    */
-  public void addServlet(String name, String pathSpec,
+  public void addUnprivilegedServlet(String name, String pathSpec,
       Class<? extends HttpServlet> clazz) {
-    addInternalServlet(name, pathSpec, clazz, false);
-    addFilterPathMapping(pathSpec, webAppContext);
+    addServletWithAuth(name, pathSpec, clazz, false);
   }
 
   /**
-   * Add an internal servlet in the server.
-   * Note: This method is to be used for adding servlets that facilitate
-   * internal communication and not for user facing functionality. For
-   * servlets added using this method, filters are not enabled.
-   *
-   * @param name The name of the servlet (can be passed as null)
-   * @param pathSpec The path spec for the servlet
-   * @param clazz The servlet class
+   * Adds a servlet in the server that only administrators can access. This method differs from
+   * {@link #addUnprivilegedServlet(String, String, Class)} in that only those authenticated user
+   * who are identified as administrators can interact with the servlet added by this method.
    */
-  public void addInternalServlet(String name, String pathSpec,
+  public void addPrivilegedServlet(String name, String pathSpec,
       Class<? extends HttpServlet> clazz) {
-    addInternalServlet(name, pathSpec, clazz, false);
+    addServletWithAuth(name, pathSpec, clazz, true);
+  }
+
+  /**
+   * Internal method to add a servlet to the HTTP server. Developers should not call this method
+   * directly, but invoke it via {@link #addUnprivilegedServlet(String, String, Class)} or
+   * {@link #addPrivilegedServlet(String, String, Class)}.
+   */
+  void addServletWithAuth(String name, String pathSpec,
+      Class<? extends HttpServlet> clazz, boolean requireAuthz) {
+    addInternalServlet(name, pathSpec, clazz, requireAuthz);
+    addFilterPathMapping(pathSpec, webAppContext);
   }
 
   /**
@@ -766,7 +794,7 @@ public class HttpServer implements FilterContainer {
    * protect with Kerberos authentication.
    * Note: This method is to be used for adding servlets that facilitate
    * internal communication and not for user facing functionality. For
-   +   * servlets added using this method, filters (except internal Kerberos
+   * servlets added using this method, filters (except internal Kerberos
    * filters) are not enabled.
    *
    * @param name The name of the servlet (can be passed as null)
@@ -774,23 +802,22 @@ public class HttpServer implements FilterContainer {
    * @param clazz The servlet class
    * @param requireAuth Require Kerberos authenticate to access servlet
    */
-  public void addInternalServlet(String name, String pathSpec,
-      Class<? extends HttpServlet> clazz, boolean requireAuth) {
+  void addInternalServlet(String name, String pathSpec,
+      Class<? extends HttpServlet> clazz, boolean requireAuthz) {
     ServletHolder holder = new ServletHolder(clazz);
     if (name != null) {
       holder.setName(name);
     }
-    webAppContext.addServlet(holder, pathSpec);
-
-    if(requireAuth && UserGroupInformation.isSecurityEnabled()) {
-       LOG.info("Adding Kerberos (SPNEGO) filter to " + name);
-       ServletHandler handler = webAppContext.getServletHandler();
-       FilterMapping fmap = new FilterMapping();
-       fmap.setPathSpec(pathSpec);
-       fmap.setFilterName(SPNEGO_FILTER);
-       fmap.setDispatches(FilterMapping.ALL);
-       handler.addFilterMapping(fmap);
+    if (authenticationEnabled && requireAuthz) {
+      FilterHolder filter = new FilterHolder(AdminAuthorizedFilter.class);
+      filter.setName(AdminAuthorizedFilter.class.getSimpleName());
+      FilterMapping fmap = new FilterMapping();
+      fmap.setPathSpec(pathSpec);
+      fmap.setDispatches(FilterMapping.ALL);
+      fmap.setFilterName(AdminAuthorizedFilter.class.getSimpleName());
+      webAppContext.getServletHandler().addFilter(filter, fmap);
     }
+    webAppContext.addServlet(holder, pathSpec);
   }
 
   @Override
@@ -1201,6 +1228,13 @@ public class HttpServer implements FilterContainer {
       HttpServletResponse response) throws IOException {
     Configuration conf =
         (Configuration) servletContext.getAttribute(CONF_CONTEXT_ATTRIBUTE);
+    AccessControlList acl = (AccessControlList) servletContext.getAttribute(ADMINS_ACL);
+
+    return hasAdministratorAccess(conf, acl, request, response);
+  }
+
+  public static boolean hasAdministratorAccess(Configuration conf, AccessControlList acl,
+      HttpServletRequest request, HttpServletResponse response) throws IOException {
     // If there is no authorization, anybody has administrator access.
     if (!conf.getBoolean(
         CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, false)) {
@@ -1215,9 +1249,8 @@ public class HttpServer implements FilterContainer {
       return false;
     }
 
-    if (servletContext.getAttribute(ADMINS_ACL) != null &&
-        !userHasAdministratorAccess(servletContext, remoteUser)) {
-      response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "User "
+    if (acl != null && !userHasAdministratorAccess(acl, remoteUser)) {
+      response.sendError(HttpServletResponse.SC_FORBIDDEN, "User "
           + remoteUser + " is unauthorized to access this page.");
       return false;
     }
@@ -1238,9 +1271,13 @@ public class HttpServer implements FilterContainer {
       String remoteUser) {
     AccessControlList adminsAcl = (AccessControlList) servletContext
         .getAttribute(ADMINS_ACL);
+    return userHasAdministratorAccess(adminsAcl, remoteUser);
+  }
+
+  public static boolean userHasAdministratorAccess(AccessControlList acl, String remoteUser) {
     UserGroupInformation remoteUserUGI =
         UserGroupInformation.createRemoteUser(remoteUser);
-    return adminsAcl != null && adminsAcl.isUserAllowed(remoteUserUGI);
+    return acl != null && acl.isUserAllowed(remoteUserUGI);
   }
 
   /**
diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/InfoServer.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/InfoServer.java
index 5fd6514..e50639a 100644
--- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/InfoServer.java
+++ b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/InfoServer.java
@@ -22,11 +22,15 @@ package org.apache.hadoop.hbase.http;
 import java.io.IOException;
 import java.net.URI;
 
+import javax.servlet.ServletContext;
 import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
 
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.security.authorize.AccessControlList;
 import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.hadoop.conf.Configuration;
 
 /**
  * Create a Jetty embedded server to answer http requests. The primary goal
@@ -83,13 +87,59 @@ public class InfoServer {
         .setSignatureSecretFileKey(
             HttpServer.HTTP_AUTHENTICATION_SIGNATURE_SECRET_FILE_KEY)
         .setSecurityEnabled(true);
+
+      // Set an admin ACL on sensitive webUI endpoints
+      AccessControlList acl = buildAdminAcl(c);
+      builder.setACL(acl);
     }
     this.httpServer = builder.build();
   }
 
+  /**
+   * Builds an ACL that will restrict the users who can issue commands to endpoints on the UI
+   * which are meant only for administrators.
+   */
+  AccessControlList buildAdminAcl(Configuration conf) {
+    final String userGroups = conf.get(HttpServer.HTTP_SPNEGO_AUTHENTICATION_ADMIN_USERS_KEY, null);
+    final String adminGroups = conf.get(
+        HttpServer.HTTP_SPNEGO_AUTHENTICATION_ADMIN_GROUPS_KEY, null);
+    if (userGroups == null && adminGroups == null) {
+      // Backwards compatibility - if the user doesn't have anything set, allow all users in.
+      return new AccessControlList("*", null);
+    }
+    return new AccessControlList(userGroups, adminGroups);
+  }
+
+  /**
+   * Explicitly invoke {@link #addPrivilegedServlet(String, String, Class)} or
+   * {@link #addUnprivilegedServlet(String, String, Class)} instead of this method.
+   * This method will add a servlet which any authenticated user can access.
+   *
+   * @deprecated Use {@link #addUnprivilegedServlet(String, String, Class)} or
+   *    {@link #addPrivilegedServlet(String, String, Class)} instead of this
+   *    method which does not state outwardly what kind of authz rules will
+   *    be applied to this servlet.
+   */
+  @Deprecated
   public void addServlet(String name, String pathSpec,
           Class<? extends HttpServlet> clazz) {
-      this.httpServer.addServlet(name, pathSpec, clazz);
+    addUnprivilegedServlet(name, pathSpec, clazz);
+  }
+
+  /**
+   * @see HttpServer#addUnprivilegedServlet(String, String, Class)
+   */
+  public void addUnprivilegedServlet(String name, String pathSpec,
+          Class<? extends HttpServlet> clazz) {
+    this.httpServer.addUnprivilegedServlet(name, pathSpec, clazz);
+  }
+
+  /**
+   * @see HttpServer#addPrivilegedServlet(String, String, Class)
+   */
+  public void addPrivilegedServlet(String name, String pathSpec,
+          Class<? extends HttpServlet> clazz) {
+    this.httpServer.addPrivilegedServlet(name, pathSpec, clazz);
   }
 
   public void setAttribute(String name, Object value) {
@@ -109,4 +159,23 @@ public class InfoServer {
     this.httpServer.stop();
   }
 
+
+  /**
+   * Returns true if and only if UI authentication (spnego) is enabled, UI authorization is enabled,
+   * and the requesting user is defined as an administrator. If the UI is set to readonly, this
+   * method always returns false.
+   */
+  public static boolean canUserModifyUI(
+      HttpServletRequest req, ServletContext ctx, Configuration conf) {
+    if (conf.getBoolean("hbase.master.ui.readonly", false)) {
+      return false;
+    }
+    String remoteUser = req.getRemoteUser();
+    if ("kerberos".equals(conf.get(HttpServer.HTTP_UI_AUTHENTICATION)) &&
+        conf.getBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, false) &&
+        remoteUser != null) {
+      return HttpServer.userHasAdministratorAccess(ctx, remoteUser);
+    }
+    return false;
+  }
 }
diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java
index 2f62313..bb4ff86 100644
--- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java
+++ b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java
@@ -31,6 +31,7 @@ import javax.servlet.http.HttpServletResponse;
 import org.apache.commons.logging.impl.Jdk14Logger;
 import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.hbase.http.HttpServer;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.util.ServletUtil;
 import org.apache.log4j.LogManager;
 import org.apache.yetus.audience.InterfaceAudience;
@@ -105,6 +106,14 @@ public final class LogLevel {
           response)) {
         return;
       }
+      // Disallow modification of the LogLevel if explicitly set to readonly
+      Configuration conf = (Configuration) getServletContext().getAttribute(
+          HttpServer.CONF_CONTEXT_ATTRIBUTE);
+      if (conf.getBoolean("hbase.master.ui.readonly", false)) {
+        response.sendError(HttpServletResponse.SC_FORBIDDEN, "Modification of HBase via"
+            + " the UI is disallowed in configuration.");
+        return;
+      }
 
       PrintWriter out = ServletUtil.initHTML(response, "Log Level");
       String logName = ServletUtil.getParameter(request, "log");
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
index 0f5b8a1..cf24ceb 100644
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
+++ b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
@@ -157,10 +157,10 @@ public class TestHttpServer extends HttpServerFunctionalTest {
     Configuration conf = new Configuration();
     conf.setInt(HttpServer.HTTP_MAX_THREADS, MAX_THREADS);
     server = createTestServer(conf);
-    server.addServlet("echo", "/echo", EchoServlet.class);
-    server.addServlet("echomap", "/echomap", EchoMapServlet.class);
-    server.addServlet("htmlcontent", "/htmlcontent", HtmlContentServlet.class);
-    server.addServlet("longheader", "/longheader", LongHeaderServlet.class);
+    server.addUnprivilegedServlet("echo", "/echo", EchoServlet.class);
+    server.addUnprivilegedServlet("echomap", "/echomap", EchoMapServlet.class);
+    server.addUnprivilegedServlet("htmlcontent", "/htmlcontent", HtmlContentServlet.class);
+    server.addUnprivilegedServlet("longheader", "/longheader", LongHeaderServlet.class);
     server.addJerseyResourcePackage(
         JerseyResource.class.getPackage().getName(), "/jersey/*");
     server.start();
@@ -503,7 +503,8 @@ public class TestHttpServer extends HttpServerFunctionalTest {
     Mockito.when(acls.isUserAllowed(Mockito.<UserGroupInformation>any())).thenReturn(false);
     Mockito.when(context.getAttribute(HttpServer.ADMINS_ACL)).thenReturn(acls);
     Assert.assertFalse(HttpServer.hasAdministratorAccess(context, request, response));
-    Mockito.verify(response).sendError(Mockito.eq(HttpServletResponse.SC_UNAUTHORIZED), Mockito.anyString());
+    Mockito.verify(response).sendError(Mockito.eq(HttpServletResponse.SC_FORBIDDEN),
+            Mockito.anyString());
 
     //authorization ON & user NOT NULL & ACLs NOT NULL & user in in ACLs
     response = Mockito.mock(HttpServletResponse.class);
@@ -593,7 +594,7 @@ public class TestHttpServer extends HttpServerFunctionalTest {
             .addEndpoint(new URI("http://localhost:0"))
             .setFindPort(true).setConf(conf).build();
     myServer.setAttribute(HttpServer.CONF_CONTEXT_ATTRIBUTE, conf);
-    myServer.addServlet("echo", "/echo", EchoServlet.class);
+    myServer.addUnprivilegedServlet("echo", "/echo", EchoServlet.class);
     myServer.start();
 
     String serverURL = "http://"
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestSSLHttpServer.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestSSLHttpServer.java
index c62ca65..364ff3d 100644
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestSSLHttpServer.java
+++ b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestSSLHttpServer.java
@@ -95,7 +95,7 @@ public class TestSSLHttpServer extends HttpServerFunctionalTest {
         .trustStore(sslConf.get("ssl.server.truststore.location"),
             HBaseConfiguration.getPassword(sslConf, "ssl.server.truststore.password", null),
             sslConf.get("ssl.server.truststore.type", "jks")).build();
-    server.addServlet("echo", "/echo", TestHttpServer.EchoServlet.class);
+    server.addUnprivilegedServlet("echo", "/echo", TestHttpServer.EchoServlet.class);
     server.start();
     baseUrl = new URL("https://"
         + NetUtils.getHostPortString(server.getConnectorAddress(0)));
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestSpnegoHttpServer.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestSpnegoHttpServer.java
index 1262b0c..aef9ecb 100644
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestSpnegoHttpServer.java
+++ b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestSpnegoHttpServer.java
@@ -109,7 +109,7 @@ public class TestSpnegoHttpServer extends HttpServerFunctionalTest {
     Configuration conf = buildSpnegoConfiguration(serverPrincipal, infoServerKeytab);
 
     server = createTestServerWithSecurity(conf);
-    server.addServlet("echo", "/echo", EchoServlet.class);
+    server.addUnprivilegedServlet("echo", "/echo", EchoServlet.class);
     server.addJerseyResourcePackage(JerseyResource.class.getPackage().getName(), "/jersey/*");
     server.start();
     baseUrl = getServerURL(server);
@@ -253,7 +253,7 @@ public class TestSpnegoHttpServer extends HttpServerFunctionalTest {
     // Intentionally skip keytab and principal
 
     HttpServer customServer = createTestServerWithSecurity(conf);
-    customServer.addServlet("echo", "/echo", EchoServlet.class);
+    customServer.addUnprivilegedServlet("echo", "/echo", EchoServlet.class);
     customServer.addJerseyResourcePackage(JerseyResource.class.getPackage().getName(), "/jersey/*");
     customServer.start();
   }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index 1e4028c..a61750c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -729,7 +729,7 @@ public class HMaster extends HRegionServer implements MasterServices {
 
   @Override
   protected void configureInfoServer() {
-    infoServer.addServlet("master-status", "/master-status", MasterStatusServlet.class);
+    infoServer.addUnprivilegedServlet("master-status", "/master-status", MasterStatusServlet.class);
     infoServer.setAttribute(MASTER, this);
     if (LoadBalancer.isTablesOnMaster(conf)) {
       super.configureInfoServer();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
index ba2b646..08b3a98 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
@@ -764,7 +764,7 @@ public class HRegionServer extends HasThread implements
   }
 
   protected void configureInfoServer() {
-    infoServer.addServlet("rs-status", "/rs-status", RSStatusServlet.class);
+    infoServer.addUnprivilegedServlet("rs-status", "/rs-status", RSStatusServlet.class);
     infoServer.setAttribute(REGIONSERVER, this);
   }
 
@@ -2102,7 +2102,7 @@ public class HRegionServer extends HasThread implements
     while (true) {
       try {
         this.infoServer = new InfoServer(getProcessName(), addr, port, false, this.conf);
-        infoServer.addServlet("dump", "/dump", getDumpServlet());
+        infoServer.addPrivilegedServlet("dump", "/dump", getDumpServlet());
         configureInfoServer();
         this.infoServer.start();
         break;
diff --git a/hbase-server/src/main/resources/hbase-webapps/master/snapshot.jsp b/hbase-server/src/main/resources/hbase-webapps/master/snapshot.jsp
index 1ea2b40..bd9ca4d 100644
--- a/hbase-server/src/main/resources/hbase-webapps/master/snapshot.jsp
+++ b/hbase-server/src/main/resources/hbase-webapps/master/snapshot.jsp
@@ -22,6 +22,7 @@
   import="org.apache.hadoop.conf.Configuration"
   import="org.apache.hadoop.hbase.client.Admin"
   import="org.apache.hadoop.hbase.client.SnapshotDescription"
+  import="org.apache.hadoop.hbase.http.InfoServer"
   import="org.apache.hadoop.hbase.master.HMaster"
   import="org.apache.hadoop.hbase.snapshot.SnapshotInfo"
   import="org.apache.hadoop.util.StringUtils"
@@ -30,7 +31,7 @@
 <%
   HMaster master = (HMaster)getServletContext().getAttribute(HMaster.MASTER);
   Configuration conf = master.getConfiguration();
-  boolean readOnly = conf.getBoolean("hbase.master.ui.readonly", false);
+  boolean readOnly = !InfoServer.canUserModifyUI(request, getServletContext(), conf);
   String snapshotName = request.getParameter("name");
   SnapshotDescription snapshot = null;
   SnapshotInfo.SnapshotStats stats = null;
diff --git a/hbase-server/src/main/resources/hbase-webapps/master/table.jsp b/hbase-server/src/main/resources/hbase-webapps/master/table.jsp
index 09c88bf..ae12c97 100644
--- a/hbase-server/src/main/resources/hbase-webapps/master/table.jsp
+++ b/hbase-server/src/main/resources/hbase-webapps/master/table.jsp
@@ -22,18 +22,21 @@
   import="static org.apache.commons.lang3.StringEscapeUtils.escapeXml"
   import="java.util.ArrayList"
   import="java.util.Collection"
-  import="java.util.Collections"
   import="java.util.LinkedHashMap"
   import="java.util.List"
   import="java.util.Map"
   import="java.util.TreeMap"
-  import=" java.util.concurrent.TimeUnit"
+  import="java.util.concurrent.TimeUnit"
   import="org.apache.commons.lang3.StringEscapeUtils"
   import="org.apache.hadoop.conf.Configuration"
   import="org.apache.hadoop.hbase.HColumnDescriptor"
   import="org.apache.hadoop.hbase.HConstants"
   import="org.apache.hadoop.hbase.HRegionLocation"
+  import="org.apache.hadoop.hbase.RegionMetrics"
+  import="org.apache.hadoop.hbase.RegionMetricsBuilder"
+  import="org.apache.hadoop.hbase.ServerMetrics"
   import="org.apache.hadoop.hbase.ServerName"
+  import="org.apache.hadoop.hbase.Size"
   import="org.apache.hadoop.hbase.TableName"
   import="org.apache.hadoop.hbase.TableNotFoundException"
   import="org.apache.hadoop.hbase.client.AsyncAdmin"
@@ -45,6 +48,7 @@
   import="org.apache.hadoop.hbase.client.RegionLocator"
   import="org.apache.hadoop.hbase.client.RegionReplicaUtil"
   import="org.apache.hadoop.hbase.client.Table"
+  import="org.apache.hadoop.hbase.http.InfoServer"
   import="org.apache.hadoop.hbase.master.HMaster"
   import="org.apache.hadoop.hbase.quotas.QuotaTableUtil"
   import="org.apache.hadoop.hbase.quotas.SpaceQuotaSnapshot"
@@ -57,10 +61,6 @@
 <%@ page import="org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos" %>
 <%@ page import="org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas" %>
 <%@ page import="org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota" %>
-<%@ page import="org.apache.hadoop.hbase.ServerMetrics" %>
-<%@ page import="org.apache.hadoop.hbase.RegionMetrics" %>
-<%@ page import="org.apache.hadoop.hbase.Size" %>
-<%@ page import="org.apache.hadoop.hbase.RegionMetricsBuilder" %>
 <%!
   /**
    * @return An empty region load stamped with the passed in <code>regionInfo</code>
@@ -86,7 +86,7 @@
   String tableHeader;
   boolean withReplica = false;
   boolean showFragmentation = conf.getBoolean("hbase.master.ui.fragmentation.enabled", false);
-  boolean readOnly = conf.getBoolean("hbase.master.ui.readonly", false);
+  boolean readOnly = !InfoServer.canUserModifyUI(request, getServletContext(), conf);
   int numMetaReplicas = conf.getInt(HConstants.META_REPLICAS_NUM,
                         HConstants.DEFAULT_META_REPLICA_NUM);
   Map<String, Integer> frags = null;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestInfoServersACL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestInfoServersACL.java
new file mode 100644
index 0000000..fa5dea6
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestInfoServersACL.java
@@ -0,0 +1,438 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import java.io.File;
+import java.net.HttpURLConnection;
+import java.net.URL;
+import java.security.PrivilegedExceptionAction;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.LocalHBaseCluster;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
+import org.apache.hadoop.hbase.security.HBaseKerberosUtils;
+import org.apache.hadoop.hbase.security.token.TokenProvider;
+import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.util.FSUtils;
+import org.apache.hadoop.hbase.util.Pair;
+import org.apache.hadoop.minikdc.MiniKdc;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.http.auth.AuthSchemeProvider;
+import org.apache.http.auth.AuthScope;
+import org.apache.http.auth.KerberosCredentials;
+import org.apache.http.client.config.AuthSchemes;
+import org.apache.http.client.methods.CloseableHttpResponse;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.config.Lookup;
+import org.apache.http.config.RegistryBuilder;
+import org.apache.http.impl.auth.SPNegoSchemeFactory;
+import org.apache.http.impl.client.BasicCredentialsProvider;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.impl.client.HttpClients;
+import org.apache.http.util.EntityUtils;
+import org.ietf.jgss.GSSCredential;
+import org.ietf.jgss.GSSManager;
+import org.ietf.jgss.GSSName;
+import org.ietf.jgss.Oid;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.ClassRule;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Testing info servers for admin acl.
+ */
+@Category({ MiscTests.class, SmallTests.class })
+public class TestInfoServersACL {
+
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+      HBaseClassTestRule.forClass(TestInfoServersACL.class);
+
+  private static final Logger LOG = LoggerFactory.getLogger(TestInfoServersACL.class);
+  private final static HBaseTestingUtility UTIL = new HBaseTestingUtility();
+  private static Configuration conf;
+
+  protected static String USERNAME;
+  private static LocalHBaseCluster CLUSTER;
+  private static final File KEYTAB_FILE = new File(UTIL.getDataTestDir("keytab").toUri().getPath());
+  private static MiniKdc KDC;
+  private static String HOST = "localhost";
+  private static String PRINCIPAL;
+  private static String HTTP_PRINCIPAL;
+
+  @Rule
+  public TestName name = new TestName();
+
+  // user/group present in hbase.admin.acl
+  private static final String USER_ADMIN_STR = "admin";
+
+  // user with no permissions
+  private static final String USER_NONE_STR = "none";
+
+  @BeforeClass
+  public static void beforeClass() throws Exception {
+    conf = UTIL.getConfiguration();
+    KDC = UTIL.setupMiniKdc(KEYTAB_FILE);
+    USERNAME = UserGroupInformation.getLoginUser().getShortUserName();
+    PRINCIPAL = USERNAME + "/" + HOST;
+    HTTP_PRINCIPAL = "HTTP/" + HOST;
+    // Create principals for services and the test users
+    KDC.createPrincipal(KEYTAB_FILE, PRINCIPAL, HTTP_PRINCIPAL, USER_ADMIN_STR, USER_NONE_STR);
+    UTIL.startMiniZKCluster();
+
+    HBaseKerberosUtils.setSecuredConfiguration(conf,
+        PRINCIPAL + "@" + KDC.getRealm(), HTTP_PRINCIPAL + "@" + KDC.getRealm());
+    HBaseKerberosUtils.setSSLConfiguration(UTIL, TestInfoServersACL.class);
+
+    conf.setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,
+        TokenProvider.class.getName());
+    UTIL.startMiniDFSCluster(1);
+    Path rootdir = UTIL.getDataTestDirOnTestFS("TestInfoServersACL");
+    FSUtils.setRootDir(conf, rootdir);
+
+    // The info servers do not run in tests by default.
+    // Set them to ephemeral ports so they will start
+    // setup configuration
+    conf.setInt(HConstants.MASTER_INFO_PORT, 0);
+    conf.setInt(HConstants.REGIONSERVER_INFO_PORT, 0);
+
+    conf.set(HttpServer.HTTP_UI_AUTHENTICATION, "kerberos");
+    conf.set(HttpServer.HTTP_SPNEGO_AUTHENTICATION_PRINCIPAL_KEY, HTTP_PRINCIPAL);
+    conf.set(HttpServer.HTTP_SPNEGO_AUTHENTICATION_KEYTAB_KEY, KEYTAB_FILE.getAbsolutePath());
+
+    // ACL lists work only when "hadoop.security.authorization" is set to true
+    conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, true);
+    // only user admin will have acl access
+    conf.set(HttpServer.HTTP_SPNEGO_AUTHENTICATION_ADMIN_USERS_KEY, USER_ADMIN_STR);
+    //conf.set(HttpServer.FILTER_INITIALIZERS_PROPERTY, "");
+
+    CLUSTER = new LocalHBaseCluster(conf, 1);
+    CLUSTER.startup();
+  }
+
+  /**
+   * Helper method to shut down the cluster (if running)
+   */
+  @AfterClass
+  public static void shutDownMiniCluster() throws Exception {
+    if (CLUSTER != null) {
+      CLUSTER.shutdown();
+      CLUSTER.join();
+    }
+    if (KDC != null) {
+      KDC.stop();
+    }
+    UTIL.shutdownMiniCluster();
+  }
+
+  @Test
+  public void testAuthorizedUser() throws Exception {
+    UserGroupInformation admin = UserGroupInformation.loginUserFromKeytabAndReturnUGI(
+        USER_ADMIN_STR, KEYTAB_FILE.getAbsolutePath());
+    admin.doAs(new PrivilegedExceptionAction<Void>() {
+      @Override public Void run() throws Exception {
+        // Check the expected content is present in the http response
+        String expectedContent = "Get Log Level";
+        Pair<Integer,String> pair = getLogLevelPage();
+        assertEquals(HttpURLConnection.HTTP_OK, pair.getFirst().intValue());
+        assertTrue("expected=" + expectedContent + ", content=" + pair.getSecond(),
+          pair.getSecond().contains(expectedContent));
+        return null;
+      }
+    });
+  }
+
+  @Test
+  public void testUnauthorizedUser() throws Exception {
+    UserGroupInformation nonAdmin = UserGroupInformation.loginUserFromKeytabAndReturnUGI(
+        USER_NONE_STR, KEYTAB_FILE.getAbsolutePath());
+    nonAdmin.doAs(new PrivilegedExceptionAction<Void>() {
+      @Override public Void run() throws Exception {
+        Pair<Integer,String> pair = getLogLevelPage();
+        assertEquals(HttpURLConnection.HTTP_FORBIDDEN, pair.getFirst().intValue());
+        return null;
+      }
+    });
+  }
+
+  @Test
+  public void testTableActionsAvailableForAdmins() throws Exception {
+    final String expectedAuthorizedContent = "Actions:";
+    UserGroupInformation admin = UserGroupInformation.loginUserFromKeytabAndReturnUGI(
+        USER_ADMIN_STR, KEYTAB_FILE.getAbsolutePath());
+    admin.doAs(new PrivilegedExceptionAction<Void>() {
+      @Override public Void run() throws Exception {
+        // Check the expected content is present in the http response
+        Pair<Integer,String> pair = getTablePage(TableName.META_TABLE_NAME);
+        assertEquals(HttpURLConnection.HTTP_OK, pair.getFirst().intValue());
+        assertTrue("expected=" + expectedAuthorizedContent + ", content=" + pair.getSecond(),
+          pair.getSecond().contains(expectedAuthorizedContent));
+        return null;
+      }
+    });
+
+    UserGroupInformation nonAdmin = UserGroupInformation.loginUserFromKeytabAndReturnUGI(
+        USER_NONE_STR, KEYTAB_FILE.getAbsolutePath());
+    nonAdmin.doAs(new PrivilegedExceptionAction<Void>() {
+      @Override public Void run() throws Exception {
+        Pair<Integer,String> pair = getTablePage(TableName.META_TABLE_NAME);
+        assertEquals(HttpURLConnection.HTTP_OK, pair.getFirst().intValue());
+        assertFalse("should not find=" + expectedAuthorizedContent + ", content=" +
+            pair.getSecond(), pair.getSecond().contains(expectedAuthorizedContent));
+        return null;
+      }
+    });
+  }
+
+  @Test
+  public void testLogsAvailableForAdmins() throws Exception {
+    final String expectedAuthorizedContent = "Directory: /logs/";
+    UserGroupInformation admin = UserGroupInformation.loginUserFromKeytabAndReturnUGI(
+        USER_ADMIN_STR, KEYTAB_FILE.getAbsolutePath());
+    admin.doAs(new PrivilegedExceptionAction<Void>() {
+      @Override public Void run() throws Exception {
+        // Check the expected content is present in the http response
+        Pair<Integer,String> pair = getLogsPage();
+        assertEquals(HttpURLConnection.HTTP_OK, pair.getFirst().intValue());
+        assertTrue("expected=" + expectedAuthorizedContent + ", content=" + pair.getSecond(),
+          pair.getSecond().contains(expectedAuthorizedContent));
+        return null;
+      }
+    });
+
+    UserGroupInformation nonAdmin = UserGroupInformation.loginUserFromKeytabAndReturnUGI(
+        USER_NONE_STR, KEYTAB_FILE.getAbsolutePath());
+    nonAdmin.doAs(new PrivilegedExceptionAction<Void>() {
+      @Override public Void run() throws Exception {
+        Pair<Integer,String> pair = getLogsPage();
+        assertEquals(HttpURLConnection.HTTP_FORBIDDEN, pair.getFirst().intValue());
+        return null;
+      }
+    });
+  }
+
+  @Test
+  public void testDumpActionsAvailableForAdmins() throws Exception {
+    final String expectedAuthorizedContent = "Master status for";
+    UserGroupInformation admin = UserGroupInformation.loginUserFromKeytabAndReturnUGI(
+        USER_ADMIN_STR, KEYTAB_FILE.getAbsolutePath());
+    admin.doAs(new PrivilegedExceptionAction<Void>() {
+      @Override public Void run() throws Exception {
+        // Check the expected content is present in the http response
+        Pair<Integer,String> pair = getMasterDumpPage();
+        assertEquals(HttpURLConnection.HTTP_OK, pair.getFirst().intValue());
+        assertTrue("expected=" + expectedAuthorizedContent + ", content=" + pair.getSecond(),
+          pair.getSecond().contains(expectedAuthorizedContent));
+        return null;
+      }
+    });
+
+    UserGroupInformation nonAdmin = UserGroupInformation.loginUserFromKeytabAndReturnUGI(
+        USER_NONE_STR, KEYTAB_FILE.getAbsolutePath());
+    nonAdmin.doAs(new PrivilegedExceptionAction<Void>() {
+      @Override public Void run() throws Exception {
+        Pair<Integer,String> pair = getMasterDumpPage();
+        assertEquals(HttpURLConnection.HTTP_FORBIDDEN, pair.getFirst().intValue());
+        return null;
+      }
+    });
+  }
+
+  @Test
+  public void testStackActionsAvailableForAdmins() throws Exception {
+    final String expectedAuthorizedContent = "Process Thread Dump";
+    UserGroupInformation admin = UserGroupInformation.loginUserFromKeytabAndReturnUGI(
+        USER_ADMIN_STR, KEYTAB_FILE.getAbsolutePath());
+    admin.doAs(new PrivilegedExceptionAction<Void>() {
+      @Override public Void run() throws Exception {
+        // Check the expected content is present in the http response
+        Pair<Integer,String> pair = getStacksPage();
+        assertEquals(HttpURLConnection.HTTP_OK, pair.getFirst().intValue());
+        assertTrue("expected=" + expectedAuthorizedContent + ", content=" + pair.getSecond(),
+          pair.getSecond().contains(expectedAuthorizedContent));
+        return null;
+      }
+    });
+
+    UserGroupInformation nonAdmin = UserGroupInformation.loginUserFromKeytabAndReturnUGI(
+        USER_NONE_STR, KEYTAB_FILE.getAbsolutePath());
+    nonAdmin.doAs(new PrivilegedExceptionAction<Void>() {
+      @Override public Void run() throws Exception {
+        Pair<Integer,String> pair = getStacksPage();
+        assertEquals(HttpURLConnection.HTTP_FORBIDDEN, pair.getFirst().intValue());
+        return null;
+      }
+    });
+  }
+
+  @Test
+  public void testJmxAvailableForAdmins() throws Exception {
+    final String expectedAuthorizedContent = "Hadoop:service=HBase";
+    UserGroupInformation admin = UserGroupInformation.loginUserFromKeytabAndReturnUGI(
+        USER_ADMIN_STR, KEYTAB_FILE.getAbsolutePath());
+    admin.doAs(new PrivilegedExceptionAction<Void>() {
+      @Override public Void run() throws Exception {
+        // Check the expected content is present in the http response
+        Pair<Integer,String> pair = getJmxPage();
+        assertEquals(HttpURLConnection.HTTP_OK, pair.getFirst().intValue());
+        assertTrue("expected=" + expectedAuthorizedContent + ", content=" + pair.getSecond(),
+          pair.getSecond().contains(expectedAuthorizedContent));
+        return null;
+      }
+    });
+
+    UserGroupInformation nonAdmin = UserGroupInformation.loginUserFromKeytabAndReturnUGI(
+        USER_NONE_STR, KEYTAB_FILE.getAbsolutePath());
+    nonAdmin.doAs(new PrivilegedExceptionAction<Void>() {
+      @Override public Void run() throws Exception {
+        Pair<Integer,String> pair = getJmxPage();
+        assertEquals(HttpURLConnection.HTTP_FORBIDDEN, pair.getFirst().intValue());
+        return null;
+      }
+    });
+  }
+
+  @Test
+  public void testMetricsAvailableForAdmins() throws Exception {
+    // Looks like there's nothing exported to this, but leave it since
+    // it's Hadoop2 only and will eventually be removed due to that.
+    final String expectedAuthorizedContent = "";
+    UserGroupInformation admin = UserGroupInformation.loginUserFromKeytabAndReturnUGI(
+        USER_ADMIN_STR, KEYTAB_FILE.getAbsolutePath());
+    admin.doAs(new PrivilegedExceptionAction<Void>() {
+      @Override public Void run() throws Exception {
+        // Check the expected content is present in the http response
+        Pair<Integer,String> pair = getMetricsPage();
+        if (HttpURLConnection.HTTP_NOT_FOUND == pair.getFirst()) {
+          // Not on hadoop 2
+          return null;
+        }
+        assertEquals(HttpURLConnection.HTTP_OK, pair.getFirst().intValue());
+        assertTrue("expected=" + expectedAuthorizedContent + ", content=" + pair.getSecond(),
+          pair.getSecond().contains(expectedAuthorizedContent));
+        return null;
+      }
+    });
+
+    UserGroupInformation nonAdmin = UserGroupInformation.loginUserFromKeytabAndReturnUGI(
+        USER_NONE_STR, KEYTAB_FILE.getAbsolutePath());
+    nonAdmin.doAs(new PrivilegedExceptionAction<Void>() {
+      @Override public Void run() throws Exception {
+        Pair<Integer,String> pair = getMetricsPage();
+        if (HttpURLConnection.HTTP_NOT_FOUND == pair.getFirst()) {
+          // Not on hadoop 2
+          return null;
+        }
+        assertEquals(HttpURLConnection.HTTP_FORBIDDEN, pair.getFirst().intValue());
+        return null;
+      }
+    });
+  }
+
+  private String getInfoServerHostAndPort() {
+    return "http://localhost:" + CLUSTER.getActiveMaster().getInfoServer().getPort();
+  }
+
+  private Pair<Integer,String> getLogLevelPage() throws Exception {
+    // Build the url which we want to connect to
+    URL url = new URL(getInfoServerHostAndPort() + "/logLevel");
+    return getUrlContent(url);
+  }
+
+  private Pair<Integer,String> getTablePage(TableName tn) throws Exception {
+    URL url = new URL(getInfoServerHostAndPort() + "/table.jsp?name=" + tn.getNameAsString());
+    return getUrlContent(url);
+  }
+
+  private Pair<Integer,String> getLogsPage() throws Exception {
+    URL url = new URL(getInfoServerHostAndPort() + "/logs/");
+    return getUrlContent(url);
+  }
+
+  private Pair<Integer,String> getMasterDumpPage() throws Exception {
+    URL url = new URL(getInfoServerHostAndPort() + "/dump");
+    return getUrlContent(url);
+  }
+
+  private Pair<Integer,String> getStacksPage() throws Exception {
+    URL url = new URL(getInfoServerHostAndPort() + "/stacks");
+    return getUrlContent(url);
+  }
+
+  private Pair<Integer,String> getJmxPage() throws Exception {
+    URL url = new URL(getInfoServerHostAndPort() + "/jmx");
+    return getUrlContent(url);
+  }
+
+  private Pair<Integer,String> getMetricsPage() throws Exception {
+    URL url = new URL(getInfoServerHostAndPort() + "/metrics");
+    return getUrlContent(url);
+  }
+
+  /**
+   * Retrieves the content of the specified URL. The content will only be returned if the status
+   * code for the operation was HTTP 200/OK.
+   */
+  private Pair<Integer,String> getUrlContent(URL url) throws Exception {
+    try (CloseableHttpClient client = createHttpClient(
+        UserGroupInformation.getCurrentUser().getUserName())) {
+      CloseableHttpResponse resp = client.execute(new HttpGet(url.toURI()));
+      int code = resp.getStatusLine().getStatusCode();
+      if (code == HttpURLConnection.HTTP_OK) {
+        return new Pair<>(code, EntityUtils.toString(resp.getEntity()));
+      }
+      return new Pair<>(code, null);
+    }
+  }
+
+  private CloseableHttpClient createHttpClient(String clientPrincipal) throws Exception {
+    // Logs in with Kerberos via GSS
+    GSSManager gssManager = GSSManager.getInstance();
+    // jGSS Kerberos login constant
+    Oid oid = new Oid("1.2.840.113554.1.2.2");
+    GSSName gssClient = gssManager.createName(clientPrincipal, GSSName.NT_USER_NAME);
+    GSSCredential credential = gssManager.createCredential(
+        gssClient, GSSCredential.DEFAULT_LIFETIME, oid, GSSCredential.INITIATE_ONLY);
+
+    Lookup<AuthSchemeProvider> authRegistry = RegistryBuilder.<AuthSchemeProvider>create()
+        .register(AuthSchemes.SPNEGO, new SPNegoSchemeFactory(true, true)).build();
+
+    BasicCredentialsProvider credentialsProvider = new BasicCredentialsProvider();
+    credentialsProvider.setCredentials(AuthScope.ANY, new KerberosCredentials(credential));
+
+    return HttpClients.custom().setDefaultAuthSchemeRegistry(authRegistry)
+        .setDefaultCredentialsProvider(credentialsProvider).build();
+  }
+}
diff --git a/src/main/asciidoc/_chapters/security.adoc b/src/main/asciidoc/_chapters/security.adoc
index 41a9101..6d31a77 100644
--- a/src/main/asciidoc/_chapters/security.adoc
+++ b/src/main/asciidoc/_chapters/security.adoc
@@ -37,9 +37,11 @@ HBase adheres to the Apache Software Foundation's policy on reported vulnerabili
 If you wish to send an encrypted report, you can use the GPG details provided for the general ASF security list. This will likely increase the response time to your report.
 ====
 
+== Web UI Security
+
 HBase provides mechanisms to secure various components and aspects of HBase and how it relates to the rest of the Hadoop infrastructure, as well as clients and resources outside Hadoop.
 
-== Using Secure HTTP (HTTPS) for the Web UI
+=== Using Secure HTTP (HTTPS) for the Web UI
 
 A default HBase install uses insecure HTTP connections for Web UIs for the master and region servers.
 To enable secure HTTP (HTTPS) connections instead, set `hbase.ssl.enabled` to `true` in _hbase-site.xml_.
@@ -70,7 +72,7 @@ If you know how to fix this without opening a second port for HTTPS, patches are
 ====
 
 [[hbase.secure.spnego.ui]]
-== Using SPNEGO for Kerberos authentication with Web UIs
+=== Using SPNEGO for Kerberos authentication with Web UIs
 
 Kerberos-authentication to HBase Web UIs can be enabled via configuring SPNEGO with the `hbase.security.authentication.ui`
 property in _hbase-site.xml_. Enabling this authentication requires that HBase is also configured to use Kerberos authentication
@@ -122,6 +124,85 @@ A number of properties exist to configure SPNEGO authentication for the web serv
 </property>
 ----
 
+=== Defining administrators of the Web UI
+
+In the previous section, we cover how to enable authentication for the Web UI via SPNEGO.
+However, some portions of the Web UI could be used to impact the availability and performance
+of an HBase cluster. As such, it is desirable to ensure that only those with proper authority
+can interact with these sensitive endpoints.
+
+HBase allows the administrators to be defined via a list of usernames or groups in hbase-site.xml.
+
+[source,xml]
+----
+<property>
+  <name>hbase.security.authentication.spnego.admin.users</name>
+  <value></value>
+</property>
+<property>
+  <name>hbase.security.authentication.spnego.admin.groups</name>
+  <value></value>
+</property>
+----
+
+The usernames are those which the Kerberos identity maps to, given the Hadoop `auth_to_local` rules
+in core-site.xml. The groups here are the Unix groups associated with the mapped usernames.
+
+Consider the following scenario to describe how the configuration properties operate. Consider
+three users which are defined in the Kerberos KDC:
+
+* `alice@COMPANY.COM`
+* `bob@COMPANY.COM`
+* `charlie@COMPANY.COM`
+
+The default Hadoop `auth_to_local` rules map these principals to the "shortname":
+
+* `alice`
+* `bob`
+* `charlie`
+
+Unix group membership defines that `alice` is a member of the group `admins`.
+`bob` and `charlie` are not members of the `admins` group.
+
+[source,xml]
+----
+<property>
+  <name>hbase.security.authentication.spnego.admin.users</name>
+  <value>charlie</value>
+</property>
+<property>
+  <name>hbase.security.authentication.spnego.admin.groups</name>
+  <value>admins</value>
+</property>
+----
+
+Given the above configuration, `alice` is allowed to access sensitive endpoints in the Web UI
+as she is a member of the `admins` group. `charlie` is also allowed to access sensitive endpoints
+because he is explicitly listed as an admin in the configuration. `bob` is not allowed to access
+sensitive endpoints because he is not a member of the `admins` group nor is listed as an explicit
+admin user via `hbase.security.authentication.spnego.admin.users`, but can still use any
+non-sensitive endpoints in the Web UI.
+
+If it doesn't go without saying: non-authenticated users cannot access any part of the Web UI.
+
+=== Other UI security-related configuration
+
+While it is a clear anti-pattern for HBase developers, the developers acknowledge that the HBase
+configuration (including Hadoop configuration files) may contain sensitive information. As such,
+a user may find that they do not want to expose the HBase service-level configuration to all
+authenticated users. They may configure HBase to require that a user be an admin to access
+the service-level configuration via the HBase UI. This configuration is *false* by default
+(any authenticated user may access the configuration).
+
+Users who wish to change this would set the following in their hbase-site.xml:
+[source,xml]
+----
+<property>
+  <name>hbase.security.authentication.ui.config.protected</name>
+  <value>true</value>
+</property>
+----
+
 [[hbase.secure.configuration]]
 == Secure Client Access to Apache HBase
 


Mime
View raw message