phoenix-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From els...@apache.org
Subject [1/4] phoenix git commit: PHOENIX-2792 Enable PQS to use the new SPNEGO support from Avatica
Date Tue, 21 Jun 2016 04:37:43 GMT
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-0.98 6fb29aac1 -> 32ada52d3
  refs/heads/4.x-HBase-1.0 cad2c8d69 -> cb3213147
  refs/heads/4.x-HBase-1.1 a96cc550c -> ac1f6efe3
  refs/heads/master 8b0b4fa25 -> a357bf275


PHOENIX-2792 Enable PQS to use the new SPNEGO support from Avatica


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/a357bf27
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/a357bf27
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/a357bf27

Branch: refs/heads/master
Commit: a357bf275abb272f2cabc571551b618cb4744fd2
Parents: 8b0b4fa
Author: Josh Elser <elserj@apache.org>
Authored: Tue Jun 21 00:16:36 2016 -0400
Committer: Josh Elser <elserj@apache.org>
Committed: Tue Jun 21 00:17:08 2016 -0400

----------------------------------------------------------------------
 bin/sqlline-thin.py                             |  2 +-
 phoenix-queryserver-client/pom.xml              |  4 +
 .../queryserver/client/SqllineWrapper.java      | 87 ++++++++++++++++++++
 .../apache/phoenix/queryserver/server/Main.java | 86 +++++++++++++++----
 4 files changed, 163 insertions(+), 16 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/phoenix/blob/a357bf27/bin/sqlline-thin.py
----------------------------------------------------------------------
diff --git a/bin/sqlline-thin.py b/bin/sqlline-thin.py
index b37cd90..73b7b42 100755
--- a/bin/sqlline-thin.py
+++ b/bin/sqlline-thin.py
@@ -149,7 +149,7 @@ java_cmd = java + ' $PHOENIX_OPTS ' + \
    ' -cp "' + phoenix_utils.hbase_conf_dir + os.pathsep + phoenix_utils.phoenix_thin_client_jar + \
    os.pathsep + phoenix_utils.hadoop_conf + os.pathsep + phoenix_utils.hadoop_classpath + '" -Dlog4j.configuration=file:' + \
    os.path.join(phoenix_utils.current_dir, "log4j.properties") + \
-    " sqlline.SqlLine -d org.apache.phoenix.queryserver.client.Driver " + \
+    " org.apache.phoenix.queryserver.client.SqllineWrapper -d org.apache.phoenix.queryserver.client.Driver " + \
    " -u \"jdbc:phoenix:thin:url=" + url + ";serialization=" + serialization + "\"" + \
    " -n none -p none --color=" + colorSetting + " --fastConnect=false --verbose=true " + \
     " --isolation=TRANSACTION_READ_COMMITTED " + sqlfile

http://git-wip-us.apache.org/repos/asf/phoenix/blob/a357bf27/phoenix-queryserver-client/pom.xml
----------------------------------------------------------------------
diff --git a/phoenix-queryserver-client/pom.xml b/phoenix-queryserver-client/pom.xml
index 0ae25f9..3fba5aa 100644
--- a/phoenix-queryserver-client/pom.xml
+++ b/phoenix-queryserver-client/pom.xml
@@ -139,5 +139,9 @@
       <groupId>sqlline</groupId>
       <artifactId>sqlline</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+    </dependency>
   </dependencies>
 </project>

http://git-wip-us.apache.org/repos/asf/phoenix/blob/a357bf27/phoenix-queryserver-client/src/main/java/org/apache/phoenix/queryserver/client/SqllineWrapper.java
----------------------------------------------------------------------
diff --git a/phoenix-queryserver-client/src/main/java/org/apache/phoenix/queryserver/client/SqllineWrapper.java b/phoenix-queryserver-client/src/main/java/org/apache/phoenix/queryserver/client/SqllineWrapper.java
new file mode 100644
index 0000000..44cc0d3
--- /dev/null
+++ b/phoenix-queryserver-client/src/main/java/org/apache/phoenix/queryserver/client/SqllineWrapper.java
@@ -0,0 +1,87 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.phoenix.queryserver.client;
+
+import java.security.PrivilegedExceptionAction;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.security.UserGroupInformation;
+
+import sqlline.SqlLine;
+
+/**
+ * Utility class which automatically performs a Kerberos login and then launches sqlline. Tries to
+ * make a pre-populated ticket cache (via kinit before launching) transparently work.
+ */
+public class SqllineWrapper {
+  public static final String HBASE_AUTHENTICATION_ATTR = "hbase.security.authentication";
+
+  static UserGroupInformation loginIfNecessary() {
+    // Try to avoid HBase dependency too. Sadly, we have to bring in all of hadoop-common for this..
+    Configuration conf = new Configuration(false);
+    conf.addResource("hbase-site.xml");
+    if ("kerberos".equalsIgnoreCase(conf.get(HBASE_AUTHENTICATION_ATTR))) {
+      // sun.security.krb5.principal is the property for setting the principal name, if that
+      // isn't set, fall back to user.name and hope for the best.
+      String principal = System.getProperty("sun.security.krb5.principal", System.getProperty("user.name"));
+      try {
+        // We got hadoop-auth via hadoop-common, so might as well use it.
+        return UserGroupInformation.getUGIFromTicketCache(null, principal);
+      } catch (Exception e) {
+        throw new RuntimeException("Kerberos login failed using ticket cache. Did you kinit?", e);
+      }
+    }
+    return null;
+  }
+
+  private static String[] updateArgsForKerberos(String[] origArgs) {
+    String[] newArgs = new String[origArgs.length];
+    for (int i = 0; i < origArgs.length; i++) {
+      String arg = origArgs[i];
+      newArgs[i] = arg;
+
+      if (arg.equals("-u")) {
+        // Get the JDBC url which is the next argument
+        i++;
+        arg = origArgs[i];
+        if (!arg.contains("authentication=")) {
+          arg = arg + ";authentication=SPNEGO";
+        }
+        newArgs[i] = arg;
+      }
+    }
+    return newArgs;
+  }
+
+  public static void main(String[] args) throws Exception {
+    UserGroupInformation ugi = loginIfNecessary();
+
+    if (null != ugi) {
+      final String[] updatedArgs = updateArgsForKerberos(args);
+      ugi.doAs(new PrivilegedExceptionAction<Void>() {
+        @Override
+        public Void run() throws Exception {
+          SqlLine.main(updatedArgs);
+          return null;
+        }
+      });
+    } else {
+      SqlLine.main(args);
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/phoenix/blob/a357bf27/phoenix-queryserver/src/main/java/org/apache/phoenix/queryserver/server/Main.java
----------------------------------------------------------------------
diff --git a/phoenix-queryserver/src/main/java/org/apache/phoenix/queryserver/server/Main.java b/phoenix-queryserver/src/main/java/org/apache/phoenix/queryserver/server/Main.java
index 106d422..fc2ee34 100644
--- a/phoenix-queryserver/src/main/java/org/apache/phoenix/queryserver/server/Main.java
+++ b/phoenix-queryserver/src/main/java/org/apache/phoenix/queryserver/server/Main.java
@@ -22,6 +22,9 @@ import org.apache.calcite.avatica.Meta;
 import org.apache.calcite.avatica.remote.Driver;
 import org.apache.calcite.avatica.remote.LocalService;
 import org.apache.calcite.avatica.remote.Service;
+import org.apache.calcite.avatica.server.AvaticaHandler;
+import org.apache.calcite.avatica.server.AvaticaServerConfiguration;
+import org.apache.calcite.avatica.server.DoAsRemoteUserCallback;
 import org.apache.calcite.avatica.server.HandlerFactory;
 import org.apache.calcite.avatica.server.HttpServer;
 import org.apache.commons.logging.Log;
@@ -32,18 +35,24 @@ import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.util.Strings;
 import org.apache.hadoop.net.DNS;
 import org.apache.hadoop.security.SecurityUtil;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authorize.ProxyUsers;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.phoenix.query.QueryServices;
 import org.apache.phoenix.query.QueryServicesOptions;
-import org.eclipse.jetty.server.Handler;
 
+import java.io.File;
+import java.io.IOException;
 import java.lang.management.ManagementFactory;
 import java.lang.management.RuntimeMXBean;
+import java.security.PrivilegedExceptionAction;
 import java.util.Arrays;
 import java.util.HashSet;
 import java.util.Map;
+import java.util.Objects;
 import java.util.Set;
+import java.util.concurrent.Callable;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeUnit;
 
@@ -158,8 +167,10 @@ public final class Main extends Configured implements Tool, Runnable {
   public int run(String[] args) throws Exception {
     logProcessInfo(getConf());
     try {
+      final boolean isKerberos = "kerberos".equalsIgnoreCase(getConf().get(QueryServices.QUERY_SERVER_HBASE_SECURITY_CONF_ATTRIB));
+
       // handle secure cluster credentials
-      if ("kerberos".equalsIgnoreCase(getConf().get(QueryServices.QUERY_SERVER_HBASE_SECURITY_CONF_ATTRIB))) {
+      if (isKerberos) {
         String hostname = Strings.domainNamePointerToHostName(DNS.getDefaultHost(
             getConf().get(QueryServices.QUERY_SERVER_DNS_INTERFACE_ATTRIB, "default"),
             getConf().get(QueryServices.QUERY_SERVER_DNS_NAMESERVER_ATTRIB, "default")));
@@ -171,6 +182,7 @@ public final class Main extends Configured implements Tool, Runnable {
             QueryServices.QUERY_SERVER_KERBEROS_PRINCIPAL_ATTRIB, hostname);
         LOG.info("Login successful.");
       }
+
       Class<? extends PhoenixMetaFactory> factoryClass = getConf().getClass(
           QueryServices.QUERY_SERVER_META_FACTORY_ATTRIB, PhoenixMetaFactoryImpl.class, PhoenixMetaFactory.class);
       int port = getConf().getInt(QueryServices.QUERY_SERVER_HTTP_PORT_ATTRIB,
@@ -179,9 +191,30 @@ public final class Main extends Configured implements Tool, Runnable {
       PhoenixMetaFactory factory =
           factoryClass.getDeclaredConstructor(Configuration.class).newInstance(getConf());
       Meta meta = factory.create(Arrays.asList(args));
-      final HandlerFactory handlerFactory = new HandlerFactory();
       Service service = new LocalService(meta);
-      server = new HttpServer(port, getHandler(getConf(), service, handlerFactory));
+
+      // Start building the Avatica HttpServer
+      final HttpServer.Builder builder = new HttpServer.Builder().withPort(port)
+          .withHandler(service, getSerialization(getConf()));
+
+      // Enable SPNEGO and Impersonation when using Kerberos
+      if (isKerberos) {
+        UserGroupInformation ugi = UserGroupInformation.getLoginUser();
+
+        // Make sure the proxyuser configuration is up to date
+        ProxyUsers.refreshSuperUserGroupsConfiguration(getConf());
+
+        String keytabPath = getConf().get(QueryServices.QUERY_SERVER_KEYTAB_FILENAME_ATTRIB);
+        File keytab = new File(keytabPath);
+
+        // Enable SPNEGO and impersonation (through standard Hadoop configuration means)
+        builder.withSpnego(ugi.getUserName())
+            .withAutomaticLogin(keytab)
+            .withImpersonation(new PhoenixDoAsCallback(ugi));
+      }
+
+      // Build and start the HttpServer
+      server = builder.build();
       server.start();
       runningLatch.countDown();
       server.join();
@@ -194,14 +227,12 @@ public final class Main extends Configured implements Tool, Runnable {
   }
 
   /**
-   * Instantiates the Handler for use by the Avatica (Jetty) server.
+   * Parses the serialization method from the configuration.
    *
-   * @param conf The configuration
-   * @param service The Avatica Service implementation
-   * @param handlerFactory Factory used for creating a Handler
-   * @return The Handler to use based on the configuration.
+   * @param conf The configuration to parse
+   * @return The Serialization method
    */
-  Handler getHandler(Configuration conf, Service service, HandlerFactory handlerFactory) {
+  Driver.Serialization getSerialization(Configuration conf) {
     String serializationName = conf.get(QueryServices.QUERY_SERVER_SERIALIZATION_ATTRIB,
         QueryServicesOptions.DEFAULT_QUERY_SERVER_SERIALIZATION);
 
@@ -214,11 +245,7 @@ public final class Main extends Configured implements Tool, Runnable {
       throw e;
     }
 
-    Handler handler = handlerFactory.getHandler(service, serialization);
-
-    LOG.info("Instantiated " + handler.getClass() + " for QueryServer");
-
-    return handler;
+    return serialization;
   }
 
   @Override public void run() {
@@ -229,6 +256,35 @@ public final class Main extends Configured implements Tool, Runnable {
     }
   }
 
+  /**
+   * Callback to run the Avatica server action as the remote (proxy) user instead of the server.
+   */
+  static class PhoenixDoAsCallback implements DoAsRemoteUserCallback {
+    private final UserGroupInformation serverUgi;
+
+    public PhoenixDoAsCallback(UserGroupInformation serverUgi) {
+      this.serverUgi = Objects.requireNonNull(serverUgi);
+    }
+
+    @Override
+    public <T> T doAsRemoteUser(String remoteUserName, String remoteAddress, final Callable<T> action) throws Exception {
+      // Proxy this user on top of the server's user (the real user)
+      UserGroupInformation proxyUser = UserGroupInformation.createProxyUser(remoteUserName, serverUgi);
+
+      // Check if this user is allowed to be impersonated.
+      // Will throw AuthorizationException if the impersonation as this user is not allowed
+      ProxyUsers.authorize(proxyUser, remoteAddress);
+
+      // Execute the actual call as this proxy user
+      return proxyUser.doAs(new PrivilegedExceptionAction<T>() {
+        @Override
+        public T run() throws Exception {
+          return action.call();
+        }
+      });
+    }
+  }
+
   public static void main(String[] argv) throws Exception {
     int ret = ToolRunner.run(HBaseConfiguration.create(), new Main(), argv);
     System.exit(ret);


Mime
View raw message