hive-commits mailing list archives

From the...@apache.org
Subject svn commit: r1590825 - in /hive/trunk: itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/ itests/hive-unit/src/main/java/org/apache/hive/jdbc/miniHS2/ itests/hive-unit/src/test/java/org/apache/hive/jdbc/ shims/common-secure/src/main/java/org/ap...
Date Mon, 28 Apr 2014 22:08:58 GMT
Author: thejas
Date: Mon Apr 28 22:08:58 2014
New Revision: 1590825

URL: http://svn.apache.org/r1590825
Log:
HIVE-6957 : SQL authorization does not work with HS2 binary mode and Kerberos auth (Thejas Nair, reviewed by Vaibhav Gumashta)
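
For context only (this note is not part of the commit): under Kerberos the Thrift processor previously recorded the caller's full principal as the session user, which SQL standard authorization then compared against plain user names such as "user2"; the shim and TestJdbcWithMiniKdc changes below switch this to the short name. A minimal sketch of that mapping, assuming a MiniKdc-style setup in which EXAMPLE.COM is handled by the default auth_to_local rules (the principal value is hypothetical):

    import org.apache.hadoop.security.UserGroupInformation;

    public class ShortUserNameSketch {
      public static void main(String[] args) {
        // Hypothetical Kerberos principal; the tests below use MiniKdc-issued ones.
        String endUser = "user2@EXAMPLE.COM";
        UserGroupInformation ugi = UserGroupInformation.createRemoteUser(endUser);
        // With auth_to_local rules that cover EXAMPLE.COM this prints "user2",
        // the name that "grant select ... to user user2" refers to.
        System.out.println(ugi.getShortUserName());
      }
    }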

Added:
    hive/trunk/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/JdbcWithMiniKdcSQLAuthTest.java
    hive/trunk/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdcSQLAuthBinary.java
    hive/trunk/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdcSQLAuthHttp.java
Modified:
    hive/trunk/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/MiniHiveKdc.java
    hive/trunk/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdc.java
    hive/trunk/itests/hive-unit/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java
    hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java
    hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java

Added: hive/trunk/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/JdbcWithMiniKdcSQLAuthTest.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/JdbcWithMiniKdcSQLAuthTest.java?rev=1590825&view=auto
==============================================================================
--- hive/trunk/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/JdbcWithMiniKdcSQLAuthTest.java (added)
+++ hive/trunk/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/JdbcWithMiniKdcSQLAuthTest.java Mon Apr 28 22:08:58 2014
@@ -0,0 +1,150 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.minikdc;
+
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.HashMap;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.ql.security.SessionStateUserAuthenticator;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory;
+import org.apache.hive.jdbc.miniHS2.MiniHS2;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.Test;
+
+public abstract class JdbcWithMiniKdcSQLAuthTest {
+
+
+  private static MiniHS2 miniHS2 = null;
+  private static MiniHiveKdc miniHiveKdc = null;
+  private Connection hs2Conn;
+  protected static HiveConf hiveConf = new HiveConf();
+
+  public static void beforeTestBase() throws Exception {
+    System.err.println("Testing using HS2 mode:"
+        + hiveConf.getVar(ConfVars.HIVE_SERVER2_TRANSPORT_MODE));
+
+    Class.forName(MiniHS2.getJdbcDriverName());
+    hiveConf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER,
+        SQLStdHiveAuthorizerFactory.class.getName());
+    hiveConf.setVar(ConfVars.HIVE_AUTHENTICATOR_MANAGER,
+        SessionStateUserAuthenticator.class.getName());
+    hiveConf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true);
+    hiveConf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
+    hiveConf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false);
+
+    miniHiveKdc = MiniHiveKdc.getMiniHiveKdc(hiveConf);
+    miniHS2 = MiniHiveKdc.getMiniHS2WithKerb(miniHiveKdc, hiveConf);
+    miniHS2.start(new HashMap<String, String>());
+
+  }
+
+  @Before
+  public void setUp() throws Exception {
+  }
+
+  @After
+  public void tearDown() throws Exception {
+    if (hs2Conn != null) {
+      try {
+        hs2Conn.close();
+      } catch (Exception e) {
+        // Ignore shutdown errors since there are negative tests
+      }
+    }
+  }
+
+  @AfterClass
+  public static void afterTest() throws Exception {
+    miniHS2.stop();
+  }
+
+  @Test
+  public void testAuthorization1() throws Exception {
+
+    String tableName1 = "test_jdbc_sql_auth1";
+    String tableName2 = "test_jdbc_sql_auth2";
+    // using different code blocks so that jdbc variables are not accidentally re-used
+    // between the actions. A different connection/statement object should be used for each action.
+    {
+      // create tables as user1
+      Connection hs2Conn = getConnection(MiniHiveKdc.HIVE_TEST_USER_1);
+
+      Statement stmt = hs2Conn.createStatement();
+
+      // create tables
+      stmt.execute("create table " + tableName1 + "(i int) ");
+      stmt.execute("create table " + tableName2 + "(i int) ");
+      stmt.execute("grant select on table " + tableName2 + " to user "
+          + MiniHiveKdc.HIVE_TEST_USER_2);
+      stmt.close();
+      hs2Conn.close();
+    }
+
+    {
+      // try dropping table as user1 - should succeed
+      Connection hs2Conn = getConnection((MiniHiveKdc.HIVE_TEST_USER_1));
+      Statement stmt = hs2Conn.createStatement();
+      stmt.execute("drop table " + tableName1);
+    }
+
+    {
+      // try dropping table as user2 - should fail
+      Connection hs2Conn = getConnection((MiniHiveKdc.HIVE_TEST_USER_2));
+      try {
+        Statement stmt = hs2Conn.createStatement();
+        stmt.execute("drop table " + tableName2);
+        fail("Exception due to authorization failure is expected");
+      } catch (SQLException e) {
+        String msg = e.getMessage();
+        System.err.println("Got SQLException with message " + msg);
+        // check parts of the error, not the whole string so as not to tightly
+        // couple the error message with test
+        assertTrue("Checking permission denied error", msg.contains("user2"));
+        assertTrue("Checking permission denied error", msg.contains(tableName2));
+        assertTrue("Checking permission denied error", msg.contains("OBJECT OWNERSHIP"));
+      }
+    }
+
+    {
+      // try reading table2 as user2 - should succeed
+      Connection hs2Conn = getConnection((MiniHiveKdc.HIVE_TEST_USER_2));
+      Statement stmt = hs2Conn.createStatement();
+      stmt.execute(" desc " + tableName2);
+    }
+
+  }
+
+  private Connection getConnection(String userName) throws Exception {
+    miniHiveKdc.loginUser(userName);
+    return DriverManager.getConnection(miniHS2.getJdbcURL());
+  }
+
+
+
+}

Modified: hive/trunk/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/MiniHiveKdc.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/MiniHiveKdc.java?rev=1590825&r1=1590824&r2=1590825&view=diff
==============================================================================
--- hive/trunk/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/MiniHiveKdc.java (original)
+++ hive/trunk/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/MiniHiveKdc.java Mon Apr 28 22:08:58 2014
@@ -28,26 +28,35 @@ import java.util.List;
 import java.util.Map;
 import java.util.Properties;
 
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.minikdc.MiniKdc;
 import org.apache.hadoop.security.GroupMappingServiceProvider;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hive.jdbc.miniHS2.MiniHS2;
 
 import com.google.common.io.Files;
 
+/**
+ * Wrapper around Hadoop's MiniKdc for use in hive tests.
+ * Has functions to manage users and their keytabs. This includes a hive service principal and
+ * a superuser principal for testing proxy user privileges.
+ * Has a set of default users that it initializes.
+ * See hive-minikdc/src/test/resources/core-site.xml for users granted proxy user privileges.
+ */
 public class MiniHiveKdc {
   public static String HIVE_SERVICE_PRINCIPAL = "hive";
   public static String HIVE_TEST_USER_1 = "user1";
   public static String HIVE_TEST_USER_2 = "user2";
   public static String HIVE_TEST_SUPER_USER = "superuser";
 
-  private MiniKdc miniKdc;
-  private File workDir;
-  private Configuration conf;
-  private Map<String, String> userPrincipals =
+  private final MiniKdc miniKdc;
+  private final File workDir;
+  private final Configuration conf;
+  private final Map<String, String> userPrincipals =
       new HashMap<String, String>();
-  private Properties kdcConf = MiniKdc.createConf();
+  private final Properties kdcConf = MiniKdc.createConf();
   private int keyTabCounter = 1;
 
   // hadoop group mapping that maps user to same group
@@ -112,6 +121,12 @@ public class MiniHiveKdc {
     userPrincipals.put(principal, keytab.getPath());
   }
 
+  /**
+   * Log in the given principal, using the corresponding keytab file from the internal map
+   * @param principal
+   * @return
+   * @throws Exception
+   */
   public UserGroupInformation loginUser(String principal)
       throws Exception {
     ShimLoader.getHadoopShims().loginUserFromKeytab(principal,
@@ -147,5 +162,22 @@ public class MiniHiveKdc {
     return HIVE_TEST_USER_1;
   }
 
+  /**
+   * Create a MiniHS2 with the hive service principal and keytab in MiniHiveKdc
+   * @param miniHiveKdc
+   * @param hiveConf
+   * @return new MiniHS2 instance
+   * @throws Exception
+   */
+  public static MiniHS2 getMiniHS2WithKerb(MiniHiveKdc miniHiveKdc, HiveConf hiveConf) throws Exception {
+    String hivePrincipal =
+        miniHiveKdc.getFullyQualifiedServicePrincipal(MiniHiveKdc.HIVE_SERVICE_PRINCIPAL);
+    String hiveKeytab = miniHiveKdc.getKeyTabFile(
+        miniHiveKdc.getServicePrincipalForUser(MiniHiveKdc.HIVE_SERVICE_PRINCIPAL));
+
+    return new MiniHS2.Builder().withConf(hiveConf).
+        withMiniKdc(hivePrincipal, hiveKeytab).build();
+  }
+
 
 }

Modified: hive/trunk/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdc.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdc.java?rev=1590825&r1=1590824&r2=1590825&view=diff
==============================================================================
--- hive/trunk/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdc.java (original)
+++ hive/trunk/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdc.java Mon Apr 28 22:08:58 2014
@@ -18,7 +18,10 @@
 
 package org.apache.hive.minikdc;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 import java.sql.Connection;
 import java.sql.DriverManager;
@@ -67,15 +70,10 @@ public class TestJdbcWithMiniKdc {
     Class.forName(MiniHS2.getJdbcDriverName());
     confOverlay.put(ConfVars.HIVE_SERVER2_SESSION_HOOK.varname,
         SessionHookTest.class.getName());
+
     HiveConf hiveConf = new HiveConf();
     miniHiveKdc = MiniHiveKdc.getMiniHiveKdc(hiveConf);
-    String hivePrincipal =
-        miniHiveKdc.getFullyQualifiedServicePrincipal(MiniHiveKdc.HIVE_SERVICE_PRINCIPAL);
-    String hiveKeytab = miniHiveKdc.getKeyTabFile(
-        miniHiveKdc.getServicePrincipalForUser(MiniHiveKdc.HIVE_SERVICE_PRINCIPAL));
-
-    miniHS2 = new MiniHS2.Builder().withConf(new HiveConf()).
-        withMiniKdc(hivePrincipal, hiveKeytab).build();
+    miniHS2 = MiniHiveKdc.getMiniHS2WithKerb(miniHiveKdc, hiveConf);
     miniHS2.start(confOverlay);
   }
 
@@ -107,8 +105,7 @@ public class TestJdbcWithMiniKdc {
   public void testConnection() throws Exception {
     miniHiveKdc.loginUser(MiniHiveKdc.HIVE_TEST_USER_1);
     hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL());
-    verifyProperty(SESSION_USER_NAME, miniHiveKdc.
-        getFullyQualifiedUserPrincipal(MiniHiveKdc.HIVE_TEST_USER_1));
+    verifyProperty(SESSION_USER_NAME, MiniHiveKdc.HIVE_TEST_USER_1);
   }
 
   /***

Added: hive/trunk/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdcSQLAuthBinary.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdcSQLAuthBinary.java?rev=1590825&view=auto
==============================================================================
--- hive/trunk/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdcSQLAuthBinary.java (added)
+++ hive/trunk/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdcSQLAuthBinary.java Mon Apr 28 22:08:58 2014
@@ -0,0 +1,34 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.minikdc;
+
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hive.jdbc.miniHS2.MiniHS2;
+import org.junit.BeforeClass;
+
+public class TestJdbcWithMiniKdcSQLAuthBinary extends JdbcWithMiniKdcSQLAuthTest {
+
+  @BeforeClass
+  public static void beforeTest() throws Exception {
+    hiveConf.setVar(ConfVars.HIVE_SERVER2_TRANSPORT_MODE, MiniHS2.HS2_BINARY_MODE);
+    JdbcWithMiniKdcSQLAuthTest.beforeTestBase();
+
+  }
+
+}

Added: hive/trunk/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdcSQLAuthHttp.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdcSQLAuthHttp.java?rev=1590825&view=auto
==============================================================================
--- hive/trunk/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdcSQLAuthHttp.java (added)
+++ hive/trunk/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdcSQLAuthHttp.java Mon Apr 28 22:08:58 2014
@@ -0,0 +1,34 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.minikdc;
+
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hive.jdbc.miniHS2.MiniHS2;
+import org.junit.BeforeClass;
+
+public class TestJdbcWithMiniKdcSQLAuthHttp extends JdbcWithMiniKdcSQLAuthTest {
+
+  @BeforeClass
+  public static void beforeTest() throws Exception {
+    hiveConf.setVar(ConfVars.HIVE_SERVER2_TRANSPORT_MODE, MiniHS2.HS2_HTTP_MODE);
+    JdbcWithMiniKdcSQLAuthTest.beforeTestBase();
+
+  }
+
+}

Modified: hive/trunk/itests/hive-unit/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hive-unit/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java?rev=1590825&r1=1590824&r2=1590825&view=diff
==============================================================================
--- hive/trunk/itests/hive-unit/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java (original)
+++ hive/trunk/itests/hive-unit/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java Mon Apr 28 22:08:58 2014
@@ -20,12 +20,8 @@ package org.apache.hive.jdbc.miniHS2;
 
 import java.io.File;
 import java.io.IOException;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
 import java.util.HashMap;
 import java.util.Map;
-import java.util.Properties;
 import java.util.concurrent.TimeoutException;
 import java.util.concurrent.atomic.AtomicLong;
 
@@ -39,7 +35,6 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.shims.HadoopShims.MiniDFSShim;
 import org.apache.hadoop.hive.shims.HadoopShims.MiniMrShim;
 import org.apache.hadoop.hive.shims.ShimLoader;
-import org.apache.hive.jdbc.HiveConnection;
 import org.apache.hive.service.Service;
 import org.apache.hive.service.cli.CLIServiceClient;
 import org.apache.hive.service.cli.SessionHandle;
@@ -51,19 +46,19 @@ import org.apache.hive.service.server.Hi
 import com.google.common.io.Files;
 
 public class MiniHS2 extends AbstractHiveService {
+  public static final String HS2_BINARY_MODE = "binary";
+  public static final String HS2_HTTP_MODE = "http";
   private static final String driverName = "org.apache.hive.jdbc.HiveDriver";
   private HiveServer2 hiveServer2 = null;
   private final File baseDir;
   private final Path baseDfsDir;
   private static final AtomicLong hs2Counter = new AtomicLong();
-  private static final String HS2_BINARY_MODE = "binary";
-  private static final String HS2_HTTP_MODE = "http";
   private MiniMrShim mr;
   private MiniDFSShim dfs;
   private boolean useMiniMR = false;
   private boolean useMiniKdc = false;
-  private String serverPrincipal;
-  private String serverKeytab;
+  private final String serverPrincipal;
+  private final String serverKeytab;
 
   public static class Builder {
     private HiveConf hiveConf = new HiveConf();
@@ -71,6 +66,7 @@ public class MiniHS2 extends AbstractHiv
     private boolean useMiniKdc = false;
     private String serverPrincipal;
     private String serverKeytab;
+    private boolean isHTTPTransMode = false;
 
     public Builder() {
     }
@@ -92,10 +88,25 @@ public class MiniHS2 extends AbstractHiv
       return this;
     }
 
+    /**
+     * Start HS2 with HTTP transport mode, default is binary mode
+     * @return this Builder
+     */
+    public Builder withHTTPTransport(){
+      this.isHTTPTransMode = true;
+      return this;
+    }
+
+
     public MiniHS2 build() throws Exception {
       if (useMiniMR && useMiniKdc) {
         throw new IOException("Can't create secure miniMr ... yet");
       }
+      if (isHTTPTransMode) {
+        hiveConf.setVar(ConfVars.HIVE_SERVER2_TRANSPORT_MODE, HS2_HTTP_MODE);
+      } else {
+        hiveConf.setVar(ConfVars.HIVE_SERVER2_TRANSPORT_MODE, HS2_BINARY_MODE);
+      }
       return new MiniHS2(hiveConf, useMiniMR, useMiniKdc, serverPrincipal, serverKeytab);
     }
   }
@@ -164,7 +175,6 @@ public class MiniHS2 extends AbstractHiv
     hiveConf.setVar(HiveConf.ConfVars.METASTORECONNECTURLKEY, metaStoreURL);
     // reassign a new port, just in case if one of the MR services grabbed the last one
     setBinaryPort(MetaStoreUtils.findFreePort());
-    hiveConf.setVar(ConfVars.HIVE_SERVER2_TRANSPORT_MODE, HS2_BINARY_MODE);
     hiveConf.setVar(ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST, getHost());
     hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_PORT, getBinaryPort());
     hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_PORT, getHttpPort());
@@ -253,16 +263,35 @@ public class MiniHS2 extends AbstractHiv
   /**
    * return connection URL for this server instance
    * @param dbName - DB name to be included in the URL
-   * @param urlExtension - Addional string to be appended to URL
+   * @param sessionConfExt - Additional string to be appended to sessionConf part of url
+   * @return
+   */
+  public String getJdbcURL(String dbName, String sessionConfExt) {
+    return getJdbcURL(dbName, sessionConfExt, "");
+  }
+
+  /**
+   * return connection URL for this server instance
+   * @param dbName - DB name to be included in the URL
+   * @param sessionConfExt - Additional string to be appended to sessionConf part of url
+   * @param hiveConfExt - Additional string to be appended to HiveConf part of url (excluding the ?)
    * @return
    */
-  public String getJdbcURL(String dbName, String urlExtension) {
-    assert urlExtension != null;
+  public String getJdbcURL(String dbName, String sessionConfExt, String hiveConfExt) {
+    sessionConfExt = (sessionConfExt == null ? "" : sessionConfExt);
+    hiveConfExt = (hiveConfExt == null ? "" : hiveConfExt);
     String krbConfig = "";
     if (isUseMiniKdc()) {
       krbConfig = ";principal=" + serverPrincipal;
     }
-    return getBaseJdbcURL() + dbName + krbConfig + urlExtension;
+    if (isHttpTransportMode()) {
+      hiveConfExt = "hive.server2.transport.mode=http;hive.server2.thrift.http.path=cliservice;"
+          + hiveConfExt;
+    }
+    if (!hiveConfExt.trim().equals("")) {
+      hiveConfExt = "?" + hiveConfExt;
+    }
+    return getBaseJdbcURL() + dbName + krbConfig + sessionConfExt + hiveConfExt;
   }
 
   /**
@@ -270,8 +299,7 @@ public class MiniHS2 extends AbstractHiv
    * @return
    */
   public String getBaseJdbcURL() {
-    String transportMode = getConfProperty(ConfVars.HIVE_SERVER2_TRANSPORT_MODE.varname);
-    if(transportMode != null && (transportMode.equalsIgnoreCase(HS2_HTTP_MODE))) {
+    if(isHttpTransportMode()) {
       return "jdbc:hive2://" + getHost() + ":" + getHttpPort() + "/";
     }
     else {
@@ -279,6 +307,11 @@ public class MiniHS2 extends AbstractHiv
     }
   }
 
+  private boolean isHttpTransportMode() {
+    String transportMode = getConfProperty(ConfVars.HIVE_SERVER2_TRANSPORT_MODE.varname);
+    return transportMode != null && (transportMode.equalsIgnoreCase(HS2_HTTP_MODE));
+  }
+
   public static String getJdbcDriverName() {
     return driverName;
   }
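
For orientation (not part of the commit), a hedged usage sketch of the reworked MiniHS2 builder and getJdbcURL helpers; the database name, the ";ssl=false" session parameter, and the <host>:<httpPort> placeholders are illustrative:

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.util.HashMap;

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hive.jdbc.miniHS2.MiniHS2;

    public class MiniHS2HttpUrlSketch {
      public static void main(String[] args) throws Exception {
        Class.forName(MiniHS2.getJdbcDriverName());
        // withHTTPTransport() is the new builder flag; without it build() now
        // pins hive.server2.transport.mode to binary.
        MiniHS2 miniHS2 = new MiniHS2.Builder()
            .withConf(new HiveConf())
            .withHTTPTransport()
            .build();
        miniHS2.start(new HashMap<String, String>());
        // The sessionConf extension lands before the '?', the HTTP hive-conf
        // parameters after it, roughly:
        //   jdbc:hive2://<host>:<httpPort>/default;ssl=false?hive.server2.transport.mode=http;hive.server2.thrift.http.path=cliservice;
        String url = miniHS2.getJdbcURL("default", ";ssl=false");
        Connection conn = DriverManager.getConnection(url,
            System.getProperty("user.name"), "bar");
        conn.close();
        miniHS2.stop();
      }
    }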

Modified: hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java?rev=1590825&r1=1590824&r2=1590825&view=diff
==============================================================================
--- hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java (original)
+++ hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java Mon Apr 28 22:08:58 2014
@@ -55,6 +55,8 @@ public class TestSSL {
   private Connection hs2Conn = null;
   private String dataFileDir = conf.get("test.data.files");
   private Map<String, String> confOverlay;
+  private final String SSL_CONN_PARAMS = ";ssl=true;sslTrustStore=" + dataFileDir + File.separator +
+      TRUST_STORE_NAME + ";trustStorePassword=" + KEY_STORE_PASSWORD;
 
   @BeforeClass
   public static void beforeTest() throws Exception {
@@ -96,9 +98,8 @@ public class TestSSL {
     miniHS2.start(confOverlay);
     DriverManager.setLoginTimeout(4);
     try {
-      hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL() + ";ssl=true;sslTrustStore=" +
-          dataFileDir + File.separator + TRUST_STORE_NAME + ";trustStorePassword=" +
-          KEY_STORE_PASSWORD, System.getProperty("user.name"), "bar");
+      hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default", SSL_CONN_PARAMS),
+          System.getProperty("user.name"), "bar");
       fail("SSL connection should fail with NON-SSL server");
     } catch (SQLException e) {
       // expected error
@@ -123,11 +124,7 @@ public class TestSSL {
     setHttpConfOverlay(confOverlay);
     miniHS2.start(confOverlay);
     try {
-      hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL() +
-          ";ssl=true;sslTrustStore=" + dataFileDir + File.separator +
-          TRUST_STORE_NAME + ";trustStorePassword=" + KEY_STORE_PASSWORD +
-          "?hive.server2.transport.mode=" + HS2_HTTP_MODE +
-          ";hive.server2.thrift.http.path=" + HS2_HTTP_ENDPOINT,
+      hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default", SSL_CONN_PARAMS),
           System.getProperty("user.name"), "bar");
       fail("SSL connection should fail with NON-SSL server");
     } catch (SQLException e) {
@@ -136,6 +133,7 @@ public class TestSSL {
     }
   }
 
+
   /***
    * Test non-SSL client with SSL server fails
    * @throws Exception
@@ -169,11 +167,7 @@ public class TestSSL {
     setHttpConfOverlay(confOverlay);
     miniHS2.start(confOverlay);
     try {
-      hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL() +
-          ";ssl=false;sslTrustStore=" + dataFileDir + File.separator +
-          TRUST_STORE_NAME + ";trustStorePassword=" + KEY_STORE_PASSWORD +
-          "?hive.server2.transport.mode=" + HS2_HTTP_MODE +
-          ";hive.server2.thrift.http.path=" + HS2_HTTP_ENDPOINT,
+      hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default", ";ssl=false"),
           System.getProperty("user.name"), "bar");
       fail("NON SSL connection should fail with SSL server");
     } catch (SQLException e) {
@@ -196,9 +190,8 @@ public class TestSSL {
     miniHS2.start(confOverlay);
 
     // make SSL connection
-    hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL() + ";ssl=true;sslTrustStore=" +
-        dataFileDir + File.separator + TRUST_STORE_NAME + ";trustStorePassword=" +
-        KEY_STORE_PASSWORD, System.getProperty("user.name"), "bar");
+    hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default", SSL_CONN_PARAMS),
+        System.getProperty("user.name"), "bar");
     hs2Conn.close();
     miniHS2.stop();
 
@@ -206,11 +199,7 @@ public class TestSSL {
     setHttpConfOverlay(confOverlay);
     miniHS2.start(confOverlay);
     // make SSL connection
-    hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL() +
-        ";ssl=true;sslTrustStore=" + dataFileDir + File.separator +
-        TRUST_STORE_NAME + ";trustStorePassword=" + KEY_STORE_PASSWORD +
-        "?hive.server2.transport.mode=" + HS2_HTTP_MODE +
-        ";hive.server2.thrift.http.path=" + HS2_HTTP_ENDPOINT,
+    hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default", SSL_CONN_PARAMS),
         System.getProperty("user.name"), "bar");
     hs2Conn.close();
   }
@@ -239,9 +228,7 @@ public class TestSSL {
     setHttpConfOverlay(confOverlay);
     miniHS2.start(confOverlay);
     // make SSL connection
-    hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL() +
-        ";ssl=true;" + "?hive.server2.transport.mode=" + HS2_HTTP_MODE +
-        ";hive.server2.thrift.http.path=" + HS2_HTTP_ENDPOINT,
+    hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default", SSL_CONN_PARAMS),
         System.getProperty("user.name"), "bar");
     hs2Conn.close();
   }
@@ -262,9 +249,8 @@ public class TestSSL {
     Path dataFilePath = new Path(dataFileDir, "kv1.txt");
 
     // make SSL connection
-    hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL() + ";ssl=true;sslTrustStore=" +
-        dataFileDir + File.separator + TRUST_STORE_NAME + ";trustStorePassword=" +
-        KEY_STORE_PASSWORD, System.getProperty("user.name"), "bar");
+    hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default", SSL_CONN_PARAMS),
+        System.getProperty("user.name"), "bar");
 
     // Set up test data
     setupTestTableWithData(tableName, dataFilePath, hs2Conn);
@@ -297,11 +283,7 @@ public class TestSSL {
     Path dataFilePath = new Path(dataFileDir, "kv1.txt");
 
     // make SSL connection
-    hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL() +
-        ";ssl=true;sslTrustStore=" + dataFileDir + File.separator +
-        TRUST_STORE_NAME + ";trustStorePassword=" + KEY_STORE_PASSWORD +
-        "?hive.server2.transport.mode=" + HS2_HTTP_MODE +
-        ";hive.server2.thrift.http.path=" + HS2_HTTP_ENDPOINT,
+    hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default", SSL_CONN_PARAMS),
         System.getProperty("user.name"), "bar");
 
     // Set up test data

Modified: hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java?rev=1590825&r1=1590824&r2=1590825&view=diff
==============================================================================
--- hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java (original)
+++ hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java Mon Apr 28 22:08:58 2014
@@ -609,6 +609,7 @@ public class HadoopThriftAuthBridge20S e
             clientUgi = UserGroupInformation.createProxyUser(
                 endUser, UserGroupInformation.getLoginUser());
             remoteUser.set(clientUgi.getShortUserName());
+            LOG.debug("Set remoteUser :" + remoteUser.get());
             return clientUgi.doAs(new PrivilegedExceptionAction<Boolean>() {
               @Override
               public Boolean run() {
@@ -620,7 +621,10 @@ public class HadoopThriftAuthBridge20S e
               }
             });
           } else {
-            remoteUser.set(endUser);
+            // use the short user name for the request
+            UserGroupInformation endUserUgi = UserGroupInformation.createRemoteUser(endUser);
+            remoteUser.set(endUserUgi.getShortUserName());
+            LOG.debug("Set remoteUser :" + remoteUser.get() + ", from endUser :" + endUser);
             return wrapped.process(inProt, outProt);
           }
         } catch (RuntimeException rte) {


