From: aihuaxu@apache.org
To: commits@hive.apache.org
Subject: hive git commit: HIVE-13149: Remove some unnecessary HMS connections from HS2 (Reviewed by Jimmy Xiang, Szehon Ho, Chaoyu Tang)
Date: Thu, 26 May 2016 13:29:29 +0000 (UTC)

Repository: hive
Updated Branches:
  refs/heads/master 76130a9d5 -> 9bebaf619


HIVE-13149: Remove some unnecessary HMS connections from HS2 (Reviewed by Jimmy Xiang, Szehon Ho, Chaoyu Tang)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/9bebaf61
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/9bebaf61
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/9bebaf61

Branch: refs/heads/master
Commit: 9bebaf6196c1842c285bfbad50765170e064f6e4
Parents: 76130a9
Author: Aihua Xu
Authored: Fri Apr 22 10:58:59 2016 -0400
Committer: Aihua Xu
Committed: Thu May 26 09:28:25 2016 -0400

----------------------------------------------------------------------
 .../hive/metastore/TestMetastoreVersion.java    |  7 ++--
 .../hbase/TestHBaseMetastoreMetrics.java        |  4 +--
 .../apache/hive/jdbc/TestJdbcWithMiniHS2.java   | 37 +++++++++++++++++---
 .../hadoop/hive/hbase/HBaseQTestUtil.java       |  6 ++++
 .../hadoop/hive/hbase/HBaseTestSetup.java       |  3 --
 .../org/apache/hadoop/hive/ql/QTestUtil.java    | 14 +++++---
 .../hive/metastore/HiveMetaStoreClient.java     | 10 +++---
 .../hadoop/hive/ql/session/SessionState.java    |  8 -----
 8 files changed, 59 insertions(+), 30 deletions(-)
----------------------------------------------------------------------
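The net effect of this patch is that HiveServer2 no longer opens a Hive Metastore (HMS) connection eagerly when a session starts; the connection is made only when something actually asks for the metastore (for example via Hive.get(conf).getMSC()). Below is a minimal, self-contained sketch of that deferral pattern, for orientation only: LazyClientHolder, MetastoreClient, and connect() are invented placeholders, not Hive classes.

// Illustration only: defer an expensive client connection until first use,
// mirroring how this patch moves the HMS connection out of session startup.
// MetastoreClient and connect() are hypothetical placeholders, not Hive APIs.
public class LazyClientHolder {

  interface MetastoreClient {
    void ping();
  }

  private MetastoreClient client;  // stays null until somebody asks for it

  // Called at "session start": note that no connection is made here.
  public void startSession() {
    System.out.println("session started, no metastore connection yet");
  }

  // The first caller pays the connection cost; later callers reuse the client.
  public synchronized MetastoreClient getClient() {
    if (client == null) {
      client = connect();
    }
    return client;
  }

  private MetastoreClient connect() {
    System.out.println("connecting to metastore now");
    return () -> System.out.println("ping");
  }

  public static void main(String[] args) {
    LazyClientHolder holder = new LazyClientHolder();
    holder.startSession();       // cheap: no metastore round trip
    holder.getClient().ping();   // connection happens here, on demand
  }
}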
http://git-wip-us.apache.org/repos/asf/hive/blob/9bebaf61/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java
index 53f0d0e..5ceb3d2 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java
@@ -19,7 +19,6 @@ package org.apache.hadoop.hive.metastore;
 
 import java.io.File;
 import java.lang.reflect.Field;
-import java.util.Random;
 
 import junit.framework.TestCase;
 
@@ -32,6 +31,7 @@ import org.apache.hive.common.util.HiveStringUtils;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.ObjectStore;
 import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.session.SessionState;
 
@@ -96,8 +96,9 @@ public class TestMetastoreVersion extends TestCase {
     // session creation should fail since the schema didn't get created
     try {
       SessionState.start(new CliSessionState(hiveConf));
-      fail("Expected exception");
-    } catch (RuntimeException re) {
+      Hive.get(hiveConf).getMSC();
+      fail("An exception is expected since schema is not created.");
+    } catch (Exception re) {
       LOG.info("Exception in testVersionRestriction: " + re, re);
       String msg = HiveStringUtils.stringifyException(re);
       assertTrue("Expected 'Version information not found in metastore' in: " + msg, msg

http://git-wip-us.apache.org/repos/asf/hive/blob/9bebaf61/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseMetastoreMetrics.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseMetastoreMetrics.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseMetastoreMetrics.java
index 3ed88f2..aefafe0 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseMetastoreMetrics.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseMetastoreMetrics.java
@@ -41,8 +41,6 @@ import java.io.IOException;
  */
 public class TestHBaseMetastoreMetrics extends HBaseIntegrationTests {
 
-  private CodahaleMetrics metrics;
-
   @BeforeClass
   public static void startup() throws Exception {
     HBaseIntegrationTests.startMiniCluster();
@@ -66,7 +64,6 @@ public class TestHBaseMetastoreMetrics extends HBaseIntegrationTests {
     conf.setVar(HiveConf.ConfVars.HIVE_METRICS_REPORTER, MetricsReporting.JSON_FILE.name() + "," + MetricsReporting.JMX.name());
     SessionState.start(new CliSessionState(conf));
     driver = new Driver(conf);
-    metrics = (CodahaleMetrics) MetricsFactory.getInstance();
   }
 
   @Test
@@ -107,6 +104,7 @@ public class TestHBaseMetastoreMetrics extends HBaseIntegrationTests {
     driver.run("use default");
     driver.run("drop database tempdb cascade");
 
+    CodahaleMetrics metrics = (CodahaleMetrics) MetricsFactory.getInstance();
     String json = metrics.dumpJson();
     MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, MetricsConstant.CREATE_TOTAL_DATABASES, 2);
     MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, MetricsConstant.CREATE_TOTAL_TABLES, 7);
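Both test updates above follow from the same behavior change: since SessionState.start() no longer touches the metastore, the tests must explicitly perform a metastore operation (or fetch the metrics instance) at the point where the side effect is actually expected. The following is a rough JUnit 4 sketch of the "expect a failure from an explicit call, then inspect the message" pattern used in testVersionRestriction; FailingService is a made-up stand-in for the metastore call, not Hive code.

import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import org.junit.Test;

public class ExpectedFailureMessageTest {

  // Hypothetical stand-in for Hive.get(conf).getMSC() failing on a missing schema.
  static class FailingService {
    void call() throws Exception {
      throw new Exception("Version information not found in metastore");
    }
  }

  @Test
  public void testFailureCarriesExpectedMessage() {
    try {
      new FailingService().call();
      fail("An exception is expected since schema is not created.");
    } catch (Exception e) {
      // Assert on the message rather than the exception type, since the
      // failure may surface as a wrapped checked exception.
      assertTrue("Expected 'Version information not found in metastore' in: " + e.getMessage(),
          e.getMessage().contains("Version information not found in metastore"));
    }
  }
}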
http://git-wip-us.apache.org/repos/asf/hive/blob/9bebaf61/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java
index 815ccfa..4aa98ca 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java
@@ -86,9 +86,8 @@ public class TestJdbcWithMiniHS2 {
     miniHS2.start(confOverlay);
   }
 
-  @Before
-  public void setUp() throws Exception {
-    hs2Conn = getConnection(miniHS2.getJdbcURL(), System.getProperty("user.name"), "bar");
+  private Connection getConnection() throws Exception {
+    return getConnection(miniHS2.getJdbcURL(), System.getProperty("user.name"), "bar");
   }
 
   private Connection getConnection(String jdbcURL, String user, String pwd) throws SQLException {
@@ -99,7 +98,9 @@ public class TestJdbcWithMiniHS2 {
 
   @After
   public void tearDown() throws Exception {
-    hs2Conn.close();
+    if (hs2Conn != null) {
+      hs2Conn.close();
+    }
   }
 
   @AfterClass
@@ -112,6 +113,7 @@ public class TestJdbcWithMiniHS2 {
   @Test
   public void testConnection() throws Exception {
     String tableName = "testTab1";
+    hs2Conn = getConnection();
     Statement stmt = hs2Conn.createStatement();
 
     // create table
@@ -133,6 +135,7 @@ public class TestJdbcWithMiniHS2 {
   @Test
   public void testConcurrentStatements() throws Exception {
     String tableName = "testConcurrentStatements";
+    hs2Conn = getConnection();
    Statement stmt = hs2Conn.createStatement();
 
     // create table
@@ -311,6 +314,7 @@ public class TestJdbcWithMiniHS2 {
     stmt.execute(" drop table if exists table_in_non_default_schema");
     expected = stmt.execute("DROP DATABASE "+ dbName);
     stmt.close();
+    hs2Conn.close();
 
     hs2Conn = getConnection(jdbcUri+"default",System.getProperty("user.name"),"bar");
     stmt = hs2Conn .createStatement();
@@ -344,6 +348,7 @@ public class TestJdbcWithMiniHS2 {
      * get/set Schema are new in JDK7 and not available in java.sql.Connection in JDK6.
      * Hence the test uses HiveConnection object to call these methods so that test will run with older JDKs
      */
+    hs2Conn = getConnection();
     HiveConnection hiveConn = (HiveConnection)hs2Conn;
 
     assertEquals("default", hiveConn.getSchema());
@@ -377,6 +382,7 @@ public class TestJdbcWithMiniHS2 {
    */
   private void verifyCurrentDB(String expectedDbName, Connection hs2Conn) throws Exception {
     String verifyTab = "miniHS2DbVerificationTable";
+    hs2Conn = getConnection();
     Statement stmt = hs2Conn.createStatement();
     stmt.execute("DROP TABLE IF EXISTS " + expectedDbName + "." + verifyTab);
     stmt.execute("CREATE TABLE " + expectedDbName + "." + verifyTab + "(id INT)");
@@ -582,6 +588,7 @@ public class TestJdbcWithMiniHS2 {
     // Downloaded resources dir
     scratchDirPath = new Path(HiveConf.getVar(conf, HiveConf.ConfVars.DOWNLOADED_RESOURCES_DIR));
     verifyScratchDir(conf, fs, scratchDirPath, expectedFSPermission, userName, true);
+    hs2Conn.close();
 
     // 2. Test with doAs=true
     // Restart HiveServer2 with doAs=true
@@ -608,6 +615,7 @@ public class TestJdbcWithMiniHS2 {
     // Downloaded resources dir
     scratchDirPath = new Path(HiveConf.getVar(conf, HiveConf.ConfVars.DOWNLOADED_RESOURCES_DIR));
     verifyScratchDir(conf, fs, scratchDirPath, expectedFSPermission, userName, true);
+    hs2Conn.close();
 
     // Test for user "trinity"
     userName = "trinity";
@@ -639,6 +647,7 @@ public class TestJdbcWithMiniHS2 {
     HiveConf testConf = new HiveConf();
     assertTrue(testConf.getVar(ConfVars.HIVE_SERVER2_BUILTIN_UDF_WHITELIST).isEmpty());
     // verify that udf in default whitelist can be executed
+    hs2Conn = getConnection();
     Statement stmt = hs2Conn.createStatement();
     stmt.executeQuery("SELECT substr('foobar', 4) ");
     hs2Conn.close();
@@ -680,10 +689,11 @@ public class TestJdbcWithMiniHS2 {
   public void testUdfBlackList() throws Exception {
     HiveConf testConf = new HiveConf();
     assertTrue(testConf.getVar(ConfVars.HIVE_SERVER2_BUILTIN_UDF_BLACKLIST).isEmpty());
-
+    hs2Conn = getConnection();
     Statement stmt = hs2Conn.createStatement();
     // verify that udf in default whitelist can be executed
     stmt.executeQuery("SELECT substr('foobar', 4) ");
+    hs2Conn.close();
 
     miniHS2.stop();
     testConf.setVar(ConfVars.HIVE_SERVER2_BUILTIN_UDF_BLACKLIST, "reflect");
@@ -705,6 +715,9 @@ public class TestJdbcWithMiniHS2 {
    */
   @Test
   public void testUdfBlackListOverride() throws Exception {
+    if (miniHS2.isStarted()) {
+      miniHS2.stop();
+    }
     // setup whitelist
     HiveConf testConf = new HiveConf();
 
@@ -759,6 +772,8 @@ public class TestJdbcWithMiniHS2 {
     // HDFS scratch dir
     scratchDirPath = new Path(HiveConf.getVar(conf, HiveConf.ConfVars.SCRATCHDIR));
     verifyScratchDir(conf, fs, scratchDirPath, expectedFSPermission, userName, false);
+    hs2Conn.close();
+
     // Test with multi-level scratch dir path
     // Stop HiveServer2
     if (miniHS2.isStarted()) {
@@ -808,6 +823,10 @@ public class TestJdbcWithMiniHS2 {
       hs2Conn = getConnection(miniHS2.getJdbcURL(), userName, "password");
     } catch (Exception e) {
       fail("Not expecting exception: " + e);
+    } finally {
+      if (hs2Conn != null) {
+        hs2Conn.close();
+      }
     }
 
     // This should fail with given HTTP response code 413 in error message, since header is more
@@ -818,6 +837,10 @@ public class TestJdbcWithMiniHS2 {
     } catch (Exception e) {
       assertTrue("Header exception thrown", e != null);
       assertTrue(e.getMessage().contains("HTTP Response code: 413"));
+    } finally {
+      if (hs2Conn != null) {
+        hs2Conn.close();
+      }
     }
 
     // Stop HiveServer2 to increase header size
@@ -834,6 +857,10 @@ public class TestJdbcWithMiniHS2 {
       hs2Conn = getConnection(miniHS2.getJdbcURL(), userName, "password");
     } catch (Exception e) {
       fail("Not expecting exception: " + e);
+    } finally {
+      if (hs2Conn != null) {
+        hs2Conn.close();
+      }
     }
   }
 
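The TestJdbcWithMiniHS2 changes all serve one goal: connections are no longer opened unconditionally in @Before (which forced an HMS-backed session for every test), but created on demand by each test and closed defensively. A rough JUnit 4 fixture sketch of that pattern follows; the JDBC URL and credentials are placeholders, and running it for real would require a reachable HiveServer2 plus the Hive JDBC driver on the classpath.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

import org.junit.After;
import org.junit.Test;

public class LazyConnectionFixtureSketch {

  // Placeholder URL; a real test would point at a MiniHS2 instance.
  private static final String JDBC_URL = "jdbc:hive2://localhost:10000/default";

  private Connection conn;  // opened lazily by the tests that need it

  private Connection getConnection() throws Exception {
    return DriverManager.getConnection(JDBC_URL, System.getProperty("user.name"), "bar");
  }

  @After
  public void tearDown() throws Exception {
    // Guarded close: tests that never opened a connection must not NPE here.
    if (conn != null) {
      conn.close();
      conn = null;
    }
  }

  @Test
  public void testSomethingThatNeedsAConnection() throws Exception {
    conn = getConnection();               // pay the connection cost only here
    try (Statement stmt = conn.createStatement()) {
      stmt.execute("SELECT 1");
    }
  }
}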
http://git-wip-us.apache.org/repos/asf/hive/blob/9bebaf61/itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java
----------------------------------------------------------------------
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java b/itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java
index 9d86e57..01faaba 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java
@@ -72,6 +72,12 @@ public class HBaseQTestUtil extends QTestUtil {
   }
 
   @Override
+  protected void initConfFromSetup() throws Exception {
+    super.initConfFromSetup();
+    hbaseSetup.preTest(conf);
+  }
+
+  @Override
   public void createSources(String tname) throws Exception {
     super.createSources(tname);
 

http://git-wip-us.apache.org/repos/asf/hive/blob/9bebaf61/itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseTestSetup.java
----------------------------------------------------------------------
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseTestSetup.java b/itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseTestSetup.java
index e6383dc..cee7158 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseTestSetup.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseTestSetup.java
@@ -22,9 +22,6 @@ import java.io.IOException;
 import java.net.ServerSocket;
 import java.util.Arrays;
 
-import junit.extensions.TestSetup;
-import junit.framework.Test;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HColumnDescriptor;

http://git-wip-us.apache.org/repos/asf/hive/blob/9bebaf61/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
----------------------------------------------------------------------
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
index 11e529d..0a954fc 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -513,6 +513,7 @@ public class QTestUtil {
       dfs.shutdown();
       dfs = null;
     }
+    Hive.closeCurrent();
   }
 
   public String readEntireFileIntoString(File queryFile) throws IOException {
@@ -734,8 +735,9 @@ public class QTestUtil {
       return;
     }
 
-    db.getConf().set("hive.metastore.filter.hook",
+    conf.set("hive.metastore.filter.hook",
         "org.apache.hadoop.hive.metastore.DefaultMetaStoreFilterHookImpl");
+    db = Hive.get(conf);
     // Delete any tables other than the source tables
     // and any databases other than the default database.
     for (String dbName : db.getAllDatabases()) {
@@ -803,16 +805,20 @@ public class QTestUtil {
       return;
     }
 
-    clearTablesCreatedDuringTests();
-    clearKeysCreatedInTests();
-
     // allocate and initialize a new conf since a test can
     // modify conf by using 'set' commands
     conf = new HiveConf(Driver.class);
     initConf();
+    initConfFromSetup();
+
     // renew the metastore since the cluster type is unencrypted
     db = Hive.get(conf);  // propagate new conf to meta store
 
+    clearTablesCreatedDuringTests();
+    clearKeysCreatedInTests();
+  }
+
+  protected void initConfFromSetup() throws Exception {
     setup.preTest(conf);
   }
 
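The QTestUtil/HBaseQTestUtil changes introduce a small template-method hook: the teardown path rebuilds the conf, calls a new protected initConfFromSetup(), and only then re-acquires the Hive object, so HBase-specific setup lands on the same conf instance the metastore client will see. A simplified, self-contained sketch of that override hook is below; apart from initConfFromSetup(), the class and method names are invented for illustration and Properties stands in for HiveConf.

import java.util.Properties;

// Simplified illustration of the hook added in this patch: the base class
// rebuilds its configuration and exposes a protected step that subclasses
// can extend before the configuration is actually consumed.
class BaseTestUtilSketch {
  protected Properties conf = new Properties();

  public void clearTestSideEffects() throws Exception {
    conf = new Properties();        // fresh conf, like "conf = new HiveConf(Driver.class)"
    initConfFromSetup();            // subclass hook runs before conf is consumed
    System.out.println("using conf: " + conf);
  }

  protected void initConfFromSetup() throws Exception {
    conf.setProperty("base.setup", "true");
  }
}

class HBaseTestUtilSketch extends BaseTestUtilSketch {
  @Override
  protected void initConfFromSetup() throws Exception {
    super.initConfFromSetup();
    conf.setProperty("hbase.setup", "true");   // mirrors hbaseSetup.preTest(conf)
  }
}

public class TemplateHookDemo {
  public static void main(String[] args) throws Exception {
    new HBaseTestUtilSketch().clearTestSideEffects();
  }
}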
http://git-wip-us.apache.org/repos/asf/hive/blob/9bebaf61/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
index 7d5ddee..16843af 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
@@ -187,7 +187,7 @@ public class HiveMetaStoreClient implements IMetaStoreClient {
   private boolean isConnected = false;
   private URI metastoreUris[];
   private final HiveMetaHookLoader hookLoader;
-  protected final HiveConf conf;
+  protected final HiveConf conf;  // Keep a copy of HiveConf so if Session conf changes, we may need to get a new HMS client.
   protected boolean fastpath = false;
   private String tokenStrForm;
   private final boolean localMetaStore;
@@ -214,8 +214,10 @@ public class HiveMetaStoreClient implements IMetaStoreClient {
     this.hookLoader = hookLoader;
     if (conf == null) {
       conf = new HiveConf(HiveMetaStoreClient.class);
+      this.conf = conf;
+    } else {
+      this.conf = new HiveConf(conf);
     }
-    this.conf = conf;
     filterHook = loadFilterHooks();
     fileMetadataBatchSize = HiveConf.getIntVar(
         conf, HiveConf.ConfVars.METASTORE_BATCH_RETRIEVE_OBJECTS_MAX);
@@ -230,10 +232,10 @@ public class HiveMetaStoreClient implements IMetaStoreClient {
       // instantiate the metastore server handler directly instead of connecting
       // through the network
       if (conf.getBoolVar(ConfVars.METASTORE_FASTPATH)) {
-        client = new HiveMetaStore.HMSHandler("hive client", conf, true);
+        client = new HiveMetaStore.HMSHandler("hive client", this.conf, true);
         fastpath = true;
       } else {
-        client = HiveMetaStore.newRetryingHMSHandler("hive client", conf, true);
+        client = HiveMetaStore.newRetryingHMSHandler("hive client", this.conf, true);
       }
       isConnected = true;
       snapshotActiveConf();

http://git-wip-us.apache.org/repos/asf/hive/blob/9bebaf61/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
index ce43f7d..96c826b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
@@ -539,10 +539,6 @@ public class SessionState {
     // Get the following out of the way when you start the session these take a
     // while and should be done when we start up.
     try {
-      // Hive object instance should be created with a copy of the conf object. If the conf is
-      // shared with SessionState, other parts of the code might update the config, but
-      // Hive.get(HiveConf) would not recognize the case when it needs refreshing
-      Hive.get(new HiveConf(startSs.sessionConf)).getMSC();
       UserGroupInformation sessionUGI = Utils.getUGI();
       FileSystem.get(startSs.sessionConf);
 
@@ -568,10 +564,6 @@
       }
     } catch (RuntimeException e) {
       throw e;
-    } catch (Hive.SchemaException e) {
-      RuntimeException ex = new RuntimeException(e.getMessage());
-      ex.setStackTrace(new StackTraceElement[0]);
-      throw ex;
     } catch (Exception e) {
       // Catch-all due to some exec time dependencies on session state
       // that would cause ClassNoFoundException otherwise
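Taken together, the two production-code changes above carry the patch: HiveMetaStoreClient now stores its own HiveConf copy (so later session-level conf edits cannot silently change the client's view), and SessionState.start() no longer calls Hive.get(...).getMSC(), which is what removes the eager HMS connection. A minimal sketch of the defensive-copy idea follows, using a plain Map instead of HiveConf; ConfCopyingClient is an invented name for illustration only.

import java.util.HashMap;
import java.util.Map;

// Illustration of the defensive-copy pattern applied in HiveMetaStoreClient's
// constructor: the client keeps its own copy of the configuration so callers
// mutating the original afterwards cannot change what the client sees.
public class ConfCopyingClient {

  private final Map<String, String> conf;

  public ConfCopyingClient(Map<String, String> conf) {
    if (conf == null) {
      this.conf = new HashMap<>();          // default conf, analogous to new HiveConf(...)
    } else {
      this.conf = new HashMap<>(conf);      // copy, analogous to new HiveConf(conf)
    }
  }

  public String get(String key) {
    return conf.get(key);
  }

  public static void main(String[] args) {
    Map<String, String> sessionConf = new HashMap<>();
    sessionConf.put("hive.metastore.uris", "thrift://host-a:9083");

    ConfCopyingClient client = new ConfCopyingClient(sessionConf);
    sessionConf.put("hive.metastore.uris", "thrift://host-b:9083");  // later session change

    // The client still sees the value it was constructed with.
    System.out.println(client.get("hive.metastore.uris"));  // prints thrift://host-a:9083
  }
}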