hive-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From: ga...@apache.org
Subject: [14/17] hive git commit: HIVE-17234 Remove HBase metastore from master (Alan Gates, reviewed by Daniel Dai and Sergey Shelukhin)
Date: Fri, 04 Aug 2017 21:14:35 GMT
http://git-wip-us.apache.org/repos/asf/hive/blob/5e061557/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestStorageDescriptorSharing.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestStorageDescriptorSharing.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestStorageDescriptorSharing.java
deleted file mode 100644
index c29e46a..0000000
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestStorageDescriptorSharing.java
+++ /dev/null
@@ -1,191 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.hadoop.hive.metastore.hbase;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.metastore.api.Partition;
-import org.apache.hadoop.hive.metastore.api.SerDeInfo;
-import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
-import org.apache.hadoop.hive.metastore.api.Table;
-import org.junit.AfterClass;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import java.io.IOException;
-import java.security.MessageDigest;
-import java.security.NoSuchAlgorithmException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
-/**
- * Integration tests with HBase Mini-cluster for HBaseStore
- */
-public class TestStorageDescriptorSharing extends HBaseIntegrationTests {
-
-  private static final Logger LOG = LoggerFactory.getLogger(TestHBaseStoreIntegration.class.getName());
-
-  private MessageDigest md;
-
-  @BeforeClass
-  public static void startup() throws Exception {
-    HBaseIntegrationTests.startMiniCluster();
-  }
-
-  @AfterClass
-  public static void shutdown() throws Exception {
-    HBaseIntegrationTests.shutdownMiniCluster();
-  }
-
-  @Before
-  public void setup() throws IOException {
-    setupConnection();
-    setupHBaseStore();
-    try {
-      md = MessageDigest.getInstance("MD5");
-    } catch (NoSuchAlgorithmException e) {
-      throw new RuntimeException(e);
-    }
-  }
-
-  @Test
-  public void createManyPartitions() throws Exception {
-    String dbName = "default";
-    String tableName = "manyParts";
-    int startTime = (int)(System.currentTimeMillis() / 1000);
-    List<FieldSchema> cols = new ArrayList<FieldSchema>();
-    cols.add(new FieldSchema("col1", "int", "nocomment"));
-    SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
-    StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output", false, 0,
-        serde, null, null, emptyParameters);
-    List<FieldSchema> partCols = new ArrayList<FieldSchema>();
-    partCols.add(new FieldSchema("pc", "string", ""));
-    Table table = new Table(tableName, dbName, "me", startTime, startTime, 0, sd, partCols,
-        emptyParameters, null, null, null);
-    store.createTable(table);
-
-    List<String> partVals = Arrays.asList("alan", "bob", "carl", "doug", "ethan");
-    for (String val : partVals) {
-      List<String> vals = new ArrayList<String>();
-      vals.add(val);
-      StorageDescriptor psd = new StorageDescriptor(sd);
-      psd.setLocation("file:/tmp/pc=" + val);
-      Partition part = new Partition(vals, dbName, tableName, startTime, startTime, psd,
-          emptyParameters);
-      store.addPartition(part);
-
-      Partition p = store.getPartition(dbName, tableName, vals);
-      Assert.assertEquals("file:/tmp/pc=" + val, p.getSd().getLocation());
-    }
-
-    Assert.assertEquals(1, HBaseReadWrite.getInstance().countStorageDescriptor());
-
-    String tableName2 = "differentTable";
-    sd = new StorageDescriptor(cols, "file:/tmp", "input2", "output", false, 0,
-        serde, null, null, emptyParameters);
-    table = new Table(tableName2, "default", "me", startTime, startTime, 0, sd, null,
-        emptyParameters, null, null, null);
-    store.createTable(table);
-
-    Assert.assertEquals(2, HBaseReadWrite.getInstance().countStorageDescriptor());
-
-    // Drop one of the partitions and make sure it doesn't drop the storage descriptor
-    store.dropPartition(dbName, tableName, Arrays.asList(partVals.get(0)));
-    Assert.assertEquals(2, HBaseReadWrite.getInstance().countStorageDescriptor());
-
-    // Alter the second table in a few ways to make sure it changes its descriptor properly
-    table = store.getTable(dbName, tableName2);
-    byte[] sdHash = HBaseUtils.hashStorageDescriptor(table.getSd(), md);
-
-    // Alter the table without touching the storage descriptor
-    table.setLastAccessTime(startTime + 1);
-    store.alterTable(dbName, tableName2, table);
-    Assert.assertEquals(2, HBaseReadWrite.getInstance().countStorageDescriptor());
-    table = store.getTable(dbName, tableName2);
-    byte[] alteredHash = HBaseUtils.hashStorageDescriptor(table.getSd(), md);
-    Assert.assertArrayEquals(sdHash, alteredHash);
-
-    // Alter the table, changing the storage descriptor
-    table.getSd().setOutputFormat("output_changed");
-    store.alterTable(dbName, tableName2, table);
-    Assert.assertEquals(2, HBaseReadWrite.getInstance().countStorageDescriptor());
-    table = store.getTable(dbName, tableName2);
-    alteredHash = HBaseUtils.hashStorageDescriptor(table.getSd(), md);
-    Assert.assertFalse(Arrays.equals(sdHash, alteredHash));
-
-    // Alter one of the partitions without touching the storage descriptor
-    Partition part = store.getPartition(dbName, tableName, Arrays.asList(partVals.get(1)));
-    sdHash = HBaseUtils.hashStorageDescriptor(part.getSd(), md);
-    part.setLastAccessTime(part.getLastAccessTime() + 1);
-    store.alterPartition(dbName, tableName, Arrays.asList(partVals.get(1)), part);
-    Assert.assertEquals(2, HBaseReadWrite.getInstance().countStorageDescriptor());
-    part = store.getPartition(dbName, tableName, Arrays.asList(partVals.get(1)));
-    alteredHash = HBaseUtils.hashStorageDescriptor(part.getSd(), md);
-    Assert.assertArrayEquals(sdHash, alteredHash);
-
-    // Alter the partition, changing the storage descriptor
-    part.getSd().setOutputFormat("output_changed_some_more");
-    store.alterPartition(dbName, tableName, Arrays.asList(partVals.get(1)), part);
-    Assert.assertEquals(3, HBaseReadWrite.getInstance().countStorageDescriptor());
-    part = store.getPartition(dbName, tableName, Arrays.asList(partVals.get(1)));
-    alteredHash = HBaseUtils.hashStorageDescriptor(part.getSd(), md);
-    Assert.assertFalse(Arrays.equals(sdHash, alteredHash));
-
-    // Alter multiple partitions without touching the storage descriptors
-    List<Partition> parts = store.getPartitions(dbName, tableName, -1);
-    sdHash = HBaseUtils.hashStorageDescriptor(parts.get(1).getSd(), md);
-    for (int i = 1; i < 3; i++) {
-      parts.get(i).setLastAccessTime(97);
-    }
-    List<List<String>> listPartVals = new ArrayList<List<String>>();
-    for (String pv : partVals.subList(1, partVals.size())) {
-      listPartVals.add(Arrays.asList(pv));
-    }
-    store.alterPartitions(dbName, tableName, listPartVals, parts);
-    Assert.assertEquals(3, HBaseReadWrite.getInstance().countStorageDescriptor());
-    parts = store.getPartitions(dbName, tableName, -1);
-    alteredHash = HBaseUtils.hashStorageDescriptor(parts.get(1).getSd(), md);
-    Assert.assertArrayEquals(sdHash, alteredHash);
-
-    // Alter multiple partitions changing the storage descriptors
-    parts = store.getPartitions(dbName, tableName, -1);
-    sdHash = HBaseUtils.hashStorageDescriptor(parts.get(1).getSd(), md);
-    for (int i = 1; i < 3; i++) {
-      parts.get(i).getSd().setOutputFormat("yet_a_different_of");
-    }
-    store.alterPartitions(dbName, tableName, listPartVals, parts);
-    Assert.assertEquals(4, HBaseReadWrite.getInstance().countStorageDescriptor());
-    parts = store.getPartitions(dbName, tableName, -1);
-    alteredHash = HBaseUtils.hashStorageDescriptor(parts.get(1).getSd(), md);
-    Assert.assertFalse(Arrays.equals(sdHash, alteredHash));
-
-    for (String partVal : partVals.subList(1, partVals.size())) {
-      store.dropPartition(dbName, tableName, Arrays.asList(partVal));
-    }
-    store.dropTable(dbName, tableName);
-    store.dropTable(dbName, tableName2);
-
-    Assert.assertEquals(0, HBaseReadWrite.getInstance().countStorageDescriptor());
-
-
-  }
-}
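
The deleted test above exercised storage-descriptor sharing: descriptors are hashed (here via MD5), identical descriptors are stored once, and countStorageDescriptor() only grows when a descriptor actually changes. A minimal, self-contained sketch of that dedup idea; the SdDedupSketch class and its serialize placeholder are illustrative, standing in for the Thrift serialization that HBaseUtils.hashStorageDescriptor performed:

  import java.nio.charset.StandardCharsets;
  import java.security.MessageDigest;
  import java.util.HashMap;
  import java.util.Map;

  // Illustrative sketch of storage-descriptor sharing: descriptors are keyed
  // by an MD5 digest, so identical descriptors collapse to one stored copy.
  public class SdDedupSketch {
    private final Map<String, byte[]> store = new HashMap<>();

    // Placeholder for the Thrift serialization the real code performed.
    private static byte[] serialize(String descriptor) {
      return descriptor.getBytes(StandardCharsets.UTF_8);
    }

    public String put(String descriptor) throws Exception {
      byte[] bytes = serialize(descriptor);
      byte[] digest = MessageDigest.getInstance("MD5").digest(bytes);
      StringBuilder hex = new StringBuilder();
      for (byte b : digest) {
        hex.append(String.format("%02x", b));
      }
      store.putIfAbsent(hex.toString(), bytes); // identical descriptors share one row
      return hex.toString();
    }

    // Analogous to HBaseReadWrite.countStorageDescriptor() in the test above.
    public int countStorageDescriptors() {
      return store.size();
    }
  }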

http://git-wip-us.apache.org/repos/asf/hive/blob/5e061557/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestLocationQueries.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestLocationQueries.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestLocationQueries.java
index c17ca10..dd2be96 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestLocationQueries.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestLocationQueries.java
@@ -90,7 +90,7 @@ public class TestLocationQueries extends BaseTestQueries {
         String hadoopVer, String locationSubdir)
       throws Exception
     {
-      super(outDir, logDir, miniMr, null, hadoopVer, "", "", false, false);
+      super(outDir, logDir, miniMr, null, hadoopVer, "", "", false);
       this.locationSubdir = locationSubdir;
     }
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/5e061557/itests/util/src/main/java/org/apache/hadoop/hive/accumulo/AccumuloQTestUtil.java
----------------------------------------------------------------------
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/accumulo/AccumuloQTestUtil.java b/itests/util/src/main/java/org/apache/hadoop/hive/accumulo/AccumuloQTestUtil.java
index 749abb5..a4dd07e 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/accumulo/AccumuloQTestUtil.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/accumulo/AccumuloQTestUtil.java
@@ -29,7 +29,7 @@ public class AccumuloQTestUtil extends QTestUtil {
   public AccumuloQTestUtil(String outDir, String logDir, MiniClusterType miniMr,
      AccumuloTestSetup setup, String initScript, String cleanupScript) throws Exception {
 
-    super(outDir, logDir, miniMr, null, "0.20", initScript, cleanupScript, false, false);
+    super(outDir, logDir, miniMr, null, "0.20", initScript, cleanupScript, false);
     setup.setupWithHiveConf(conf);
     this.setup = setup;
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/5e061557/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCliConfig.java
----------------------------------------------------------------------
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCliConfig.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCliConfig.java
index c12f51e..72336e2 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCliConfig.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCliConfig.java
@@ -45,7 +45,7 @@ public abstract class AbstractCliConfig {
   public static final String HIVE_ROOT = getHiveRoot();
 
   public static enum MetastoreType {
-    sql, hbase
+    sql
   };
 
   private MetastoreType metastoreType = MetastoreType.sql;
@@ -413,8 +413,6 @@ public abstract class AbstractCliConfig {
     if (metaStoreTypeProperty != null) {
       if (metaStoreTypeProperty.equalsIgnoreCase("sql")) {
         metastoreType = MetastoreType.sql;
-      } else if (metaStoreTypeProperty.equalsIgnoreCase("hbase")) {
-        metastoreType = MetastoreType.hbase;
       } else {
         throw new IllegalArgumentException("Unknown metastore type: " + metaStoreTypeProperty);
       }

http://git-wip-us.apache.org/repos/asf/hive/blob/5e061557/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCoreBlobstoreCliDriver.java
----------------------------------------------------------------------
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCoreBlobstoreCliDriver.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCoreBlobstoreCliDriver.java
index 02abe53..67e03a4 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCoreBlobstoreCliDriver.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCoreBlobstoreCliDriver.java
@@ -55,11 +55,10 @@ public abstract class AbstractCoreBlobstoreCliDriver extends CliAdapter {
     String hiveConfDir = cliConfig.getHiveConfDir();
     String initScript = cliConfig.getInitScript();
     String cleanupScript = cliConfig.getCleanupScript();
-    boolean useHBaseMetastore = cliConfig.getMetastoreType() == MetastoreType.hbase;
     try {
       String hadoopVer = cliConfig.getHadoopVersion();
       qt = new QTestUtil((cliConfig.getResultsDir()), (cliConfig.getLogDir()), miniMR,
-          hiveConfDir, hadoopVer, initScript, cleanupScript, useHBaseMetastore, true);
+          hiveConfDir, hadoopVer, initScript, cleanupScript, true);
 
       if (Strings.isNullOrEmpty(qt.getConf().get(HCONF_TEST_BLOBSTORE_PATH))) {
         fail(String.format("%s must be set. Try setting in blobstore-conf.xml", HCONF_TEST_BLOBSTORE_PATH));

http://git-wip-us.apache.org/repos/asf/hive/blob/5e061557/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java
----------------------------------------------------------------------
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java
index d59b650..a1762ec 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java
@@ -55,7 +55,6 @@ public class CoreCliDriver extends CliAdapter {
     final String hiveConfDir = cliConfig.getHiveConfDir();
     final String initScript = cliConfig.getInitScript();
     final String cleanupScript = cliConfig.getCleanupScript();
-    final boolean useHBaseMetastore = cliConfig.getMetastoreType() == MetastoreType.hbase;
     try {
       final String hadoopVer = cliConfig.getHadoopVersion();
 
@@ -63,8 +62,7 @@ public class CoreCliDriver extends CliAdapter {
         @Override
         public QTestUtil invokeInternal() throws Exception {
           return new QTestUtil((cliConfig.getResultsDir()), (cliConfig.getLogDir()), miniMR,
-              hiveConfDir, hadoopVer, initScript, cleanupScript, useHBaseMetastore, true,
-              cliConfig.getFsType());
+              hiveConfDir, hadoopVer, initScript, cleanupScript, true, cliConfig.getFsType());
         }
       }.invoke("QtestUtil instance created", LOG, true);
 

http://git-wip-us.apache.org/repos/asf/hive/blob/5e061557/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCompareCliDriver.java
----------------------------------------------------------------------
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCompareCliDriver.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCompareCliDriver.java
index bff81dd..64b419b 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCompareCliDriver.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCompareCliDriver.java
@@ -52,7 +52,7 @@ public class CoreCompareCliDriver extends CliAdapter{
     try {
       String hadoopVer = cliConfig.getHadoopVersion();
       qt = new QTestUtil(cliConfig.getResultsDir(), cliConfig.getLogDir(), miniMR,
-      hiveConfDir, hadoopVer, initScript, cleanupScript, false, false);
+      hiveConfDir, hadoopVer, initScript, cleanupScript, false);
 
       // do a one time initialization
       qt.cleanUp();

http://git-wip-us.apache.org/repos/asf/hive/blob/5e061557/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreNegativeCliDriver.java
----------------------------------------------------------------------
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreNegativeCliDriver.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreNegativeCliDriver.java
index 438a61e..7947988 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreNegativeCliDriver.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreNegativeCliDriver.java
@@ -45,7 +45,7 @@ public class CoreNegativeCliDriver extends CliAdapter{
     try {
       String hadoopVer = cliConfig.getHadoopVersion();
       qt = new QTestUtil((cliConfig.getResultsDir()), (cliConfig.getLogDir()), miniMR,
-       hiveConfDir, hadoopVer, initScript, cleanupScript, false, false);
+       hiveConfDir, hadoopVer, initScript, cleanupScript, false);
       // do a one time initialization
       qt.cleanUp();
       qt.createSources();

http://git-wip-us.apache.org/repos/asf/hive/blob/5e061557/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CorePerfCliDriver.java
----------------------------------------------------------------------
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CorePerfCliDriver.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CorePerfCliDriver.java
index 34eeb77..d80bd44 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CorePerfCliDriver.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CorePerfCliDriver.java
@@ -61,7 +61,7 @@ public class CorePerfCliDriver extends CliAdapter{
       String hadoopVer = cliConfig.getHadoopVersion();
       qt = new QTestUtil(cliConfig.getResultsDir(), cliConfig.getLogDir(), miniMR, hiveConfDir,
           hadoopVer, initScript,
-          cleanupScript, false, false);
+          cleanupScript, false);
 
       // do a one time initialization
       qt.cleanUp();

http://git-wip-us.apache.org/repos/asf/hive/blob/5e061557/itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java
----------------------------------------------------------------------
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java b/itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java
index 01faaba..aeb7215 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java
@@ -46,7 +46,7 @@ public class HBaseQTestUtil extends QTestUtil {
     String initScript, String cleanupScript)
     throws Exception {
 
-    super(outDir, logDir, miniMr, null, "0.20", initScript, cleanupScript, false, false);
+    super(outDir, logDir, miniMr, null, "0.20", initScript, cleanupScript, false);
     hbaseSetup = setup;
     hbaseSetup.preTest(conf);
     this.conn = setup.getConnection();

http://git-wip-us.apache.org/repos/asf/hive/blob/5e061557/itests/util/src/main/java/org/apache/hadoop/hive/metastore/hbase/HBaseStoreTestUtil.java
----------------------------------------------------------------------
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/metastore/hbase/HBaseStoreTestUtil.java b/itests/util/src/main/java/org/apache/hadoop/hive/metastore/hbase/HBaseStoreTestUtil.java
deleted file mode 100644
index 21e8f7e..0000000
--- a/itests/util/src/main/java/org/apache/hadoop/hive/metastore/hbase/HBaseStoreTestUtil.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.hadoop.hive.metastore.hbase;
-
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hive.conf.HiveConf;
-
-import java.util.List;
-
-public class HBaseStoreTestUtil {
-  public static void initHBaseMetastore(HBaseAdmin admin, HiveConf conf) throws Exception {
-    for (String tableName : HBaseReadWrite.tableNames) {
-      List<byte[]> families = HBaseReadWrite.columnFamilies.get(tableName);
-      HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(tableName));
-      for (byte[] family : families) {
-        HColumnDescriptor columnDesc = new HColumnDescriptor(family);
-        desc.addFamily(columnDesc);
-      }
-      admin.createTable(desc);
-    }
-    admin.close();
-    if (conf != null) {
-      HBaseReadWrite.setConf(conf);
-    }
-  }
-}
\ No newline at end of file
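
The deleted utility above created one HBase table per entry in HBaseReadWrite.tableNames, adding each of that table's column families before creation. A sketch of the single-table step in isolation, using the same HBase 1.x admin API the utility used (the CreateTableSketch class and createTable helper are illustrative names):

  import org.apache.hadoop.hbase.HColumnDescriptor;
  import org.apache.hadoop.hbase.HTableDescriptor;
  import org.apache.hadoop.hbase.TableName;
  import org.apache.hadoop.hbase.client.HBaseAdmin;

  // Isolated sketch of the per-table step initHBaseMetastore looped over:
  // one table with a single column family, via the HBase 1.x admin API.
  public class CreateTableSketch {
    static void createTable(HBaseAdmin admin, String name, byte[] family) throws Exception {
      HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(name));
      desc.addFamily(new HColumnDescriptor(family));
      admin.createTable(desc);
    }
  }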

http://git-wip-us.apache.org/repos/asf/hive/blob/5e061557/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
----------------------------------------------------------------------
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
index 535cfd9..825f826 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -103,7 +103,6 @@ import org.apache.hadoop.hive.llap.daemon.MiniLlapCluster;
 import org.apache.hadoop.hive.llap.io.api.LlapProxy;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.api.Index;
-import org.apache.hadoop.hive.metastore.hbase.HBaseStore;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.Utilities;
@@ -202,7 +201,6 @@ public class QTestUtil {
 
   private final String initScript;
   private final String cleanupScript;
-  private boolean useHBaseMetastore = false;
 
   public interface SuiteAddTestFunctor {
     public void addTestToSuite(TestSuite suite, Object setup, String tName);
@@ -347,14 +345,9 @@ public class QTestUtil {
       conf.setBoolVar(ConfVars.HIVE_VECTORIZATION_ENABLED, true);
     }
 
-    if (!useHBaseMetastore) {
-      // Plug verifying metastore in for testing DirectSQL.
-      conf.setVar(HiveConf.ConfVars.METASTORE_RAW_STORE_IMPL,
-          "org.apache.hadoop.hive.metastore.VerifyingObjectStore");
-    } else {
-      conf.setVar(ConfVars.METASTORE_RAW_STORE_IMPL, HBaseStore.class.getName());
-      conf.setBoolVar(ConfVars.METASTORE_FASTPATH, true);
-    }
+    // Plug verifying metastore in for testing DirectSQL.
+    conf.setVar(ConfVars.METASTORE_RAW_STORE_IMPL,
+        "org.apache.hadoop.hive.metastore.VerifyingObjectStore");
 
     if (mr != null) {
       mr.setupConfiguration(conf);
@@ -514,40 +507,22 @@ public class QTestUtil {
     return "jceks://file" + new Path(keyDir, "test.jks").toUri();
   }
 
-  private void startMiniHBaseCluster() throws Exception {
-    Configuration hbaseConf = HBaseConfiguration.create();
-    hbaseConf.setInt("hbase.master.info.port", -1);
-    utility = new HBaseTestingUtility(hbaseConf);
-    utility.startMiniCluster();
-    conf = new HiveConf(utility.getConfiguration(), Driver.class);
-    HBaseAdmin admin = utility.getHBaseAdmin();
-    // Need to use reflection here to make compilation pass since HBaseIntegrationTests
-    // is not compiled in hadoop-1. All HBaseMetastore tests run under hadoop-2, so this
-    // guarantee HBaseIntegrationTests exist when we hitting this code path
-    java.lang.reflect.Method initHBaseMetastoreMethod = Class.forName(
-        "org.apache.hadoop.hive.metastore.hbase.HBaseStoreTestUtil")
-        .getMethod("initHBaseMetastore", HBaseAdmin.class, HiveConf.class);
-    initHBaseMetastoreMethod.invoke(null, admin, conf);
-    conf.setVar(ConfVars.METASTORE_RAW_STORE_IMPL, HBaseStore.class.getName());
-    conf.setBoolVar(ConfVars.METASTORE_FASTPATH, true);
-  }
-
   public QTestUtil(String outDir, String logDir, MiniClusterType clusterType,
                    String confDir, String hadoopVer, String initScript, String cleanupScript,
-                   boolean useHBaseMetastore, boolean withLlapIo) throws Exception {
+                   boolean withLlapIo) throws Exception {
     this(outDir, logDir, clusterType, confDir, hadoopVer, initScript, cleanupScript,
-        useHBaseMetastore, withLlapIo, null);
+        withLlapIo, null);
   }
 
   public QTestUtil(String outDir, String logDir, MiniClusterType clusterType,
      String confDir, String hadoopVer, String initScript, String cleanupScript,
-      boolean useHBaseMetastore, boolean withLlapIo, FsType fsType)
+      boolean withLlapIo, FsType fsType)
     throws Exception {
     LOG.info("Setting up QTestUtil with outDir={}, logDir={}, clusterType={}, confDir={},"
+
-        " hadoopVer={}, initScript={}, cleanupScript={}, useHbaseMetaStore={}, withLlapIo={},"
+
+        " hadoopVer={}, initScript={}, cleanupScript={}, withLlapIo={}," +
             " fsType={}"
         , outDir, logDir, clusterType, confDir, hadoopVer, initScript, cleanupScript,
-        useHBaseMetastore, withLlapIo, fsType);
+        withLlapIo, fsType);
     Preconditions.checkNotNull(clusterType, "ClusterType cannot be null");
     if (fsType != null) {
       this.fsType = fsType;
@@ -556,7 +531,6 @@ public class QTestUtil {
     }
     this.outDir = outDir;
     this.logDir = logDir;
-    this.useHBaseMetastore = useHBaseMetastore;
     this.srcTables=getSrcTables();
     this.srcUDFs = getSrcUDFs();
 
@@ -567,11 +541,7 @@ public class QTestUtil {
     }
 
     queryState = new QueryState.Builder().withHiveConf(new HiveConf(Driver.class)).build();
-    if (useHBaseMetastore) {
-      startMiniHBaseCluster();
-    } else {
-      conf = queryState.getConf();
-    }
+    conf = queryState.getConf();
     this.hadoopVer = getHadoopMainVersion(hadoopVer);
     qMap = new TreeMap<String, String>();
     qSkipSet = new HashSet<String>();
@@ -697,9 +667,6 @@ public class QTestUtil {
         sparkSession = null;
       }
     }
-    if (useHBaseMetastore) {
-      utility.shutdownMiniCluster();
-    }
     if (mr != null) {
       mr.shutdown();
       mr = null;
@@ -2032,8 +1999,7 @@ public class QTestUtil {
     for (int i = 0; i < qfiles.length; i++) {
       qt[i] = new QTestUtil(resDir, logDir, MiniClusterType.none, null, "0.20",
         initScript == null ? defaultInitScript : initScript,
-        cleanupScript == null ? defaultCleanupScript : cleanupScript,
-        false, false);
+        cleanupScript == null ? defaultCleanupScript : cleanupScript, false);
       qt[i].addFile(qfiles[i]);
       qt[i].clearTestSideEffects();
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/5e061557/itests/util/src/main/java/org/apache/hadoop/hive/ql/parse/CoreParseNegative.java
----------------------------------------------------------------------
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/parse/CoreParseNegative.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/parse/CoreParseNegative.java
index 31f69a3..30ac6d1 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/parse/CoreParseNegative.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/parse/CoreParseNegative.java
@@ -54,8 +54,7 @@ public class CoreParseNegative extends CliAdapter{
     try {
       String hadoopVer = cliConfig.getHadoopVersion();
       qt = new QTestUtil((cliConfig.getResultsDir()), (cliConfig.getLogDir()), miniMR, null,
-          hadoopVer,
-       initScript, cleanupScript, false, false);
+          hadoopVer, initScript, cleanupScript, false);
     } catch (Exception e) {
       System.err.println("Exception: " + e.getMessage());
       e.printStackTrace();

http://git-wip-us.apache.org/repos/asf/hive/blob/5e061557/metastore/pom.xml
----------------------------------------------------------------------
diff --git a/metastore/pom.xml b/metastore/pom.xml
index 073d039..1fa17a7 100644
--- a/metastore/pom.xml
+++ b/metastore/pom.xml
@@ -304,11 +304,6 @@
                     <mkdir dir="${protobuf.build.dir}"/>
                     <exec executable="protoc" failonerror="true">
                       <arg value="--java_out=${protobuf.build.dir}"/>
-                      <arg value="-I=${protobuf.src.dir}/org/apache/hadoop/hive/metastore/hbase"/>
-                      <arg value="${protobuf.src.dir}/org/apache/hadoop/hive/metastore/hbase/hbase_metastore_proto.proto"/>
-                    </exec>
-                    <exec executable="protoc" failonerror="true">
-                      <arg value="--java_out=${protobuf.build.dir}"/>
                       <arg value="-I=${protobuf.src.dir}/org/apache/hadoop/hive/metastore"/>
                       <arg value="${protobuf.src.dir}/org/apache/hadoop/hive/metastore/metastore.proto"/>
                     </exec>

