Subject: svn commit: r1448101 - in /hive/trunk/metastore/src: java/org/apache/hadoop/hive/metastore/ test/org/apache/hadoop/hive/metastore/
Date: Wed, 20 Feb 2013 11:44:22 -0000
To: commits@hive.apache.org
From: namit@apache.org

Author: namit
Date: Wed Feb 20 11:44:21 2013
New Revision: 1448101

URL: http://svn.apache.org/r1448101
Log:
HIVE-4004 Incorrect status for AddPartition metastore event if RawStore commit fails
(Dilip Joseph via namit)

Added:
    hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
    hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestMetaStoreEventListenerOnlyOnCommit.java
Modified:
    hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
    hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/DummyListener.java

Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java?rev=1448101&r1=1448100&r2=1448101&view=diff
==============================================================================
--- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java (original)
+++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java Wed Feb 20 11:44:21 2013
@@ -1603,8 +1603,8 @@ public class HiveMetaStore extends Thrif
     private int add_partitions_core(final RawStore ms, final List parts)
         throws MetaException, InvalidObjectException, AlreadyExistsException {
       String db = parts.get(0).getDbName();
-      String tbl = parts.get(0).getTableName();
-      logInfo("add_partitions : db=" + db + " tbl=" + tbl);
+      String tblName = parts.get(0).getTableName();
+      logInfo("add_partitions : db=" + db + " tbl=" + tblName);
       boolean success = false;
       Map addedPartitions = new HashMap();
@@ -1615,8 +1615,7 @@ public class HiveMetaStore extends Thrif
           Entry e = add_partition_core_notxn(ms, part, null);
          addedPartitions.put(e.getKey(), e.getValue());
        }
-        success = true;
-        ms.commitTransaction();
+        success = ms.commitTransaction();
      } finally {
        if (!success) {
          ms.rollbackTransaction();
@@ -1627,6 +1626,9 @@ public class HiveMetaStore extends Thrif
          }
        }
      }
+      for (Partition part : parts) {
+        fireMetaStoreAddPartitionEvent(ms, part, null, success);
+      }
    }
    return parts.size();
  }
@@ -1773,12 +1775,6 @@ public class HiveMetaStore extends Thrif
            wh.deleteDir(partLocation, true);
          }
        }
-        for (MetaStoreEventListener listener : listeners) {
-          AddPartitionEvent addPartitionEvent =
-              new AddPartitionEvent(tbl, part, success, this);
-          addPartitionEvent.setEnvironmentContext(envContext);
-          listener.onAddPartition(addPartitionEvent);
-        }
      }
      Map returnVal = new HashMap();
      returnVal.put(part, madeDir);
@@ -1800,10 +1796,23 @@ public class HiveMetaStore extends Thrif
      if (!success) {
        ms.rollbackTransaction();
      }
+      fireMetaStoreAddPartitionEvent(ms, part, envContext, success);
    }
    return retPtn;
  }

+  private void fireMetaStoreAddPartitionEvent(final RawStore ms,
+      final Partition part, final EnvironmentContext envContext, boolean success)
+      throws MetaException {
+    final Table tbl = ms.getTable(part.getDbName(), part.getTableName());
+    for (MetaStoreEventListener listener : listeners) {
+      AddPartitionEvent addPartitionEvent =
+          new AddPartitionEvent(tbl, part, success, this);
+      addPartitionEvent.setEnvironmentContext(envContext);
+      listener.onAddPartition(addPartitionEvent);
+    }
+  }
+
  @Override
  public Partition add_partition(final Partition part)
      throws InvalidObjectException, AlreadyExistsException, MetaException {

Modified: hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/DummyListener.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/DummyListener.java?rev=1448101&r1=1448100&r2=1448101&view=diff
==============================================================================
--- hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/DummyListener.java (original)
+++ hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/DummyListener.java Wed Feb 20 11:44:21 2013
@@ -42,6 +42,17 @@ public class DummyListener extends MetaS

   public static final List notifyList = new ArrayList();

+  /**
+   * @return The last event received, or null if no event was received.
+   */
+  public static ListenerEvent getLastEvent() {
+    if (notifyList.isEmpty()) {
+      return null;
+    } else {
+      return notifyList.get(notifyList.size() - 1);
+    }
+  }
+
   public DummyListener(Configuration config) {
     super(config);
   }

Added: hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java?rev=1448101&view=auto
==============================================================================
--- hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java (added)
+++ hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java Wed Feb 20 11:44:21 2013
@@ -0,0 +1,530 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore;
+
+import java.util.List;
+import java.util.Map;
+
+import org.apache.hadoop.conf.Configurable;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.Index;
+import org.apache.hadoop.hive.metastore.api.InvalidInputException;
+import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
+import org.apache.hadoop.hive.metastore.api.InvalidPartitionException;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
+import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.metastore.api.PartitionEventType;
+import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;
+import org.apache.hadoop.hive.metastore.api.PrincipalType;
+import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
+import org.apache.hadoop.hive.metastore.api.Role;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.metastore.api.Type;
+import org.apache.hadoop.hive.metastore.api.UnknownDBException;
+import org.apache.hadoop.hive.metastore.api.UnknownPartitionException;
+import org.apache.hadoop.hive.metastore.api.UnknownTableException;
+import org.apache.hadoop.hive.metastore.model.MDBPrivilege;
+import org.apache.hadoop.hive.metastore.model.MGlobalPrivilege;
+import org.apache.hadoop.hive.metastore.model.MPartitionColumnPrivilege;
+import org.apache.hadoop.hive.metastore.model.MPartitionPrivilege;
+import org.apache.hadoop.hive.metastore.model.MRoleMap;
+import org.apache.hadoop.hive.metastore.model.MTableColumnPrivilege;
+import org.apache.hadoop.hive.metastore.model.MTablePrivilege;
+
+/**
+ * A wrapper around {@link org.apache.hadoop.hive.metastore.ObjectStore}
+ * with the ability to control the result of commitTransaction().
+ * All other functions simply delegate to an embedded ObjectStore object.
+ * Ideally, we should have just extended ObjectStore instead of using
+ * delegation. However, since HiveMetaStore uses a Proxy, this class must
+ * not inherit from any other class.
+ */
+public class DummyRawStoreControlledCommit implements RawStore, Configurable {
+
+  private final ObjectStore objectStore;
+  public DummyRawStoreControlledCommit() {
+    objectStore = new ObjectStore();
+  }
+
+  /**
+   * If true, commitTransaction() simply delegates to commitTransaction() on the
+   * underlying ObjectStore.
+   * If false, commitTransaction() immediately returns false.
+ */ + private static boolean shouldCommitSucceed = true; + public static void setCommitSucceed(boolean flag) { + shouldCommitSucceed = flag; + } + + @Override + public boolean commitTransaction() { + if (shouldCommitSucceed) { + return objectStore.commitTransaction(); + } else { + return false; + } + } + + // All remaining functions simply delegate to objectStore + + @Override + public Configuration getConf() { + return objectStore.getConf(); + } + + @Override + public void setConf(Configuration conf) { + objectStore.setConf(conf); + } + + @Override + public void shutdown() { + objectStore.shutdown(); + } + + @Override + public boolean openTransaction() { + return objectStore.openTransaction(); + } + + @Override + public void rollbackTransaction() { + objectStore.rollbackTransaction(); + } + + @Override + public void createDatabase(Database db) throws InvalidObjectException, MetaException { + objectStore.createDatabase(db); + } + + @Override + public Database getDatabase(String dbName) throws NoSuchObjectException { + return objectStore.getDatabase(dbName); + } + + @Override + public boolean dropDatabase(String dbName) + throws NoSuchObjectException, MetaException { + return objectStore.dropDatabase(dbName); + } + + @Override + public boolean alterDatabase(String dbName, Database db) + throws NoSuchObjectException, MetaException { + + return objectStore.alterDatabase(dbName, db); + } + + @Override + public List getDatabases(String pattern) throws MetaException { + return objectStore.getDatabases(pattern); + } + + @Override + public List getAllDatabases() throws MetaException { + return objectStore.getAllDatabases(); + } + + @Override + public boolean createType(Type type) { + return objectStore.createType(type); + } + + @Override + public Type getType(String typeName) { + return objectStore.getType(typeName); + } + + @Override + public boolean dropType(String typeName) { + return objectStore.dropType(typeName); + } + + @Override + public void createTable(Table tbl) throws InvalidObjectException, MetaException { + objectStore.createTable(tbl); + } + + @Override + public boolean dropTable(String dbName, String tableName) + throws MetaException, NoSuchObjectException, + InvalidObjectException, InvalidInputException { + return objectStore.dropTable(dbName, tableName); + } + + @Override + public Table getTable(String dbName, String tableName) throws MetaException { + return objectStore.getTable(dbName, tableName); + } + + @Override + public boolean addPartition(Partition part) + throws InvalidObjectException, MetaException { + return objectStore.addPartition(part); + } + + @Override + public Partition getPartition(String dbName, String tableName, List partVals) + throws MetaException, NoSuchObjectException { + return objectStore.getPartition(dbName, tableName, partVals); + } + + @Override + public boolean dropPartition(String dbName, String tableName, List partVals) + throws MetaException, NoSuchObjectException, + InvalidObjectException, InvalidInputException { + return objectStore.dropPartition(dbName, tableName, partVals); + } + + @Override + public List getPartitions(String dbName, String tableName, int max) + throws MetaException { + return objectStore.getPartitions(dbName, tableName, max); + } + + @Override + public void alterTable(String dbName, String name, Table newTable) + throws InvalidObjectException, MetaException { + objectStore.alterTable(dbName, name, newTable); + } + + @Override + public List getTables(String dbName, String pattern) throws MetaException { + return 
objectStore.getTables(dbName, pattern); + } + + @Override + public List getTableObjectsByName(String dbName, List tableNames) + throws MetaException, UnknownDBException { + return objectStore.getTableObjectsByName(dbName, tableNames); + } + + @Override + public List getAllTables(String dbName) throws MetaException { + return objectStore.getAllTables(dbName); + } + + @Override + public List listTableNamesByFilter(String dbName, String filter, + short maxTables) throws MetaException, UnknownDBException { + return objectStore.listTableNamesByFilter(dbName, filter, maxTables); + } + + @Override + public List listPartitionNames(String dbName, String tblName, short maxParts) + throws MetaException { + return objectStore.listPartitionNames(dbName, tblName, maxParts); + } + + @Override + public List listPartitionNamesByFilter(String dbName, String tblName, + String filter, short maxParts) throws MetaException { + return objectStore.listPartitionNamesByFilter(dbName, tblName, filter, maxParts); + } + + @Override + public void alterPartition(String dbName, String tblName, List partVals, + Partition newPart) throws InvalidObjectException, MetaException { + objectStore.alterPartition(dbName, tblName, partVals, newPart); + } + + @Override + public void alterPartitions(String dbName, String tblName, + List> partValsList, List newParts) + throws InvalidObjectException, MetaException { + objectStore.alterPartitions(dbName, tblName, partValsList, newParts); + } + + @Override + public boolean addIndex(Index index) throws InvalidObjectException, MetaException { + return objectStore.addIndex(index); + } + + @Override + public Index getIndex(String dbName, String origTableName, String indexName) + throws MetaException { + return objectStore.getIndex(dbName, origTableName, indexName); + } + + @Override + public boolean dropIndex(String dbName, String origTableName, String indexName) + throws MetaException { + return objectStore.dropIndex(dbName, origTableName, indexName); + } + + @Override + public List getIndexes(String dbName, String origTableName, int max) + throws MetaException { + return objectStore.getIndexes(dbName, origTableName, max); + } + + @Override + public List listIndexNames(String dbName, String origTableName, short max) + throws MetaException { + return objectStore.listIndexNames(dbName, origTableName, max); + } + + @Override + public void alterIndex(String dbName, String baseTblName, String name, Index newIndex) + throws InvalidObjectException, MetaException { + objectStore.alterIndex(dbName, baseTblName, name, newIndex); + } + + @Override + public List getPartitionsByFilter(String dbName, String tblName, + String filter, short maxParts) throws MetaException, NoSuchObjectException { + return objectStore.getPartitionsByFilter(dbName, tblName, filter, maxParts); + } + + @Override + public List getPartitionsByNames(String dbName, String tblName, + List partNames) throws MetaException, NoSuchObjectException { + return objectStore.getPartitionsByNames(dbName, tblName, partNames); + } + + @Override + public Table markPartitionForEvent(String dbName, String tblName, + Map partVals, PartitionEventType evtType) + throws MetaException, UnknownTableException, InvalidPartitionException, + UnknownPartitionException { + return objectStore.markPartitionForEvent(dbName, tblName, partVals, evtType); + } + + @Override + public boolean isPartitionMarkedForEvent(String dbName, String tblName, + Map partName, PartitionEventType evtType) + throws MetaException, UnknownTableException, InvalidPartitionException, + 
UnknownPartitionException { + return objectStore.isPartitionMarkedForEvent(dbName, tblName, partName, evtType); + } + + @Override + public boolean addRole(String rowName, String ownerName) throws InvalidObjectException, + MetaException, NoSuchObjectException { + return objectStore.addRole(rowName, ownerName); + } + + @Override + public boolean removeRole(String roleName) + throws MetaException, NoSuchObjectException { + return objectStore.removeRole(roleName); + } + + @Override + public boolean grantRole(Role role, String userName, PrincipalType principalType, + String grantor, PrincipalType grantorType, boolean grantOption) + throws MetaException, NoSuchObjectException, InvalidObjectException { + return objectStore.grantRole(role, userName, principalType, grantor, grantorType, + grantOption); + } + + @Override + public boolean revokeRole(Role role, String userName, PrincipalType principalType) + throws MetaException, NoSuchObjectException { + return objectStore.revokeRole(role, userName, principalType); + } + + @Override + public PrincipalPrivilegeSet getUserPrivilegeSet(String userName, + List groupNames) throws InvalidObjectException, MetaException { + return objectStore.getUserPrivilegeSet(userName, groupNames); + } + + @Override + public PrincipalPrivilegeSet getDBPrivilegeSet(String dbName, String userName, + List groupNames) throws InvalidObjectException, MetaException { + return objectStore.getDBPrivilegeSet(dbName, userName, groupNames); + } + + @Override + public PrincipalPrivilegeSet getTablePrivilegeSet(String dbName, String tableName, + String userName, List groupNames) + throws InvalidObjectException, MetaException { + return objectStore.getTablePrivilegeSet(dbName, tableName, userName, groupNames); + } + + @Override + public PrincipalPrivilegeSet getPartitionPrivilegeSet(String dbName, String tableName, + String partition, String userName, List groupNames) + throws InvalidObjectException, MetaException { + return objectStore.getPartitionPrivilegeSet(dbName, tableName, partition, + userName, groupNames); + } + + @Override + public PrincipalPrivilegeSet getColumnPrivilegeSet(String dbName, String tableName, + String partitionName, String columnName, String userName, List groupNames) + throws InvalidObjectException, MetaException { + return objectStore.getColumnPrivilegeSet(dbName, tableName, partitionName, + columnName, userName, groupNames); + } + + @Override + public List listPrincipalGlobalGrants(String principalName, + PrincipalType principalType) { + return objectStore.listPrincipalGlobalGrants(principalName, principalType); + } + + @Override + public List listPrincipalDBGrants(String principalName, + PrincipalType principalType, String dbName) { + return objectStore.listPrincipalDBGrants(principalName, principalType, dbName); + } + + @Override + public List listAllTableGrants(String principalName, + PrincipalType principalType, String dbName, String tableName) { + return objectStore.listAllTableGrants(principalName, principalType, + dbName, tableName); + } + + @Override + public List listPrincipalPartitionGrants(String principalName, + PrincipalType principalType, String dbName, String tableName, String partName) { + return objectStore.listPrincipalPartitionGrants(principalName, principalType, + dbName, tableName, partName); + } + + @Override + public List listPrincipalTableColumnGrants(String principalName, + PrincipalType principalType, String dbName, String tableName, String columnName) { + return objectStore.listPrincipalTableColumnGrants(principalName, 
principalType, + dbName, tableName, columnName); + } + + @Override + public List listPrincipalPartitionColumnGrants( + String principalName, PrincipalType principalType, String dbName, String tableName, + String partName, String columnName) { + return objectStore.listPrincipalPartitionColumnGrants(principalName, principalType, + dbName, tableName, partName, columnName); + } + + @Override + public boolean grantPrivileges(PrivilegeBag privileges) throws InvalidObjectException, + MetaException, NoSuchObjectException { + return objectStore.grantPrivileges(privileges); + } + + @Override + public boolean revokePrivileges(PrivilegeBag privileges) throws InvalidObjectException, + MetaException, NoSuchObjectException { + return objectStore.revokePrivileges(privileges); + } + + @Override + public Role getRole(String roleName) throws NoSuchObjectException { + return objectStore.getRole(roleName); + } + + @Override + public List listRoleNames() { + return objectStore.listRoleNames(); + } + + @Override + public List listRoles(String principalName, PrincipalType principalType) { + return objectStore.listRoles(principalName, principalType); + } + + @Override + public Partition getPartitionWithAuth(String dbName, String tblName, + List partVals, String userName, List groupNames) + throws MetaException, NoSuchObjectException, InvalidObjectException { + return objectStore.getPartitionWithAuth(dbName, tblName, partVals, userName, + groupNames); + } + + @Override + public List getPartitionsWithAuth(String dbName, String tblName, + short maxParts, String userName, List groupNames) + throws MetaException, NoSuchObjectException, InvalidObjectException { + return objectStore.getPartitionsWithAuth(dbName, tblName, maxParts, userName, + groupNames); + } + + @Override + public List listPartitionNamesPs(String dbName, String tblName, + List partVals, short maxParts) + throws MetaException, NoSuchObjectException { + return objectStore.listPartitionNamesPs(dbName, tblName, partVals, maxParts); + } + + @Override + public List listPartitionsPsWithAuth(String dbName, String tblName, + List partVals, short maxParts, String userName, List groupNames) + throws MetaException, InvalidObjectException, NoSuchObjectException { + return objectStore.listPartitionsPsWithAuth(dbName, tblName, partVals, maxParts, + userName, groupNames); + } + + @Override + public long cleanupEvents() { + return objectStore.cleanupEvents(); + } + + @Override + public ColumnStatistics getTableColumnStatistics(String dbName, String tableName, + String colName) + throws MetaException, NoSuchObjectException, InvalidInputException { + return objectStore.getTableColumnStatistics(dbName, tableName, colName); + } + + @Override + public boolean deleteTableColumnStatistics(String dbName, String tableName, + String colName) + throws NoSuchObjectException, MetaException, InvalidObjectException, + InvalidInputException { + return objectStore.deleteTableColumnStatistics(dbName, tableName, colName); + } + + public boolean deletePartitionColumnStatistics(String dbName, String tableName, + String partName, List partVals, String colName) + throws NoSuchObjectException, MetaException, InvalidObjectException, + InvalidInputException { + return objectStore.deletePartitionColumnStatistics(dbName, tableName, partName, + partVals, colName); + } + + @Override + public ColumnStatistics getPartitionColumnStatistics(String dbName, String tableName, + String partName, List partVal, String colName) + throws MetaException, NoSuchObjectException, InvalidInputException, + 
+      InvalidObjectException {
+    return objectStore.getPartitionColumnStatistics(dbName, tableName, partName,
+        partVal, colName);
+  }
+
+  @Override
+  public boolean updateTableColumnStatistics(ColumnStatistics statsObj)
+      throws NoSuchObjectException, MetaException, InvalidObjectException,
+      InvalidInputException {
+    return objectStore.updateTableColumnStatistics(statsObj);
+  }
+
+  public boolean updatePartitionColumnStatistics(ColumnStatistics statsObj,
+      List partVals)
+      throws NoSuchObjectException, MetaException, InvalidObjectException,
+      InvalidInputException {
+    return objectStore.updatePartitionColumnStatistics(statsObj, partVals);
+  }
+}

Added: hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestMetaStoreEventListenerOnlyOnCommit.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestMetaStoreEventListenerOnlyOnCommit.java?rev=1448101&view=auto
==============================================================================
--- hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestMetaStoreEventListenerOnlyOnCommit.java (added)
+++ hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestMetaStoreEventListenerOnlyOnCommit.java Wed Feb 20 11:44:21 2013
@@ -0,0 +1,104 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore;
+
+import java.util.List;
+
+import junit.framework.TestCase;
+
+import org.apache.hadoop.hive.cli.CliSessionState;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.events.ListenerEvent;
+import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.shims.ShimLoader;
+
+/**
+ * Ensure that the status of MetaStore events depends on the RawStore's commit status.
+ */
+public class TestMetaStoreEventListenerOnlyOnCommit extends TestCase {
+
+  private HiveConf hiveConf;
+  private HiveMetaStoreClient msc;
+  private Driver driver;
+
+  @Override
+  protected void setUp() throws Exception {
+
+    super.setUp();
+
+    DummyRawStoreControlledCommit.setCommitSucceed(true);
+
+    System.setProperty(HiveConf.ConfVars.METASTORE_EVENT_LISTENERS.varname,
+        DummyListener.class.getName());
+    System.setProperty(HiveConf.ConfVars.METASTORE_RAW_STORE_IMPL.varname,
+        DummyRawStoreControlledCommit.class.getName());
+
+    int port = MetaStoreUtils.findFreePort();
+    MetaStoreUtils.startMetaStore(port, ShimLoader.getHadoopThriftAuthBridge());
+
+    hiveConf = new HiveConf(this.getClass());
+    hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + port);
+    hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
+    hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
+    hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
+    hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
+    SessionState.start(new CliSessionState(hiveConf));
+    msc = new HiveMetaStoreClient(hiveConf, null);
+    driver = new Driver(hiveConf);
+
+    DummyListener.notifyList.clear();
+  }
+
+  public void testEventStatus() throws Exception {
+    int listSize = 0;
+    List notifyList = DummyListener.notifyList;
+    assertEquals(notifyList.size(), listSize);
+
+    driver.run("CREATE DATABASE tmpDb");
+    listSize += 1;
+    notifyList = DummyListener.notifyList;
+    assertEquals(notifyList.size(), listSize);
+    assertTrue(DummyListener.getLastEvent().getStatus());
+
+    driver.run("CREATE TABLE unittest_TestMetaStoreEventListenerOnlyOnCommit (id INT) "
+        + "PARTITIONED BY (ds STRING)");
+    listSize += 1;
+    notifyList = DummyListener.notifyList;
+    assertEquals(notifyList.size(), listSize);
+    assertTrue(DummyListener.getLastEvent().getStatus());
+
+    driver.run("ALTER TABLE unittest_TestMetaStoreEventListenerOnlyOnCommit "
+        + "ADD PARTITION(ds='foo1')");
+    listSize += 1;
+    notifyList = DummyListener.notifyList;
+    assertEquals(notifyList.size(), listSize);
+    assertTrue(DummyListener.getLastEvent().getStatus());
+
+    DummyRawStoreControlledCommit.setCommitSucceed(false);
+
+    driver.run("ALTER TABLE unittest_TestMetaStoreEventListenerOnlyOnCommit "
+        + "ADD PARTITION(ds='foo2')");
+    listSize += 1;
+    notifyList = DummyListener.notifyList;
+    assertEquals(notifyList.size(), listSize);
+    assertFalse(DummyListener.getLastEvent().getStatus());
+
+  }
+}
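
Editor's note: the sketch below is an illustrative example, not part of r1448101. It shows one way a MetaStoreEventListener could consume the commit status that this patch now propagates into AddPartitionEvent. The class StatusAwareListener, its package, and the use of commons-logging are invented for the example; MetaStoreEventListener, onAddPartition(), AddPartitionEvent, and getStatus() are the APIs exercised by the patch and the test above. Depending on the Hive version, the listener base class declares further abstract callbacks (onCreateTable, onDropPartition, and so on) that a real implementation would also have to override; they are omitted here for brevity.

// Hypothetical example; not part of this commit.
package org.apache.hadoop.hive.metastore.example;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.MetaStoreEventListener;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.events.AddPartitionEvent;

public class StatusAwareListener extends MetaStoreEventListener {

  private static final Log LOG = LogFactory.getLog(StatusAwareListener.class);

  public StatusAwareListener(Configuration config) {
    super(config);
  }

  @Override
  public void onAddPartition(AddPartitionEvent partitionEvent) throws MetaException {
    // With HIVE-4004, getStatus() reflects whether the RawStore commit succeeded,
    // so a listener can skip partitions whose metadata was rolled back.
    if (partitionEvent.getStatus()) {
      LOG.info("add_partition committed; safe to act on the new partition");
    } else {
      LOG.warn("add_partition was rolled back; skipping downstream processing");
    }
  }

  // NOTE: other MetaStoreEventListener callbacks omitted for brevity; a real
  // subclass would need to override whatever abstract methods its Hive version declares.
}

Such a listener would be wired in the same way the test configures DummyListener, i.e. via hive.metastore.event.listeners (HiveConf.ConfVars.METASTORE_EVENT_LISTENERS).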