Subject: svn commit: r1484503 - /hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestHFileArchiving.java
Date: Mon, 20 May 2013 15:44:14 -0000
From: stack@apache.org
To: commits@hbase.apache.org
Reply-To: dev@hbase.apache.org
Message-Id: <20130520154415.030DE23889E3@eris.apache.org>

Author: stack
Date: Mon May 20 15:44:14 2013
New Revision: 1484503

URL: http://svn.apache.org/r1484503
Log:
HBASE-8067 TestHFileArchiving.testArchiveOnTableDelete sometimes fails

Modified:
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestHFileArchiving.java

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestHFileArchiving.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestHFileArchiving.java?rev=1484503&r1=1484502&r2=1484503&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestHFileArchiving.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestHFileArchiving.java Mon May 20 15:44:14 2013
@@ -51,7 +51,6 @@ import org.apache.hadoop.hbase.util.Stop
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Assert;
-import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -63,11 +62,8 @@ import org.junit.experimental.categories
 @Category(MediumTests.class)
 public class TestHFileArchiving {
 
-  private static final String STRING_TABLE_NAME = "test_table";
-
   private static final Log LOG = LogFactory.getLog(TestHFileArchiving.class);
   private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
-  private static final byte[] TABLE_NAME = Bytes.toBytes(STRING_TABLE_NAME);
   private static final byte[] TEST_FAM = Bytes.toBytes("fam");
 
   /**
@@ -95,18 +91,9 @@ public class TestHFileArchiving {
       ConstantSizeRegionSplitPolicy.class.getName());
   }
 
-  @Before
-  public void setup() throws Exception {
-    UTIL.createTable(TABLE_NAME, TEST_FAM);
-  }
-
   @After
   public void tearDown() throws Exception {
-    // cleanup the cluster if its up still
-    if (UTIL.getHBaseAdmin().tableExists(STRING_TABLE_NAME)) {
-      UTIL.deleteTable(TABLE_NAME);
-    }
-    // and cleanup the archive directory
+    // cleanup the archive directory
     try {
       clearArchiveDirectory();
     } catch (IOException e) {
@@ -125,6 +112,9 @@ public class TestHFileArchiving {
 
   @Test
   public void testRemovesRegionDirOnArchive() throws Exception {
+    byte[] TABLE_NAME = Bytes.toBytes("testRemovesRegionDirOnArchive");
+    UTIL.createTable(TABLE_NAME, TEST_FAM);
+
     final HBaseAdmin admin = UTIL.getHBaseAdmin();
 
     // get the current store files for the region
@@ -137,7 +127,7 @@ public class TestHFileArchiving {
     UTIL.loadRegion(region, TEST_FAM);
 
     // shutdown the table so we can manipulate the files
-    admin.disableTable(STRING_TABLE_NAME);
+    admin.disableTable(TABLE_NAME);
 
     FileSystem fs = UTIL.getTestFileSystem();
 
@@ -161,6 +151,8 @@ public class TestHFileArchiving {
 
     // then ensure the region's directory isn't present
     assertFalse(fs.exists(regionDir));
+
+    UTIL.deleteTable(TABLE_NAME);
   }
 
   /**
@@ -170,6 +162,9 @@ public class TestHFileArchiving {
    */
   @Test
   public void testDeleteRegionWithNoStoreFiles() throws Exception {
+    byte[] TABLE_NAME = Bytes.toBytes("testDeleteRegionWithNoStoreFiles");
+    UTIL.createTable(TABLE_NAME, TEST_FAM);
+
     // get the current store files for the region
     List<HRegion> servingRegions = UTIL.getHBaseCluster().getRegions(TABLE_NAME);
     // make sure we only have 1 region serving this table
@@ -209,10 +204,15 @@ public class TestHFileArchiving {
 
     // and check to make sure the region directoy got deleted
     assertFalse("Region directory (" + regionDir + "), still exists.", fs.exists(regionDir));
+
+    UTIL.deleteTable(TABLE_NAME);
   }
 
   @Test
   public void testArchiveOnTableDelete() throws Exception {
+    byte[] TABLE_NAME = Bytes.toBytes("testArchiveOnTableDelete");
+    UTIL.createTable(TABLE_NAME, TEST_FAM);
+
     List<HRegion> servingRegions = UTIL.getHBaseCluster().getRegions(TABLE_NAME);
     // make sure we only have 1 region serving this table
     assertEquals(1, servingRegions.size());
@@ -273,6 +273,9 @@ public class TestHFileArchiving {
    */
   @Test
   public void testArchiveOnTableFamilyDelete() throws Exception {
+    byte[] TABLE_NAME = Bytes.toBytes("testArchiveOnTableFamilyDelete");
+    UTIL.createTable(TABLE_NAME, TEST_FAM);
+
     List<HRegion> servingRegions = UTIL.getHBaseCluster().getRegions(TABLE_NAME);
     // make sure we only have 1 region serving this table
     assertEquals(1, servingRegions.size());
@@ -324,6 +327,8 @@ public class TestHFileArchiving {
 
     assertTrue("Archived files are missing some of the store files!",
       archivedFiles.containsAll(storeFiles));
+
+    UTIL.deleteTable(TABLE_NAME);
   }
 
   /**
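
The change above drops the shared "test_table" (created in @Before and dropped in @After) in favour of each test creating a table named after itself and deleting it at the end of its own body, so state left behind by one test cannot collide with the next. Below is a minimal sketch of that per-test table pattern; the class, test, and table names are illustrative only, and the mini-cluster setup is assumed to mirror what the real test class already does in @BeforeClass.

import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;

public class PerTestTableSketch {
  private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
  private static final byte[] TEST_FAM = Bytes.toBytes("fam");

  @BeforeClass
  public static void setupCluster() throws Exception {
    // Assumed: one mini cluster shared by all tests, as in TestHFileArchiving.
    UTIL.startMiniCluster();
  }

  @AfterClass
  public static void shutdownCluster() throws Exception {
    UTIL.shutdownMiniCluster();
  }

  @Test
  public void testSomething() throws Exception {
    // Name the table after the test so no two tests share table state.
    byte[] tableName = Bytes.toBytes("testSomething");
    UTIL.createTable(tableName, TEST_FAM);
    try {
      // ... exercise the behaviour under test ...
    } finally {
      // Drop the table in the test itself rather than in a shared @After.
      UTIL.deleteTable(tableName);
    }
  }
}

The commit itself simply calls UTIL.deleteTable at the tail of each test method; the finally block above is a slightly more defensive rendering of the same idea, not what the patch does.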