Subject: svn commit: r1437643 - in /hbase/trunk: hbase-common/src/main/java/org/apache/hadoop/hbase/util/ hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/ hbase-server/src/test/java/org...
Date: Wed, 23 Jan 2013 19:32:06 -0000
To: commits@hbase.apache.org
From: enis@apache.org

Author: enis
Date: Wed Jan 23 19:32:06 2013
New Revision: 1437643

URL: http://svn.apache.org/viewvc?rev=1437643&view=rev
Log:
HBASE-6832. [WINDOWS] Tests should use explicit timestamp for Puts, and not rely on implicit RS timing
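
The change applies one pattern throughout the tests: give every Put an explicit timestamp taken
from EnvironmentEdgeManager instead of letting the region server stamp the cell from its wall
clock, whose resolution on Windows is coarse enough that consecutive operations can land on the
same millisecond. A minimal sketch of the pattern, assuming an HRegion named "region" and byte[]
constants ROW, FAMILY, QUALIFIER and VALUE (illustrative names, not taken from this patch):

    // Read "now" through EnvironmentEdgeManager so an injected test clock is honored.
    long ts = EnvironmentEdgeManager.currentTimeMillis();
    Put p = new Put(ROW, ts);               // explicit row-level timestamp
    p.add(FAMILY, QUALIFIER, ts, VALUE);    // explicit cell-level timestamp
    region.put(p);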

Modified:
    hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/IncrementingEnvironmentEdge.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverBypass.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServerCmdLine.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIncrementingEnvironmentEdge.java

Modified: hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/IncrementingEnvironmentEdge.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/IncrementingEnvironmentEdge.java?rev=1437643&r1=1437642&r2=1437643&view=diff
==============================================================================
--- hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/IncrementingEnvironmentEdge.java (original)
+++ hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/IncrementingEnvironmentEdge.java Wed Jan 23 19:32:06 2013
@@ -26,7 +26,22 @@ import org.apache.hadoop.classification.
 @InterfaceAudience.Private
 public class IncrementingEnvironmentEdge implements EnvironmentEdge {
 
-  private long timeIncrement = 1;
+  private long timeIncrement;
+
+  /**
+   * Construct an incremental edge starting from currentTimeMillis
+   */
+  public IncrementingEnvironmentEdge() {
+    this(System.currentTimeMillis());
+  }
+
+  /**
+   * Construct an incremental edge with an initial amount
+   * @param initialAmount the initial value to start with
+   */
+  public IncrementingEnvironmentEdge(long initialAmount) {
+    this.timeIncrement = initialAmount;
+  }
 
   /**
    * {@inheritDoc}
@@ -38,4 +53,12 @@ public class IncrementingEnvironmentEdge
   public synchronized long currentTimeMillis() {
     return timeIncrement++;
   }
+
+  /**
+   * Increment the time by the given amount
+   */
+  public synchronized long incrementTime(long amount) {
+    timeIncrement += amount;
+    return timeIncrement;
+  }
 }
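
With the new default constructor the edge seeds itself from the real clock, and incrementTime()
lets a test jump time forward explicitly. The test changes below all follow the same recipe:
inject the edge, read time through EnvironmentEdgeManager, reset when done. A condensed sketch of
that recipe (not a hunk from this commit):

    IncrementingEnvironmentEdge edge = new IncrementingEnvironmentEdge(); // starts at System.currentTimeMillis()
    EnvironmentEdgeManagerTestHelper.injectEdge(edge);
    try {
      long t1 = EnvironmentEdgeManager.currentTimeMillis(); // returns the edge value, then advances it by 1
      edge.incrementTime(10000);                            // jump 10 seconds ahead instead of sleeping
      long t2 = EnvironmentEdgeManager.currentTimeMillis();
      assert t2 > t1;                                       // time always moves, even on a coarse OS clock
    } finally {
      EnvironmentEdgeManager.reset();                       // restore the default wall-clock edge
    }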

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverBypass.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverBypass.java?rev=1437643&r1=1437642&r2=1437643&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverBypass.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverBypass.java Wed Jan 23 19:32:06 2013
@@ -18,31 +18,35 @@
  */
 package org.apache.hadoop.hbase.coprocessor;
 
+import static junit.framework.Assert.assertEquals;
+
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.*;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.MediumTests;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver;
-import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
-import org.apache.hadoop.hbase.coprocessor.ObserverContext;
-import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
 import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper;
+import org.apache.hadoop.hbase.util.IncrementingEnvironmentEdge;
 import org.junit.AfterClass;
+import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-import static org.junit.Assert.assertEquals;
-
 @Category(MediumTests.class)
 public class TestRegionObserverBypass {
 
   private static HBaseTestingUtility util;
@@ -60,7 +64,6 @@ public class TestRegionObserverBypass {
         TestCoprocessor.class.getName());
     util = new HBaseTestingUtility(conf);
     util.startMiniCluster();
-    util.createTable(tableName, new byte[][] {dummy, test});
   }
 
   @AfterClass
@@ -68,6 +71,18 @@ public class TestRegionObserverBypass {
     util.shutdownMiniCluster();
   }
 
+  @Before
+  public void setUp() throws Exception {
+    HBaseAdmin admin = util.getHBaseAdmin();
+    if (admin.tableExists(tableName)) {
+      if (admin.isTableEnabled(tableName)) {
+        admin.disableTable(tableName);
+      }
+      admin.deleteTable(tableName);
+    }
+    util.createTable(tableName, new byte[][] {dummy, test});
+  }
+
   /**
    * do a single put that is bypassed by a RegionObserver
    * @throws Exception
@@ -89,6 +104,10 @@ public class TestRegionObserverBypass {
    */
   @Test
  public void testMulti() throws Exception {
+    //ensure that server time increments every time we do an operation, otherwise
+    //previous deletes will eclipse successive puts having the same timestamp
+    EnvironmentEdgeManagerTestHelper.injectEdge(new IncrementingEnvironmentEdge());
+
     HTable t = new HTable(util.getConfiguration(), tableName);
     List puts = new ArrayList();
     Put p = new Put(row1);
@@ -170,6 +189,8 @@ public class TestRegionObserverBypass {
     checkRowAndDelete(t,row2,1);
     checkRowAndDelete(t,row3,0);
     t.close();
+
+    EnvironmentEdgeManager.reset();
   }
 
   private void checkRowAndDelete(HTable t, byte[] row, int count) throws IOException {

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java?rev=1437643&r1=1437642&r2=1437643&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java Wed Jan 23 19:32:06 2013
@@ -40,6 +40,7 @@ import org.apache.hadoop.hbase.io.hfile.
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.InternalScanner;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.Threads;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -109,20 +110,24 @@ public class TestScannerSelectionUsingTT
         HRegion.createHRegion(info, TEST_UTIL.getDataTestDir(info.getEncodedName()),
             conf, htd);
 
+    long ts = EnvironmentEdgeManager.currentTimeMillis();
+    long version = 0; //make sure each new set of Put's have a new ts
     for (int iFile = 0; iFile < totalNumFiles; ++iFile) {
       if (iFile == NUM_EXPIRED_FILES) {
         Threads.sleepWithoutInterrupt(TTL_MS);
+        version += TTL_MS;
       }
 
       for (int iRow = 0; iRow < NUM_ROWS; ++iRow) {
         Put put = new Put(Bytes.toBytes("row" + iRow));
         for (int iCol = 0; iCol < NUM_COLS_PER_ROW; ++iCol) {
           put.add(FAMILY_BYTES, Bytes.toBytes("col" + iCol),
-              Bytes.toBytes("value" + iFile + "_" + iRow + "_" + iCol));
+              ts + version, Bytes.toBytes("value" + iFile + "_" + iRow + "_" + iCol));
         }
         region.put(put);
       }
       region.flushcache();
+      version++;
     }
 
     Scan scan = new Scan();

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java?rev=1437643&r1=1437642&r2=1437643&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java Wed Jan 23 19:32:06 2013
@@ -63,7 +63,7 @@ public class TestLogsCleaner {
   public void testLogCleaning() throws Exception{
     Configuration conf = TEST_UTIL.getConfiguration();
     // set TTL
-    long ttl = 2000;
+    long ttl = 10000;
     conf.setLong("hbase.master.logcleaner.ttl", ttl);
     conf.setBoolean(HConstants.REPLICATION_ENABLE_KEY, true);
     Replication.decorateMasterConfiguration(conf);

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java?rev=1437643&r1=1437642&r2=1437643&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java Wed Jan 23 19:32:06 2013
@@ -21,13 +21,21 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.hadoop.hbase.*;
+import org.apache.hadoop.hbase.DoNotRetryIOException;
+import org.apache.hadoop.hbase.HBaseTestCase;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.SmallTests;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper;
+import org.apache.hadoop.hbase.util.IncrementingEnvironmentEdge;
 import org.junit.experimental.categories.Category;
 
 @Category(SmallTests.class)
@@ -43,6 +51,28 @@ public class TestKeepDeletes extends HBa
   private final byte[] c0 = COLUMNS[0];
   private final byte[] c1 = COLUMNS[1];
 
+  @Override
+  protected void setUp() throws Exception {
+    super.setUp();
+    /* HBASE-6832: [WINDOWS] Tests should use explicit timestamp for Puts, and not rely on
+     * implicit RS timing.
+     * Use an explicit timer (IncrementingEnvironmentEdge) so that the put, delete
+     * compact timestamps are tracked. Otherwise, forced major compaction will not purge
+     * Delete's having the same timestamp. see ScanQueryMatcher.match():
+     *   if (retainDeletesInOutput
+     *       || (!isUserScan && (EnvironmentEdgeManager.currentTimeMillis() - timestamp)
+     *           <= timeToPurgeDeletes) ... )
+     *
+     */
+    EnvironmentEdgeManagerTestHelper.injectEdge(new IncrementingEnvironmentEdge());
+  }
+
+  @Override
+  protected void tearDown() throws Exception {
+    super.tearDown();
+    EnvironmentEdgeManager.reset();
+  }
+
   /**
    * Make sure that deleted rows are retained.
    * Family delete markers are deleted.
@@ -55,7 +85,7 @@ public class TestKeepDeletes extends HBa
         HConstants.FOREVER, true);
     HRegion region = createNewHRegion(htd, null, null);
 
-    long ts = System.currentTimeMillis();
+    long ts = EnvironmentEdgeManager.currentTimeMillis();
     Put p = new Put(T1, ts);
     p.add(c0, c0, T1);
     region.put(p);
@@ -138,7 +168,7 @@ public class TestKeepDeletes extends HBa
   }
 
   /**
-   * Even when the store does not keep deletes a "raw" scan will 
+   * Even when the store does not keep deletes a "raw" scan will
    * return everything it can find (unless discarding cells is guaranteed
    * to have no effect).
   * Assuming this the desired behavior. Could also disallow "raw" scanning
@@ -151,7 +181,7 @@ public class TestKeepDeletes extends HBa
         HConstants.FOREVER, false);
     HRegion region = createNewHRegion(htd, null, null);
 
-    long ts = System.currentTimeMillis();
+    long ts = EnvironmentEdgeManager.currentTimeMillis();
     Put p = new Put(T1, ts);
     p.add(c0, c0, T1);
     region.put(p);
@@ -195,7 +225,7 @@ public class TestKeepDeletes extends HBa
         HConstants.FOREVER, false);
     HRegion region = createNewHRegion(htd, null, null);
 
-    long ts = System.currentTimeMillis();
+    long ts = EnvironmentEdgeManager.currentTimeMillis();
     Put p = new Put(T1, ts);
     p.add(c0, c0, T1);
     region.put(p);
@@ -242,7 +272,7 @@ public class TestKeepDeletes extends HBa
     s.setRaw(true);
     s.setMaxVersions();
     s.addColumn(c0, c0);
-    
+
     try {
       InternalScanner scan = region.getScanner(s);
       fail("raw scanner with columns should have failed");
@@ -261,7 +291,7 @@ public class TestKeepDeletes extends HBa
         HConstants.FOREVER, true);
     HRegion region = createNewHRegion(htd, null, null);
 
-    long ts = System.currentTimeMillis();
+    long ts = EnvironmentEdgeManager.currentTimeMillis();
     Put p = new Put(T1, ts);
     p.add(c0, c0, T1);
     region.put(p);
@@ -307,7 +337,7 @@ public class TestKeepDeletes extends HBa
        HConstants.FOREVER, true);
     HRegion region = createNewHRegion(htd, null, null);
 
-    long ts = System.currentTimeMillis();
+    long ts = EnvironmentEdgeManager.currentTimeMillis();
     Delete d = new Delete(T1, ts);
     d.deleteColumns(c0, c0, ts);
@@ -320,7 +350,7 @@ public class TestKeepDeletes extends HBa
     d = new Delete(T1, ts);
     d.deleteColumn(c0, c0, ts+1);
     region.delete(d, true);
-    
+
     d = new Delete(T1, ts);
     d.deleteColumn(c0, c0, ts+2);
     region.delete(d, true);
@@ -349,7 +379,7 @@ public class TestKeepDeletes extends HBa
        HConstants.FOREVER, true);
     HRegion region = createNewHRegion(htd, null, null);
 
-    long ts = System.currentTimeMillis();
+    long ts = EnvironmentEdgeManager.currentTimeMillis();
     Put p = new Put(T1, ts);
     p.add(c0, c0, T1);
@@ -372,7 +402,7 @@ public class TestKeepDeletes extends HBa
     d = new Delete(T1, ts);
     d.deleteColumn(c0, c0, ts+1);
     region.delete(d, true);
-    
+
     d = new Delete(T1, ts);
     d.deleteColumn(c0, c0, ts+2);
     region.delete(d, true);
@@ -411,7 +441,7 @@ public class TestKeepDeletes extends HBa
        HConstants.FOREVER, true);
     HRegion region = createNewHRegion(htd, null, null);
 
-    long ts = System.currentTimeMillis();
+    long ts = EnvironmentEdgeManager.currentTimeMillis();
     Put p = new Put(T1, ts);
     p.add(c0, c0, T1);
     p.add(c0, c1, T1);
@@ -492,7 +522,7 @@ public class TestKeepDeletes extends HBa
        HConstants.FOREVER, true);
     HRegion region = createNewHRegion(htd, null, null);
 
-    long ts = System.currentTimeMillis();
+    long ts = EnvironmentEdgeManager.currentTimeMillis();
     Put p = new Put(T1, ts);
     p.add(c0, c0, T1);
     region.put(p);
@@ -502,7 +532,7 @@ public class TestKeepDeletes extends HBa
     p = new Put(T1, ts-10);
     p.add(c0, c1, T1);
     region.put(p);
-    
+
     Delete d = new Delete(T1, ts);
     // test corner case (Put and Delete have same TS)
     d.deleteColumns(c0, c0, ts);
@@ -511,7 +541,7 @@ public class TestKeepDeletes extends HBa
     d = new Delete(T1, ts+1);
     d.deleteColumn(c0, c0, ts+1);
     region.delete(d, true);
-    
+
     d = new Delete(T1, ts+3);
     d.deleteColumn(c0, c0, ts+3);
     region.delete(d, true);
@@ -527,7 +557,7 @@ public class TestKeepDeletes extends HBa
     p = new Put(T1, ts+2);
     p.add(c0, c0, T2);
     region.put(p);
-    
+
     // delete, put, delete, delete, put
     assertEquals(3, countDeleteMarkers(region));
@@ -584,7 +614,7 @@ public class TestKeepDeletes extends HBa
        HConstants.FOREVER, true);
     HRegion region = createNewHRegion(htd, null, null);
 
-    long ts = System.currentTimeMillis();
+    long ts = EnvironmentEdgeManager.currentTimeMillis();
     Put p = new Put(T1, ts);
     p.add(c0, c0, T1);
@@ -634,7 +664,7 @@ public class TestKeepDeletes extends HBa
     HTableDescriptor htd = createTableDescriptor(getName(), 3, 1000, 1, true);
     HRegion region = createNewHRegion(htd, null, null);
 
-    long ts = System.currentTimeMillis() - 2000; // 2s in the past
+    long ts = EnvironmentEdgeManager.currentTimeMillis() - 2000; // 2s in the past
     Put p = new Put(T1, ts);
     p.add(c0, c0, T3);
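
The repeated System.currentTimeMillis() -> EnvironmentEdgeManager.currentTimeMillis() substitution
above matters because the delete-purging check quoted in setUp() compares a delete marker's
timestamp against "now" as seen by EnvironmentEdgeManager. Roughly (a simplification of the
condition quoted in the setUp() comment, not the real ScanQueryMatcher code; markerTimestamp is an
illustrative name):

    // During a non-user-scan (compaction) read, a delete marker is kept while it is still "fresh":
    long now = EnvironmentEdgeManager.currentTimeMillis();
    boolean keepMarker = (now - markerTimestamp) <= timeToPurgeDeletes;
    // With the raw OS clock on Windows, put, delete and compact can all observe the same
    // millisecond, so the marker never ages past timeToPurgeDeletes. With the injected
    // IncrementingEnvironmentEdge every currentTimeMillis() call moves "now" forward, and the
    // forced major compaction purges the marker as these tests expect.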

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java?rev=1437643&r1=1437642&r2=1437643&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java Wed Jan 23 19:32:06 2013
@@ -63,6 +63,7 @@ import org.apache.hadoop.hbase.security.
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper;
+import org.apache.hadoop.hbase.util.IncrementingEnvironmentEdge;
 import org.apache.hadoop.hbase.util.ManualEnvironmentEdge;
 import org.apache.hadoop.util.Progressable;
 import org.junit.experimental.categories.Category;
@@ -186,6 +187,8 @@ public class TestStore extends TestCase
   public void testDeleteExpiredStoreFiles() throws Exception {
     int storeFileNum = 4;
     int ttl = 4;
+    IncrementingEnvironmentEdge edge = new IncrementingEnvironmentEdge();
+    EnvironmentEdgeManagerTestHelper.injectEdge(edge);
 
     Configuration conf = HBaseConfiguration.create();
     // Enable the expired store file deletion
@@ -205,7 +208,7 @@ public class TestStore extends TestCase
       this.store.add(new KeyValue(row, family, qf2, timeStamp, (byte[]) null));
       this.store.add(new KeyValue(row, family, qf3, timeStamp, (byte[]) null));
       flush(i);
-      Thread.sleep(sleepTime);
+      edge.incrementTime(sleepTime);
     }
 
     // Verify the total number of store files
@@ -220,8 +223,8 @@ public class TestStore extends TestCase
       // If not the first compaction, there is another empty store file,
       assertEquals(Math.min(i, 2), cr.getFiles().size());
       for (int j = 0; i < cr.getFiles().size(); j++) {
-        assertTrue(cr.getFiles().get(j).getReader().getMaxTimestamp() < (System
-            .currentTimeMillis() - this.store.scanInfo.getTtl()));
+        assertTrue(cr.getFiles().get(j).getReader().getMaxTimestamp() <
+            (EnvironmentEdgeManager.currentTimeMillis() - this.store.scanInfo.getTtl()));
       }
       // Verify that the expired store file is compacted to an empty store file.
       StoreFile compactedFile = this.store.compact(cr);
@@ -229,7 +232,7 @@ public class TestStore extends TestCase
       assertEquals(0, compactedFile.getReader().getEntries());
 
       // Let the next store file expired.
-      Thread.sleep(sleepTime);
+      edge.incrementTime(sleepTime);
     }
   }
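
The TestStore hunks take the same idea one step further: rather than sleeping for real, the test
advances the injected clock, and the store's TTL logic, which also reads time through
EnvironmentEdgeManager, immediately sees the older files as expired. A condensed sketch (ttlMillis
and writeTime are illustrative names; the surrounding store setup is omitted):

    IncrementingEnvironmentEdge edge = new IncrementingEnvironmentEdge();
    EnvironmentEdgeManagerTestHelper.injectEdge(edge);

    long writeTime = EnvironmentEdgeManager.currentTimeMillis(); // cells written "now"
    edge.incrementTime(ttlMillis + 1);                           // jump past the TTL instead of Thread.sleep
    long now = EnvironmentEdgeManager.currentTimeMillis();
    assertTrue(writeTime < now - ttlMillis);                     // same shape as the assertion in the hunk above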

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServerCmdLine.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServerCmdLine.java?rev=1437643&r1=1437642&r2=1437643&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServerCmdLine.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServerCmdLine.java Wed Jan 23 19:32:06 2013
@@ -32,6 +32,9 @@ import org.apache.hadoop.hbase.HBaseTest
 import org.apache.hadoop.hbase.LargeTests;
 import org.apache.hadoop.hbase.thrift.ThriftServerRunner.ImplType;
 import org.apache.hadoop.hbase.thrift.generated.Hbase;
+import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper;
+import org.apache.hadoop.hbase.util.IncrementingEnvironmentEdge;
 import org.apache.hadoop.hbase.util.Threads;
 import org.apache.thrift.protocol.TBinaryProtocol;
 import org.apache.thrift.protocol.TCompactProtocol;
@@ -115,11 +118,15 @@ public class TestThriftServerCmdLine {
   @BeforeClass
   public static void setUpBeforeClass() throws Exception {
     TEST_UTIL.startMiniCluster();
+    //ensure that server time increments every time we do an operation, otherwise
+    //successive puts having the same timestamp will override each other
+    EnvironmentEdgeManagerTestHelper.injectEdge(new IncrementingEnvironmentEdge());
   }
 
   @AfterClass
   public static void tearDownAfterClass() throws Exception {
     TEST_UTIL.shutdownMiniCluster();
+    EnvironmentEdgeManager.reset();
   }
 
   private void startCmdLineThread(final String[] args) {

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIncrementingEnvironmentEdge.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIncrementingEnvironmentEdge.java?rev=1437643&r1=1437642&r2=1437643&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIncrementingEnvironmentEdge.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIncrementingEnvironmentEdge.java Wed Jan 23 19:32:06 2013
@@ -34,7 +34,7 @@ public class TestIncrementingEnvironment
   @Test
   public void testGetCurrentTimeUsesSystemClock() {
-    IncrementingEnvironmentEdge edge = new IncrementingEnvironmentEdge();
+    IncrementingEnvironmentEdge edge = new IncrementingEnvironmentEdge(1);
     assertEquals(1, edge.currentTimeMillis());
     assertEquals(2, edge.currentTimeMillis());
     assertEquals(3, edge.currentTimeMillis());