From: syuanjiang@apache.org
To: commits@hbase.apache.org
Reply-To: dev@hbase.apache.org
Date: Fri, 26 Feb 2016 23:09:53 -0000
Subject: [32/37] hbase git commit: HBASE-15205 Do not find the replication scope for every WAL#append() (Ram)

http://git-wip-us.apache.org/repos/asf/hbase/blob/8f2bd060/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWALLockup.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWALLockup.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWALLockup.java
index 567e09d..e9bb468 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWALLockup.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWALLockup.java
@@ -22,6 +22,8 @@ package org.apache.hadoop.hbase.regionserver;
 import static org.junit.Assert.assertTrue;
 
 import java.io.IOException;
+import java.util.NavigableMap;
+import java.util.TreeMap;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeUnit;
 
@@ -208,13 +210,17 @@ public class TestWALLockup {
     HTableDescriptor htd = new HTableDescriptor(TableName.META_TABLE_NAME);
     final HRegion region = initHRegion(tableName, null, null, dodgyWAL);
     byte [] bytes = Bytes.toBytes(getName());
+    NavigableMap<byte[], Integer> scopes = new TreeMap<byte[], Integer>(
+        Bytes.BYTES_COMPARATOR);
+    scopes.put(COLUMN_FAMILY_BYTES, 0);
     try {
       // First get something into memstore. Make a Put and then pull the Cell out of it. Will
       // manage append and sync carefully in below to manufacture hang. We keep adding same
       // edit. WAL subsystem doesn't care.
       Put put = new Put(bytes);
       put.addColumn(COLUMN_FAMILY_BYTES, Bytes.toBytes("1"), bytes);
-      WALKey key = new WALKey(region.getRegionInfo().getEncodedNameAsBytes(), htd.getTableName());
+      WALKey key = new WALKey(region.getRegionInfo().getEncodedNameAsBytes(), htd.getTableName(),
+          scopes);
       WALEdit edit = new WALEdit();
       CellScanner CellScanner = put.cellScanner();
       assertTrue(CellScanner.advance());
@@ -228,7 +234,7 @@ public class TestWALLockup {
       LOG.info("SET throwing of exception on append");
       dodgyWAL.throwException = true;
       // This append provokes a WAL roll request
-      dodgyWAL.append(htd, region.getRegionInfo(), key, edit, true);
+      dodgyWAL.append(region.getRegionInfo(), key, edit, true);
       boolean exception = false;
       try {
         dodgyWAL.sync();

http://git-wip-us.apache.org/repos/asf/hbase/blob/8f2bd060/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java
index fd6d535..c60b225 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java
@@ -28,7 +28,9 @@ import java.lang.reflect.Field;
 import java.util.ArrayList;
 import java.util.Comparator;
 import java.util.List;
+import java.util.NavigableMap;
 import java.util.Set;
+import java.util.TreeMap;
 import java.util.UUID;
 
 import org.apache.commons.lang.mutable.MutableBoolean;
@@ -152,12 +154,9 @@ public class TestFSHLog {
     }
   }
 
-  protected void addEdits(WAL log,
-                          HRegionInfo hri,
-                          HTableDescriptor htd,
-                          int times,
-                          MultiVersionConcurrencyControl mvcc)
-      throws IOException {
+  protected void addEdits(WAL log, HRegionInfo hri, HTableDescriptor htd, int times,
+      MultiVersionConcurrencyControl mvcc, NavigableMap<byte[], Integer> scopes)
+      throws IOException {
     final byte[] row = Bytes.toBytes("row");
     for (int i = 0; i < times; i++) {
       long timestamp = System.currentTimeMillis();
@@ -165,8 +164,8 @@
       cols.add(new KeyValue(row, row, row, timestamp, row));
       WALKey key = new WALKey(hri.getEncodedNameAsBytes(), htd.getTableName(),
           WALKey.NO_SEQUENCE_ID, timestamp, WALKey.EMPTY_UUIDS, HConstants.NO_NONCE,
-          HConstants.NO_NONCE, mvcc);
-      log.append(htd, hri, key, cols, true);
+          HConstants.NO_NONCE, mvcc, scopes);
+      log.append(hri, key, cols, true);
     }
     log.sync();
   }
@@ -261,11 +260,21 @@ public class TestFSHLog {
         new HRegionInfo(t2.getTableName(), HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW);
     // add edits and roll the wal
     MultiVersionConcurrencyControl mvcc = new MultiVersionConcurrencyControl();
+    NavigableMap<byte[], Integer> scopes1 = new TreeMap<byte[], Integer>(
+        Bytes.BYTES_COMPARATOR);
+    for(byte[] fam : t1.getFamiliesKeys()) {
+      scopes1.put(fam, 0);
+    }
+    NavigableMap<byte[], Integer> scopes2 = new TreeMap<byte[], Integer>(
+        Bytes.BYTES_COMPARATOR);
+    for(byte[] fam : t2.getFamiliesKeys()) {
+      scopes2.put(fam, 0);
+    }
     try {
-      addEdits(wal, hri1, t1, 2, mvcc);
+      addEdits(wal, hri1, t1, 2, mvcc, scopes1);
       wal.rollWriter();
       // add some more edits and roll the wal. This would reach the log number threshold
-      addEdits(wal, hri1, t1, 2, mvcc);
+      addEdits(wal, hri1, t1, 2, mvcc, scopes1);
       wal.rollWriter();
       // with above rollWriter call, the max logs limit is reached.
assertTrue(wal.getNumRolledLogFiles() == 2); @@ -276,7 +285,7 @@ public class TestFSHLog { assertEquals(1, regionsToFlush.length); assertEquals(hri1.getEncodedNameAsBytes(), regionsToFlush[0]); // insert edits in second region - addEdits(wal, hri2, t2, 2, mvcc); + addEdits(wal, hri2, t2, 2, mvcc, scopes2); // get the regions to flush, it should still read region1. regionsToFlush = wal.findRegionsToForceFlush(); assertEquals(regionsToFlush.length, 1); @@ -293,12 +302,12 @@ public class TestFSHLog { // no wal should remain now. assertEquals(0, wal.getNumRolledLogFiles()); // add edits both to region 1 and region 2, and roll. - addEdits(wal, hri1, t1, 2, mvcc); - addEdits(wal, hri2, t2, 2, mvcc); + addEdits(wal, hri1, t1, 2, mvcc, scopes1); + addEdits(wal, hri2, t2, 2, mvcc, scopes2); wal.rollWriter(); // add edits and roll the writer, to reach the max logs limit. assertEquals(1, wal.getNumRolledLogFiles()); - addEdits(wal, hri1, t1, 2, mvcc); + addEdits(wal, hri1, t1, 2, mvcc, scopes1); wal.rollWriter(); // it should return two regions to flush, as the oldest wal file has entries // for both regions. @@ -310,7 +319,7 @@ public class TestFSHLog { wal.rollWriter(true); assertEquals(0, wal.getNumRolledLogFiles()); // Add an edit to region1, and roll the wal. - addEdits(wal, hri1, t1, 2, mvcc); + addEdits(wal, hri1, t1, 2, mvcc, scopes1); // tests partial flush: roll on a partial flush, and ensure that wal is not archived. wal.startCacheFlush(hri1.getEncodedNameAsBytes(), t1.getFamiliesKeys()); wal.rollWriter(); @@ -360,6 +369,11 @@ public class TestFSHLog { HBaseTestingUtility.closeRegionAndWAL(r); final int countPerFamily = 10; final MutableBoolean goslow = new MutableBoolean(false); + NavigableMap scopes = new TreeMap( + Bytes.BYTES_COMPARATOR); + for(byte[] fam : htd.getFamiliesKeys()) { + scopes.put(fam, 0); + } // subclass and doctor a method. FSHLog wal = new FSHLog(FileSystem.get(conf), TEST_UTIL.getDefaultRootDirPath(), testName, conf) { @@ -403,9 +417,9 @@ public class TestFSHLog { for (int i = 0; i < countPerFamily; i++) { final HRegionInfo info = region.getRegionInfo(); final WALKey logkey = new WALKey(info.getEncodedNameAsBytes(), tableName, - System.currentTimeMillis(), clusterIds, -1, -1, region.getMVCC()); - wal.append(htd, info, logkey, edits, true); - } + System.currentTimeMillis(), clusterIds, -1, -1, region.getMVCC(), scopes); + wal.append(info, logkey, edits, true); + } region.flush(true); // FlushResult.flushSequenceId is not visible here so go get the current sequence id. 
long currentSequenceId = region.getReadPoint(null); @@ -439,11 +453,16 @@ public class TestFSHLog { syncRunnerIndexField.set(ringBufferEventHandler, Integer.MAX_VALUE - 1); HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("t1")).addFamily(new HColumnDescriptor("row")); + NavigableMap scopes = new TreeMap( + Bytes.BYTES_COMPARATOR); + for(byte[] fam : htd.getFamiliesKeys()) { + scopes.put(fam, 0); + } HRegionInfo hri = new HRegionInfo(htd.getTableName(), HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW); MultiVersionConcurrencyControl mvcc = new MultiVersionConcurrencyControl(); for (int i = 0; i < 10; i++) { - addEdits(log, hri, htd, 1, mvcc); + addEdits(log, hri, htd, 1, mvcc, scopes); } } finally { log.close(); http://git-wip-us.apache.org/repos/asf/hbase/blob/8f2bd060/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollAbort.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollAbort.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollAbort.java index 9dccffe..c05e7f0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollAbort.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollAbort.java @@ -19,6 +19,9 @@ package org.apache.hadoop.hbase.regionserver.wal; import java.io.FileNotFoundException; import java.io.IOException; +import java.util.NavigableMap; +import java.util.TreeMap; + import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl; import org.apache.hadoop.hbase.testclassification.MediumTests; @@ -199,8 +202,13 @@ public class TestLogRollAbort { kvs.add(new KeyValue(Bytes.toBytes(i), tableName.getName(), tableName.getName())); HTableDescriptor htd = new HTableDescriptor(tableName); htd.addFamily(new HColumnDescriptor("column")); - log.append(htd, regioninfo, new WALKey(regioninfo.getEncodedNameAsBytes(), tableName, - System.currentTimeMillis(), mvcc), kvs, true); + NavigableMap scopes = new TreeMap( + Bytes.BYTES_COMPARATOR); + for(byte[] fam : htd.getFamiliesKeys()) { + scopes.put(fam, 0); + } + log.append(regioninfo, new WALKey(regioninfo.getEncodedNameAsBytes(), tableName, + System.currentTimeMillis(), mvcc, scopes), kvs, true); } // Send the data to HDFS datanodes and close the HDFS writer log.sync(); http://git-wip-us.apache.org/repos/asf/hbase/blob/8f2bd060/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollingNoCluster.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollingNoCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollingNoCluster.java index 0c68fc1..9ab7b7d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollingNoCluster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollingNoCluster.java @@ -20,6 +20,8 @@ package org.apache.hadoop.hbase.regionserver.wal; import static org.junit.Assert.assertFalse; import java.io.IOException; +import java.util.NavigableMap; +import java.util.TreeMap; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -139,8 +141,13 @@ public class TestLogRollingNoCluster { edit.add(new KeyValue(bytes, bytes, bytes, now, 
EMPTY_1K_ARRAY)); final HRegionInfo hri = HRegionInfo.FIRST_META_REGIONINFO; final HTableDescriptor htd = TEST_UTIL.getMetaTableDescriptor(); - final long txid = wal.append(htd, hri, new WALKey(hri.getEncodedNameAsBytes(), - TableName.META_TABLE_NAME, now, mvcc), edit, true); + NavigableMap scopes = new TreeMap( + Bytes.BYTES_COMPARATOR); + for(byte[] fam : htd.getFamiliesKeys()) { + scopes.put(fam, 0); + } + final long txid = wal.append(hri, new WALKey(hri.getEncodedNameAsBytes(), + TableName.META_TABLE_NAME, now, mvcc, scopes), edit, true); wal.sync(txid); } String msg = getName() + " finished"; http://git-wip-us.apache.org/repos/asf/hbase/blob/8f2bd060/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALActionsListener.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALActionsListener.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALActionsListener.java index a2c387b..b6bb7a0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALActionsListener.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALActionsListener.java @@ -20,6 +20,8 @@ package org.apache.hadoop.hbase.regionserver.wal; import java.util.ArrayList; import java.util.List; +import java.util.NavigableMap; +import java.util.TreeMap; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -98,9 +100,13 @@ public class TestWALActionsListener { edit.add(kv); HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(SOME_BYTES)); htd.addFamily(new HColumnDescriptor(b)); - - final long txid = wal.append(htd, hri, new WALKey(hri.getEncodedNameAsBytes(), - TableName.valueOf(b), 0), edit, true); + NavigableMap scopes = new TreeMap( + Bytes.BYTES_COMPARATOR); + for(byte[] fam : htd.getFamiliesKeys()) { + scopes.put(fam, 0); + } + final long txid = wal.append(hri, new WALKey(hri.getEncodedNameAsBytes(), + TableName.valueOf(b), 0, scopes), edit, true); wal.sync(txid); if (i == 10) { wal.registerWALActionsListener(laterobserver); http://git-wip-us.apache.org/repos/asf/hbase/blob/8f2bd060/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java index dbc06ff..3e894d7 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java @@ -37,7 +37,9 @@ import java.util.Arrays; import java.util.Collection; import java.util.HashSet; import java.util.List; +import java.util.NavigableMap; import java.util.Set; +import java.util.TreeMap; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; @@ -308,9 +310,14 @@ public class TestWALReplay { // Add 1k to each family. 
final int countPerFamily = 1000; + NavigableMap scopes = new TreeMap( + Bytes.BYTES_COMPARATOR); + for(byte[] fam : htd.getFamiliesKeys()) { + scopes.put(fam, 0); + } for (HColumnDescriptor hcd: htd.getFamilies()) { addWALEdits(tableName, hri, rowName, hcd.getName(), countPerFamily, ee, - wal1, htd, mvcc); + wal1, htd, mvcc, scopes); } wal1.shutdown(); runWALSplit(this.conf); @@ -319,7 +326,7 @@ public class TestWALReplay { // Add 1k to each family. for (HColumnDescriptor hcd: htd.getFamilies()) { addWALEdits(tableName, hri, rowName, hcd.getName(), countPerFamily, - ee, wal2, htd, mvcc); + ee, wal2, htd, mvcc, scopes); } wal2.shutdown(); runWALSplit(this.conf); @@ -800,9 +807,14 @@ public class TestWALReplay { // Add 1k to each family. final int countPerFamily = 1000; Set familyNames = new HashSet(); + NavigableMap scopes = new TreeMap( + Bytes.BYTES_COMPARATOR); + for(byte[] fam : htd.getFamiliesKeys()) { + scopes.put(fam, 0); + } for (HColumnDescriptor hcd: htd.getFamilies()) { addWALEdits(tableName, hri, rowName, hcd.getName(), countPerFamily, - ee, wal, htd, mvcc); + ee, wal, htd, mvcc, scopes); familyNames.add(hcd.getName()); } @@ -815,13 +827,15 @@ public class TestWALReplay { long now = ee.currentTime(); edit.add(new KeyValue(rowName, Bytes.toBytes("another family"), rowName, now, rowName)); - wal.append(htd, hri, new WALKey(hri.getEncodedNameAsBytes(), tableName, now, mvcc), edit, true); + wal.append(hri, new WALKey(hri.getEncodedNameAsBytes(), tableName, now, mvcc, scopes), edit, + true); // Delete the c family to verify deletes make it over. edit = new WALEdit(); now = ee.currentTime(); edit.add(new KeyValue(rowName, Bytes.toBytes("c"), null, now, KeyValue.Type.DeleteFamily)); - wal.append(htd, hri, new WALKey(hri.getEncodedNameAsBytes(), tableName, now, mvcc), edit, true); + wal.append(hri, new WALKey(hri.getEncodedNameAsBytes(), tableName, now, mvcc, scopes), edit, + true); // Sync. 
wal.sync(); @@ -1046,12 +1060,16 @@ public class TestWALReplay { deleteDir(basedir); final HTableDescriptor htd = createBasic1FamilyHTD(tableName); + NavigableMap scopes = new TreeMap(Bytes.BYTES_COMPARATOR); + for (byte[] fam : htd.getFamiliesKeys()) { + scopes.put(fam, 0); + } HRegion region = HBaseTestingUtility.createRegionAndWAL(hri, hbaseRootDir, this.conf, htd); HBaseTestingUtility.closeRegionAndWAL(region); final byte[] family = htd.getColumnFamilies()[0].getName(); final byte[] rowName = tableName.getName(); - FSWALEntry entry1 = createFSWALEntry(htd, hri, 1L, rowName, family, ee, mvcc, 1); - FSWALEntry entry2 = createFSWALEntry(htd, hri, 2L, rowName, family, ee, mvcc, 2); + FSWALEntry entry1 = createFSWALEntry(htd, hri, 1L, rowName, family, ee, mvcc, 1, scopes); + FSWALEntry entry2 = createFSWALEntry(htd, hri, 2L, rowName, family, ee, mvcc, 2, scopes); Path largeFile = new Path(logDir, "wal-1"); Path smallFile = new Path(logDir, "wal-2"); @@ -1154,8 +1172,8 @@ public class TestWALReplay { } private WALKey createWALKey(final TableName tableName, final HRegionInfo hri, - final MultiVersionConcurrencyControl mvcc) { - return new WALKey(hri.getEncodedNameAsBytes(), tableName, 999, mvcc); + final MultiVersionConcurrencyControl mvcc, NavigableMap scopes) { + return new WALKey(hri.getEncodedNameAsBytes(), tableName, 999, mvcc, scopes); } private WALEdit createWALEdit(final byte[] rowName, final byte[] family, EnvironmentEdge ee, @@ -1169,19 +1187,20 @@ public class TestWALReplay { private FSWALEntry createFSWALEntry(HTableDescriptor htd, HRegionInfo hri, long sequence, byte[] rowName, byte[] family, EnvironmentEdge ee, MultiVersionConcurrencyControl mvcc, - int index) throws IOException { + int index, NavigableMap scopes) throws IOException { FSWALEntry entry = - new FSWALEntry(sequence, createWALKey(htd.getTableName(), hri, mvcc), createWALEdit( - rowName, family, ee, index), htd, hri, true); + new FSWALEntry(sequence, createWALKey(htd.getTableName(), hri, mvcc, scopes), createWALEdit( + rowName, family, ee, index), hri, true); entry.stampRegionSequenceId(); return entry; } private void addWALEdits(final TableName tableName, final HRegionInfo hri, final byte[] rowName, final byte[] family, final int count, EnvironmentEdge ee, final WAL wal, - final HTableDescriptor htd, final MultiVersionConcurrencyControl mvcc) throws IOException { + final HTableDescriptor htd, final MultiVersionConcurrencyControl mvcc, + NavigableMap scopes) throws IOException { for (int j = 0; j < count; j++) { - wal.append(htd, hri, createWALKey(tableName, hri, mvcc), + wal.append(hri, createWALKey(tableName, hri, mvcc, scopes), createWALEdit(rowName, family, ee, j), true); } wal.sync(); http://git-wip-us.apache.org/repos/asf/hbase/blob/8f2bd060/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationBase.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationBase.java index e52a600..a50bbc5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationBase.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationBase.java @@ -18,6 +18,9 @@ */ package org.apache.hadoop.hbase.replication; +import java.util.NavigableMap; +import java.util.TreeMap; + import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; 
import org.apache.hadoop.conf.Configuration; @@ -63,6 +66,7 @@ public class TestReplicationBase { protected static Table htable1; protected static Table htable2; + protected static NavigableMap scopes; protected static HBaseTestingUtility utility1; protected static HBaseTestingUtility utility2; @@ -140,6 +144,11 @@ public class TestReplicationBase { table.addFamily(fam); fam = new HColumnDescriptor(noRepfamName); table.addFamily(fam); + scopes = new TreeMap( + Bytes.BYTES_COMPARATOR); + for(HColumnDescriptor f : table.getColumnFamilies()) { + scopes.put(f.getName(), f.getScope()); + } Connection connection1 = ConnectionFactory.createConnection(conf1); Connection connection2 = ConnectionFactory.createConnection(conf2); try (Admin admin1 = connection1.getAdmin()) { http://git-wip-us.apache.org/repos/asf/hbase/blob/8f2bd060/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java index ab97238..97ccd33 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java @@ -26,6 +26,8 @@ import static org.junit.Assert.fail; import java.util.ArrayList; import java.util.HashMap; import java.util.List; +import java.util.NavigableMap; +import java.util.TreeMap; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -658,7 +660,7 @@ public class TestReplicationSmallTests extends TestReplicationBase { HRegionInfo hri = new HRegionInfo(htable1.getName(), HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW); WALEdit edit = WALEdit.createCompaction(hri, compactionDescriptor); - Replication.scopeWALEdits(htable1.getTableDescriptor(), new WALKey(), edit, + Replication.scopeWALEdits(new WALKey(), edit, htable1.getConfiguration(), null); } @@ -767,7 +769,10 @@ public class TestReplicationSmallTests extends TestReplicationBase { HRegion region = utility1.getMiniHBaseCluster().getRegions(tableName).get(0); HRegionInfo hri = region.getRegionInfo(); - + NavigableMap scopes = new TreeMap(Bytes.BYTES_COMPARATOR); + for (byte[] fam : htable1.getTableDescriptor().getFamiliesKeys()) { + scopes.put(fam, 1); + } final MultiVersionConcurrencyControl mvcc = new MultiVersionConcurrencyControl(); int index = utility1.getMiniHBaseCluster().getServerWith(hri.getRegionName()); WAL wal = utility1.getMiniHBaseCluster().getRegionServer(index).getWAL(region.getRegionInfo()); @@ -778,8 +783,8 @@ public class TestReplicationSmallTests extends TestReplicationBase { long now = EnvironmentEdgeManager.currentTime(); edit.add(new KeyValue(rowName, famName, qualifier, now, value)); - WALKey walKey = new WALKey(hri.getEncodedNameAsBytes(), tableName, now, mvcc); - wal.append(htable1.getTableDescriptor(), hri, walKey, edit, true); + WALKey walKey = new WALKey(hri.getEncodedNameAsBytes(), tableName, now, mvcc, scopes); + wal.append(hri, walKey, edit, true); wal.sync(); Get get = new Get(rowName); http://git-wip-us.apache.org/repos/asf/hbase/blob/8f2bd060/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWALEntryFilters.java ---------------------------------------------------------------------- diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWALEntryFilters.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWALEntryFilters.java index 22c421d..c906d6a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWALEntryFilters.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWALEntryFilters.java @@ -58,19 +58,19 @@ public class TestReplicationWALEntryFilters { // meta WALKey key1 = new WALKey( HRegionInfo.FIRST_META_REGIONINFO.getEncodedNameAsBytes(), - TableName.META_TABLE_NAME); + TableName.META_TABLE_NAME, null); Entry metaEntry = new Entry(key1, null); assertNull(filter.filter(metaEntry)); // ns table - WALKey key2 = new WALKey(new byte[] {}, TableName.NAMESPACE_TABLE_NAME); + WALKey key2 = new WALKey(new byte[] {}, TableName.NAMESPACE_TABLE_NAME, null); Entry nsEntry = new Entry(key2, null); assertNull(filter.filter(nsEntry)); // user table - WALKey key3 = new WALKey(new byte[] {}, TableName.valueOf("foo")); + WALKey key3 = new WALKey(new byte[] {}, TableName.valueOf("foo"), null); Entry userEntry = new Entry(key3, null); assertEquals(userEntry, filter.filter(userEntry)); @@ -80,33 +80,30 @@ public class TestReplicationWALEntryFilters { public void testScopeWALEntryFilter() { ScopeWALEntryFilter filter = new ScopeWALEntryFilter(); - Entry userEntry = createEntry(a, b); - Entry userEntryA = createEntry(a); - Entry userEntryB = createEntry(b); - Entry userEntryEmpty = createEntry(); + Entry userEntry = createEntry(null, a, b); + Entry userEntryA = createEntry(null, a); + Entry userEntryB = createEntry(null, b); + Entry userEntryEmpty = createEntry(null); // no scopes assertEquals(null, filter.filter(userEntry)); // empty scopes TreeMap scopes = new TreeMap(Bytes.BYTES_COMPARATOR); - userEntry = createEntry(a, b); - userEntry.getKey().setScopes(scopes); + userEntry = createEntry(scopes, a, b); assertEquals(null, filter.filter(userEntry)); // different scope scopes = new TreeMap(Bytes.BYTES_COMPARATOR); scopes.put(c, HConstants.REPLICATION_SCOPE_GLOBAL); - userEntry = createEntry(a, b); - userEntry.getKey().setScopes(scopes); + userEntry = createEntry(scopes, a, b); // all kvs should be filtered assertEquals(userEntryEmpty, filter.filter(userEntry)); // local scope scopes = new TreeMap(Bytes.BYTES_COMPARATOR); scopes.put(a, HConstants.REPLICATION_SCOPE_LOCAL); - userEntry = createEntry(a, b); - userEntry.getKey().setScopes(scopes); + userEntry = createEntry(scopes, a, b); assertEquals(userEntryEmpty, filter.filter(userEntry)); scopes.put(b, HConstants.REPLICATION_SCOPE_LOCAL); assertEquals(userEntryEmpty, filter.filter(userEntry)); @@ -114,8 +111,7 @@ public class TestReplicationWALEntryFilters { // only scope a scopes = new TreeMap(Bytes.BYTES_COMPARATOR); scopes.put(a, HConstants.REPLICATION_SCOPE_GLOBAL); - userEntry = createEntry(a, b); - userEntry.getKey().setScopes(scopes); + userEntry = createEntry(scopes, a, b); assertEquals(userEntryA, filter.filter(userEntry)); scopes.put(b, HConstants.REPLICATION_SCOPE_LOCAL); assertEquals(userEntryA, filter.filter(userEntry)); @@ -123,8 +119,7 @@ public class TestReplicationWALEntryFilters { // only scope b scopes = new TreeMap(Bytes.BYTES_COMPARATOR); scopes.put(b, HConstants.REPLICATION_SCOPE_GLOBAL); - userEntry = createEntry(a, b); - userEntry.getKey().setScopes(scopes); + userEntry = createEntry(scopes, a, b); assertEquals(userEntryB, filter.filter(userEntry)); scopes.put(a, 
HConstants.REPLICATION_SCOPE_LOCAL); assertEquals(userEntryB, filter.filter(userEntry)); @@ -132,8 +127,7 @@ public class TestReplicationWALEntryFilters { // scope a and b scopes = new TreeMap(Bytes.BYTES_COMPARATOR); scopes.put(b, HConstants.REPLICATION_SCOPE_GLOBAL); - userEntry = createEntry(a, b); - userEntry.getKey().setScopes(scopes); + userEntry = createEntry(scopes, a, b); assertEquals(userEntryB, filter.filter(userEntry)); scopes.put(a, HConstants.REPLICATION_SCOPE_LOCAL); assertEquals(userEntryB, filter.filter(userEntry)); @@ -155,16 +149,16 @@ public class TestReplicationWALEntryFilters { @Test public void testChainWALEntryFilter() { - Entry userEntry = createEntry(a, b, c); + Entry userEntry = createEntry(null, a, b, c); ChainWALEntryFilter filter = new ChainWALEntryFilter(passFilter); - assertEquals(createEntry(a,b,c), filter.filter(userEntry)); + assertEquals(createEntry(null, a,b,c), filter.filter(userEntry)); filter = new ChainWALEntryFilter(passFilter, passFilter); - assertEquals(createEntry(a,b,c), filter.filter(userEntry)); + assertEquals(createEntry(null, a,b,c), filter.filter(userEntry)); filter = new ChainWALEntryFilter(passFilter, passFilter, passFilter); - assertEquals(createEntry(a,b,c), filter.filter(userEntry)); + assertEquals(createEntry(null, a,b,c), filter.filter(userEntry)); filter = new ChainWALEntryFilter(nullFilter); assertEquals(null, filter.filter(userEntry)); @@ -189,7 +183,7 @@ public class TestReplicationWALEntryFilters { new ChainWALEntryFilter(passFilter), new ChainWALEntryFilter(passFilter)), new ChainWALEntryFilter(passFilter)); - assertEquals(createEntry(a,b,c), filter.filter(userEntry)); + assertEquals(createEntry(null, a,b,c), filter.filter(userEntry)); filter = @@ -206,19 +200,19 @@ public class TestReplicationWALEntryFilters { ReplicationPeer peer = mock(ReplicationPeer.class); when(peer.getTableCFs()).thenReturn(null); - Entry userEntry = createEntry(a, b, c); + Entry userEntry = createEntry(null, a, b, c); TableCfWALEntryFilter filter = new TableCfWALEntryFilter(peer); - assertEquals(createEntry(a,b,c), filter.filter(userEntry)); + assertEquals(createEntry(null, a,b,c), filter.filter(userEntry)); // empty map - userEntry = createEntry(a, b, c); + userEntry = createEntry(null, a, b, c); Map> tableCfs = new HashMap>(); when(peer.getTableCFs()).thenReturn(tableCfs); filter = new TableCfWALEntryFilter(peer); assertEquals(null, filter.filter(userEntry)); // table bar - userEntry = createEntry(a, b, c); + userEntry = createEntry(null, a, b, c); tableCfs = new HashMap>(); tableCfs.put(TableName.valueOf("bar"), null); when(peer.getTableCFs()).thenReturn(tableCfs); @@ -226,24 +220,24 @@ public class TestReplicationWALEntryFilters { assertEquals(null, filter.filter(userEntry)); // table foo:a - userEntry = createEntry(a, b, c); + userEntry = createEntry(null, a, b, c); tableCfs = new HashMap>(); tableCfs.put(TableName.valueOf("foo"), Lists.newArrayList("a")); when(peer.getTableCFs()).thenReturn(tableCfs); filter = new TableCfWALEntryFilter(peer); - assertEquals(createEntry(a), filter.filter(userEntry)); + assertEquals(createEntry(null, a), filter.filter(userEntry)); // table foo:a,c - userEntry = createEntry(a, b, c, d); + userEntry = createEntry(null, a, b, c, d); tableCfs = new HashMap>(); tableCfs.put(TableName.valueOf("foo"), Lists.newArrayList("a", "c")); when(peer.getTableCFs()).thenReturn(tableCfs); filter = new TableCfWALEntryFilter(peer); - assertEquals(createEntry(a,c), filter.filter(userEntry)); + assertEquals(createEntry(null, a,c), 
filter.filter(userEntry)); } - private Entry createEntry(byte[]... kvs) { - WALKey key1 = new WALKey(new byte[] {}, TableName.valueOf("foo")); + private Entry createEntry(TreeMap scopes, byte[]... kvs) { + WALKey key1 = new WALKey(new byte[] {}, TableName.valueOf("foo"), scopes); WALEdit edit1 = new WALEdit(); for (byte[] kv : kvs) { http://git-wip-us.apache.org/repos/asf/hbase/blob/8f2bd060/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSourceManager.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSourceManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSourceManager.java index f042a8d..fb8cfa0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSourceManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSourceManager.java @@ -32,6 +32,7 @@ import java.util.Map; import java.util.NavigableMap; import java.util.SortedMap; import java.util.SortedSet; +import java.util.TreeMap; import java.util.TreeSet; import java.util.UUID; import java.util.concurrent.CountDownLatch; @@ -131,6 +132,7 @@ public class TestReplicationSourceManager { private static CountDownLatch latch; private static List files = new ArrayList(); + private static NavigableMap scopes; @BeforeClass public static void setUpBeforeClass() throws Exception { @@ -177,6 +179,11 @@ public class TestReplicationSourceManager { col.setScope(HConstants.REPLICATION_SCOPE_LOCAL); htd.addFamily(col); + scopes = new TreeMap( + Bytes.BYTES_COMPARATOR); + for(byte[] fam : htd.getFamiliesKeys()) { + scopes.put(fam, 0); + } hri = new HRegionInfo(htd.getTableName(), r1, r2); } @@ -214,15 +221,20 @@ public class TestReplicationSourceManager { manager.init(); HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("tableame")); htd.addFamily(new HColumnDescriptor(f1)); + NavigableMap scopes = new TreeMap( + Bytes.BYTES_COMPARATOR); + for(byte[] fam : htd.getFamiliesKeys()) { + scopes.put(fam, 0); + } // Testing normal log rolling every 20 for(long i = 1; i < 101; i++) { if(i > 1 && i % 20 == 0) { wal.rollWriter(); } LOG.info(i); - final long txid = wal.append(htd, + final long txid = wal.append( hri, - new WALKey(hri.getEncodedNameAsBytes(), test, System.currentTimeMillis(), mvcc), + new WALKey(hri.getEncodedNameAsBytes(), test, System.currentTimeMillis(), mvcc, scopes), edit, true); wal.sync(txid); @@ -236,8 +248,8 @@ public class TestReplicationSourceManager { LOG.info(baseline + " and " + time); for (int i = 0; i < 3; i++) { - wal.append(htd, hri, - new WALKey(hri.getEncodedNameAsBytes(), test, System.currentTimeMillis(), mvcc), + wal.append(hri, + new WALKey(hri.getEncodedNameAsBytes(), test, System.currentTimeMillis(), mvcc, scopes), edit, true); } @@ -254,8 +266,8 @@ public class TestReplicationSourceManager { manager.logPositionAndCleanOldLogs(manager.getSources().get(0).getCurrentPath(), "1", 0, false, false); - wal.append(htd, hri, - new WALKey(hri.getEncodedNameAsBytes(), test, System.currentTimeMillis(), mvcc), + wal.append(hri, + new WALKey(hri.getEncodedNameAsBytes(), test, System.currentTimeMillis(), mvcc, scopes), edit, true); wal.sync(); @@ -427,33 +439,35 @@ public class TestReplicationSourceManager { @Test public void testBulkLoadWALEditsWithoutBulkLoadReplicationEnabled() throws Exception { - 
// 1. Create wal key - WALKey logKey = new WALKey(); - // 2. Get the bulk load wal edit event - WALEdit logEdit = getBulkLoadWALEdit(); + NavigableMap scope = new TreeMap(Bytes.BYTES_COMPARATOR); + // 1. Get the bulk load wal edit event + WALEdit logEdit = getBulkLoadWALEdit(scope); + // 2. Create wal key + WALKey logKey = new WALKey(scope); // 3. Get the scopes for the key - Replication.scopeWALEdits(htd, logKey, logEdit, conf, manager); + Replication.scopeWALEdits(logKey, logEdit, conf, manager); // 4. Assert that no bulk load entry scopes are added if bulk load hfile replication is disabled - assertNull("No bulk load entries scope should be added if bulk load replication is diabled.", - logKey.getScopes()); + assertNull("No bulk load entries scope should be added if bulk load replication is disabled.", + logKey.getReplicationScopes()); } @Test public void testBulkLoadWALEdits() throws Exception { - // 1. Create wal key - WALKey logKey = new WALKey(); - // 2. Get the bulk load wal edit event - WALEdit logEdit = getBulkLoadWALEdit(); + // 1. Get the bulk load wal edit event + NavigableMap scope = new TreeMap(Bytes.BYTES_COMPARATOR); + WALEdit logEdit = getBulkLoadWALEdit(scope); + // 2. Create wal key + WALKey logKey = new WALKey(scope); // 3. Enable bulk load hfile replication Configuration bulkLoadConf = HBaseConfiguration.create(conf); bulkLoadConf.setBoolean(HConstants.REPLICATION_BULKLOAD_ENABLE_KEY, true); // 4. Get the scopes for the key - Replication.scopeWALEdits(htd, logKey, logEdit, bulkLoadConf, manager); + Replication.scopeWALEdits(logKey, logEdit, bulkLoadConf, manager); - NavigableMap scopes = logKey.getScopes(); + NavigableMap scopes = logKey.getReplicationScopes(); // Assert family with replication scope global is present in the key scopes assertTrue("This family scope is set to global, should be part of replication key scopes.", scopes.containsKey(f1)); @@ -462,17 +476,16 @@ public class TestReplicationSourceManager { scopes.containsKey(f2)); } - private WALEdit getBulkLoadWALEdit() { + private WALEdit getBulkLoadWALEdit(NavigableMap scope) { // 1. Create store files for the families Map> storeFiles = new HashMap<>(1); List p = new ArrayList<>(1); p.add(new Path(Bytes.toString(f1))); storeFiles.put(f1, p); - + scope.put(f1, 1); p = new ArrayList<>(1); p.add(new Path(Bytes.toString(f2))); storeFiles.put(f2, p); - // 2. 
Create bulk load descriptor BulkLoadDescriptor desc = ProtobufUtil.toBulkLoadDescriptor(hri.getTable(), ByteStringer.wrap(hri.getEncodedNameAsBytes()), storeFiles, 1); http://git-wip-us.apache.org/repos/asf/hbase/blob/8f2bd060/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationWALReaderManager.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationWALReaderManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationWALReaderManager.java index 2ad34ea..3ef658f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationWALReaderManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationWALReaderManager.java @@ -28,6 +28,8 @@ import java.util.ArrayList; import java.util.Collection; import java.util.concurrent.atomic.AtomicLong; import java.util.List; +import java.util.NavigableMap; +import java.util.TreeMap; import java.util.concurrent.atomic.AtomicLong; import org.apache.hadoop.conf.Configuration; @@ -75,6 +77,7 @@ public class TestReplicationWALReaderManager { private static final HRegionInfo info = new HRegionInfo(tableName, HConstants.EMPTY_START_ROW, HConstants.LAST_ROW, false); private static final HTableDescriptor htd = new HTableDescriptor(tableName); + private static NavigableMap scopes; private WAL log; private ReplicationWALReaderManager logManager; @@ -123,6 +126,11 @@ public class TestReplicationWALReaderManager { cluster = TEST_UTIL.getDFSCluster(); fs = cluster.getFileSystem(); + scopes = new TreeMap( + Bytes.BYTES_COMPARATOR); + for(byte[] fam : htd.getFamiliesKeys()) { + scopes.put(fam, 0); + } } @AfterClass @@ -204,9 +212,8 @@ public class TestReplicationWALReaderManager { } private void appendToLogPlus(int count) throws IOException { - final long txid = log.append(htd, info, - new WALKey(info.getEncodedNameAsBytes(), tableName, System.currentTimeMillis(), mvcc), - getWALEdits(count), true); + final long txid = log.append(info, new WALKey(info.getEncodedNameAsBytes(), tableName, + System.currentTimeMillis(), mvcc, scopes), getWALEdits(count), true); log.sync(txid); } http://git-wip-us.apache.org/repos/asf/hbase/blob/8f2bd060/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/FaultyFSLog.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/FaultyFSLog.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/FaultyFSLog.java index 6eac388..79b94cf 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/FaultyFSLog.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/FaultyFSLog.java @@ -26,7 +26,6 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HRegionInfo; -import org.apache.hadoop.hbase.HTableDescriptor; // imports for things that haven't moved yet import org.apache.hadoop.hbase.regionserver.wal.FSHLog; @@ -60,12 +59,12 @@ public class FaultyFSLog extends FSHLog { } @Override - public long append(HTableDescriptor htd, HRegionInfo info, WALKey key, WALEdit edits, - boolean inMemstore) throws IOException { + public long append(HRegionInfo info, WALKey key, + WALEdit edits, boolean inMemstore) throws IOException { if (this.ft == FailureType.APPEND) { 
throw new IOException("append"); } - return super.append(htd, info, key, edits, inMemstore); + return super.append(info, key, edits, inMemstore); } } http://git-wip-us.apache.org/repos/asf/hbase/blob/8f2bd060/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestDefaultWALProvider.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestDefaultWALProvider.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestDefaultWALProvider.java index 89c63a6..9b6ac54 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestDefaultWALProvider.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestDefaultWALProvider.java @@ -25,8 +25,10 @@ import static org.junit.Assert.assertTrue; import java.io.IOException; import java.util.HashSet; +import java.util.NavigableMap; import java.util.Random; import java.util.Set; +import java.util.TreeMap; import java.util.concurrent.atomic.AtomicLong; import org.apache.commons.logging.Log; @@ -151,23 +153,25 @@ public class TestDefaultWALProvider { protected void addEdits(WAL log, HRegionInfo hri, HTableDescriptor htd, - int times) throws IOException { + int times, NavigableMap scopes) throws IOException { final byte[] row = Bytes.toBytes("row"); for (int i = 0; i < times; i++) { long timestamp = System.currentTimeMillis(); WALEdit cols = new WALEdit(); cols.add(new KeyValue(row, row, row, timestamp, row)); - log.append(htd, hri, getWalKey(hri.getEncodedNameAsBytes(), htd.getTableName(), timestamp), - cols, true); + log.append(hri, getWalKey(hri.getEncodedNameAsBytes(), htd.getTableName(), timestamp, scopes), + cols, true); } log.sync(); } /** * used by TestDefaultWALProviderWithHLogKey + * @param scopes */ - WALKey getWalKey(final byte[] info, final TableName tableName, final long timestamp) { - return new WALKey(info, tableName, timestamp, mvcc); + WALKey getWalKey(final byte[] info, final TableName tableName, final long timestamp, + NavigableMap scopes) { + return new WALKey(info, tableName, timestamp, mvcc, scopes); } /** @@ -191,6 +195,16 @@ public class TestDefaultWALProvider { final HTableDescriptor htd2 = new HTableDescriptor(TableName.valueOf("testLogCleaning2")) .addFamily(new HColumnDescriptor("row")); + NavigableMap scopes1 = new TreeMap( + Bytes.BYTES_COMPARATOR); + for(byte[] fam : htd.getFamiliesKeys()) { + scopes1.put(fam, 0); + } + NavigableMap scopes2 = new TreeMap( + Bytes.BYTES_COMPARATOR); + for(byte[] fam : htd2.getFamiliesKeys()) { + scopes2.put(fam, 0); + } final Configuration localConf = new Configuration(conf); localConf.set(WALFactory.WAL_PROVIDER, DefaultWALProvider.class.getName()); final WALFactory wals = new WALFactory(localConf, null, currentTest.getMethodName()); @@ -205,26 +219,26 @@ public class TestDefaultWALProvider { // Add a single edit and make sure that rolling won't remove the file // Before HBASE-3198 it used to delete it - addEdits(log, hri, htd, 1); + addEdits(log, hri, htd, 1, scopes1); log.rollWriter(); assertEquals(1, DefaultWALProvider.getNumRolledLogFiles(log)); // See if there's anything wrong with more than 1 edit - addEdits(log, hri, htd, 2); + addEdits(log, hri, htd, 2, scopes1); log.rollWriter(); assertEquals(2, DefaultWALProvider.getNumRolledLogFiles(log)); // Now mix edits from 2 regions, still no flushing - addEdits(log, hri, htd, 1); - addEdits(log, hri2, htd2, 1); - addEdits(log, hri, htd, 1); - addEdits(log, hri2, htd2, 1); + addEdits(log, hri, htd, 1, scopes1); + 
addEdits(log, hri2, htd2, 1, scopes2); + addEdits(log, hri, htd, 1, scopes1); + addEdits(log, hri2, htd2, 1, scopes2); log.rollWriter(); assertEquals(3, DefaultWALProvider.getNumRolledLogFiles(log)); // Flush the first region, we expect to see the first two files getting // archived. We need to append something or writer won't be rolled. - addEdits(log, hri2, htd2, 1); + addEdits(log, hri2, htd2, 1, scopes2); log.startCacheFlush(hri.getEncodedNameAsBytes(), htd.getFamiliesKeys()); log.completeCacheFlush(hri.getEncodedNameAsBytes()); log.rollWriter(); @@ -233,7 +247,7 @@ public class TestDefaultWALProvider { // Flush the second region, which removes all the remaining output files // since the oldest was completely flushed and the two others only contain // flush information - addEdits(log, hri2, htd2, 1); + addEdits(log, hri2, htd2, 1, scopes2); log.startCacheFlush(hri2.getEncodedNameAsBytes(), htd2.getFamiliesKeys()); log.completeCacheFlush(hri2.getEncodedNameAsBytes()); log.rollWriter(); @@ -264,6 +278,16 @@ public class TestDefaultWALProvider { new HTableDescriptor(TableName.valueOf("t1")).addFamily(new HColumnDescriptor("row")); HTableDescriptor table2 = new HTableDescriptor(TableName.valueOf("t2")).addFamily(new HColumnDescriptor("row")); + NavigableMap scopes1 = new TreeMap( + Bytes.BYTES_COMPARATOR); + for(byte[] fam : table1.getFamiliesKeys()) { + scopes1.put(fam, 0); + } + NavigableMap scopes2 = new TreeMap( + Bytes.BYTES_COMPARATOR); + for(byte[] fam : table2.getFamiliesKeys()) { + scopes2.put(fam, 0); + } final Configuration localConf = new Configuration(conf); localConf.set(WALFactory.WAL_PROVIDER, DefaultWALProvider.class.getName()); final WALFactory wals = new WALFactory(localConf, null, currentTest.getMethodName()); @@ -281,31 +305,31 @@ public class TestDefaultWALProvider { hri2.setSplit(false); // variables to mock region sequenceIds. // start with the testing logic: insert a waledit, and roll writer - addEdits(wal, hri1, table1, 1); + addEdits(wal, hri1, table1, 1, scopes1); wal.rollWriter(); // assert that the wal is rolled assertEquals(1, DefaultWALProvider.getNumRolledLogFiles(wal)); // add edits in the second wal file, and roll writer. - addEdits(wal, hri1, table1, 1); + addEdits(wal, hri1, table1, 1, scopes1); wal.rollWriter(); // assert that the wal is rolled assertEquals(2, DefaultWALProvider.getNumRolledLogFiles(wal)); // add a waledit to table1, and flush the region. - addEdits(wal, hri1, table1, 3); + addEdits(wal, hri1, table1, 3, scopes1); flushRegion(wal, hri1.getEncodedNameAsBytes(), table1.getFamiliesKeys()); // roll log; all old logs should be archived. wal.rollWriter(); assertEquals(0, DefaultWALProvider.getNumRolledLogFiles(wal)); // add an edit to table2, and roll writer - addEdits(wal, hri2, table2, 1); + addEdits(wal, hri2, table2, 1, scopes2); wal.rollWriter(); assertEquals(1, DefaultWALProvider.getNumRolledLogFiles(wal)); // add edits for table1, and roll writer - addEdits(wal, hri1, table1, 2); + addEdits(wal, hri1, table1, 2, scopes1); wal.rollWriter(); assertEquals(2, DefaultWALProvider.getNumRolledLogFiles(wal)); // add edits for table2, and flush hri1. 
- addEdits(wal, hri2, table2, 2); + addEdits(wal, hri2, table2, 2, scopes2); flushRegion(wal, hri1.getEncodedNameAsBytes(), table2.getFamiliesKeys()); // the log : region-sequenceId map is // log1: region2 (unflushed) @@ -315,7 +339,7 @@ public class TestDefaultWALProvider { wal.rollWriter(); assertEquals(2, DefaultWALProvider.getNumRolledLogFiles(wal)); // flush region2, and all logs should be archived. - addEdits(wal, hri2, table2, 2); + addEdits(wal, hri2, table2, 2, scopes2); flushRegion(wal, hri2.getEncodedNameAsBytes(), table2.getFamiliesKeys()); wal.rollWriter(); assertEquals(0, DefaultWALProvider.getNumRolledLogFiles(wal)); http://git-wip-us.apache.org/repos/asf/hbase/blob/8f2bd060/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestDefaultWALProviderWithHLogKey.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestDefaultWALProviderWithHLogKey.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestDefaultWALProviderWithHLogKey.java index 1885d87..ef92768 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestDefaultWALProviderWithHLogKey.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestDefaultWALProviderWithHLogKey.java @@ -18,6 +18,8 @@ package org.apache.hadoop.hbase.wal; +import java.util.NavigableMap; + import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.testclassification.RegionServerTests; @@ -28,7 +30,8 @@ import org.apache.hadoop.hbase.regionserver.wal.HLogKey; @Category({RegionServerTests.class, LargeTests.class}) public class TestDefaultWALProviderWithHLogKey extends TestDefaultWALProvider { @Override - WALKey getWalKey(final byte[] info, final TableName tableName, final long timestamp) { - return new HLogKey(info, tableName, timestamp, mvcc); + WALKey getWalKey(final byte[] info, final TableName tableName, final long timestamp, + final NavigableMap scopes) { + return new HLogKey(info, tableName, timestamp, mvcc, scopes); } } http://git-wip-us.apache.org/repos/asf/hbase/blob/8f2bd060/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestSecureWAL.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestSecureWAL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestSecureWAL.java index 079e0cb..caa0a45 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestSecureWAL.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestSecureWAL.java @@ -22,6 +22,8 @@ import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import java.util.List; +import java.util.NavigableMap; +import java.util.TreeMap; import org.apache.commons.io.IOUtils; import org.apache.commons.logging.Log; @@ -79,6 +81,11 @@ public class TestSecureWAL { TableName tableName = TableName.valueOf("TestSecureWAL"); HTableDescriptor htd = new HTableDescriptor(tableName); htd.addFamily(new HColumnDescriptor(tableName.getName())); + NavigableMap scopes = new TreeMap( + Bytes.BYTES_COMPARATOR); + for(byte[] fam : htd.getFamiliesKeys()) { + scopes.put(fam, 0); + } HRegionInfo regioninfo = new HRegionInfo(tableName, HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW, false); final int total = 10; @@ -95,8 +102,8 @@ public class TestSecureWAL { for (int i = 0; i < total; i++) { WALEdit kvs = new WALEdit(); kvs.add(new 
KeyValue(row, family, Bytes.toBytes(i), value)); - wal.append(htd, regioninfo, new WALKey(regioninfo.getEncodedNameAsBytes(), tableName, - System.currentTimeMillis()), kvs, true); + wal.append(regioninfo, new WALKey(regioninfo.getEncodedNameAsBytes(), tableName, + System.currentTimeMillis(), scopes), kvs, true); } wal.sync(); final Path walPath = DefaultWALProvider.getCurrentFileName(wal); http://git-wip-us.apache.org/repos/asf/hbase/blob/8f2bd060/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java index 747977a..0eef3b1 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java @@ -30,6 +30,8 @@ import java.io.IOException; import java.lang.reflect.Method; import java.net.BindException; import java.util.List; +import java.util.NavigableMap; +import java.util.TreeMap; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -181,6 +183,11 @@ public class TestWALFactory { } HTableDescriptor htd = new HTableDescriptor(tableName); htd.addFamily(new HColumnDescriptor("column")); + NavigableMap scopes = new TreeMap( + Bytes.BYTES_COMPARATOR); + for(byte[] fam : htd.getFamiliesKeys()) { + scopes.put(fam, 0); + } // Add edits for three regions. for (int ii = 0; ii < howmany; ii++) { @@ -196,8 +203,8 @@ public class TestWALFactory { System.currentTimeMillis(), column)); LOG.info("Region " + i + ": " + edit); WALKey walKey = new WALKey(infos[i].getEncodedNameAsBytes(), tableName, - System.currentTimeMillis(), mvcc); - log.append(htd, infos[i], walKey, edit, true); + System.currentTimeMillis(), mvcc, scopes); + log.append(infos[i], walKey, edit, true); walKey.getWriteEntry(); } log.sync(); @@ -249,13 +256,18 @@ public class TestWALFactory { null,null, false); HTableDescriptor htd = new HTableDescriptor(tableName); htd.addFamily(new HColumnDescriptor(tableName.getName())); + NavigableMap scopes = new TreeMap( + Bytes.BYTES_COMPARATOR); + for(byte[] fam : htd.getFamiliesKeys()) { + scopes.put(fam, 0); + } final WAL wal = wals.getWAL(info.getEncodedNameAsBytes(), info.getTable().getNamespace()); for (int i = 0; i < total; i++) { WALEdit kvs = new WALEdit(); kvs.add(new KeyValue(Bytes.toBytes(i), tableName.getName(), tableName.getName())); - wal.append(htd, info, new WALKey(info.getEncodedNameAsBytes(), tableName, - System.currentTimeMillis(), mvcc), kvs, true); + wal.append(info, new WALKey(info.getEncodedNameAsBytes(), tableName, + System.currentTimeMillis(), mvcc, scopes), kvs, true); } // Now call sync and try reading. Opening a Reader before you sync just // gives you EOFE. 
@@ -273,8 +285,8 @@ public class TestWALFactory {
     for (int i = 0; i < total; i++) {
       WALEdit kvs = new WALEdit();
       kvs.add(new KeyValue(Bytes.toBytes(i), tableName.getName(), tableName.getName()));
-      wal.append(htd, info, new WALKey(info.getEncodedNameAsBytes(), tableName,
-          System.currentTimeMillis(), mvcc), kvs, true);
+      wal.append(info, new WALKey(info.getEncodedNameAsBytes(), tableName,
+          System.currentTimeMillis(), mvcc, scopes), kvs, true);
     }
     wal.sync();
     reader = wals.createReader(fs, walPath);
@@ -295,8 +307,8 @@ public class TestWALFactory {
     for (int i = 0; i < total; i++) {
       WALEdit kvs = new WALEdit();
       kvs.add(new KeyValue(Bytes.toBytes(i), tableName.getName(), value));
-      wal.append(htd, info, new WALKey(info.getEncodedNameAsBytes(), tableName,
-          System.currentTimeMillis(), mvcc), kvs, true);
+      wal.append(info, new WALKey(info.getEncodedNameAsBytes(), tableName,
+          System.currentTimeMillis(), mvcc, scopes), kvs, true);
     }
     // Now I should have written out lots of blocks. Sync then read.
     wal.sync();
@@ -370,12 +382,17 @@ public class TestWALFactory {
     HTableDescriptor htd = new HTableDescriptor(tableName);
     htd.addFamily(new HColumnDescriptor(tableName.getName()));
+    NavigableMap<byte[], Integer> scopes = new TreeMap<byte[], Integer>(
+        Bytes.BYTES_COMPARATOR);
+    for(byte[] fam : htd.getFamiliesKeys()) {
+      scopes.put(fam, 0);
+    }
     for (int i = 0; i < total; i++) {
       WALEdit kvs = new WALEdit();
       kvs.add(new KeyValue(Bytes.toBytes(i), tableName.getName(), tableName.getName()));
-      wal.append(htd, regioninfo, new WALKey(regioninfo.getEncodedNameAsBytes(), tableName,
-          System.currentTimeMillis()), kvs, true);
+      wal.append(regioninfo, new WALKey(regioninfo.getEncodedNameAsBytes(), tableName,
+          System.currentTimeMillis(), scopes), kvs, true);
     }
     // Now call sync to send the data to HDFS datanodes
     wal.sync();
@@ -485,6 +502,11 @@ public class TestWALFactory {
     final HTableDescriptor htd =
         new HTableDescriptor(TableName.valueOf("tablename")).addFamily(new HColumnDescriptor(
            "column"));
+    NavigableMap<byte[], Integer> scopes = new TreeMap<byte[], Integer>(
+        Bytes.BYTES_COMPARATOR);
+    for(byte[] fam : htd.getFamiliesKeys()) {
+      scopes.put(fam, 0);
+    }
     final byte [] row = Bytes.toBytes("row");
     WAL.Reader reader = null;
     try {
@@ -503,9 +525,9 @@ public class TestWALFactory {
          row,Bytes.toBytes(Bytes.toString(row) + "1"), false);
       final WAL log = wals.getWAL(info.getEncodedNameAsBytes(), info.getTable().getNamespace());
-      final long txid = log.append(htd, info,
+      final long txid = log.append(info,
         new WALKey(info.getEncodedNameAsBytes(), htd.getTableName(), System.currentTimeMillis(),
-            mvcc),
+            mvcc, scopes),
         cols, true);
       log.sync(txid);
       log.startCacheFlush(info.getEncodedNameAsBytes(), htd.getFamiliesKeys());
@@ -545,6 +567,11 @@ public class TestWALFactory {
     final HTableDescriptor htd =
         new HTableDescriptor(TableName.valueOf("tablename")).addFamily(new HColumnDescriptor(
            "column"));
+    NavigableMap<byte[], Integer> scopes = new TreeMap<byte[], Integer>(
+        Bytes.BYTES_COMPARATOR);
+    for(byte[] fam : htd.getFamiliesKeys()) {
+      scopes.put(fam, 0);
+    }
     final byte [] row = Bytes.toBytes("row");
     WAL.Reader reader = null;
     final MultiVersionConcurrencyControl mvcc = new MultiVersionConcurrencyControl(1);
@@ -561,9 +588,9 @@ public class TestWALFactory {
       HRegionInfo hri = new HRegionInfo(htd.getTableName(),
           HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW);
       final WAL log = wals.getWAL(hri.getEncodedNameAsBytes(), hri.getTable().getNamespace());
-      final long txid = log.append(htd, hri,
+      final long txid = log.append(hri,
         new WALKey(hri.getEncodedNameAsBytes(), htd.getTableName(), System.currentTimeMillis(),
-            mvcc),
+            mvcc, scopes),
         cols, true);
       log.sync(txid);
       log.startCacheFlush(hri.getEncodedNameAsBytes(), htd.getFamiliesKeys());
@@ -607,7 +634,11 @@ public class TestWALFactory {
     long timestamp = System.currentTimeMillis();
     HTableDescriptor htd = new HTableDescriptor(tableName);
     htd.addFamily(new HColumnDescriptor("column"));
-
+    NavigableMap<byte[], Integer> scopes = new TreeMap<byte[], Integer>(
+        Bytes.BYTES_COMPARATOR);
+    for(byte[] fam : htd.getFamiliesKeys()) {
+      scopes.put(fam, 0);
+    }
     HRegionInfo hri = new HRegionInfo(tableName,
         HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW);
     final WAL log = wals.getWAL(hri.getEncodedNameAsBytes(), hri.getTable().getNamespace());
@@ -617,8 +648,8 @@ public class TestWALFactory {
       cols.add(new KeyValue(row, Bytes.toBytes("column"),
         Bytes.toBytes(Integer.toString(i)),
         timestamp, new byte[]{(byte) (i + '0')}));
-      log.append(htd, hri, new WALKey(hri.getEncodedNameAsBytes(), tableName,
-          System.currentTimeMillis(), mvcc), cols, true);
+      log.append(hri, new WALKey(hri.getEncodedNameAsBytes(), tableName,
+          System.currentTimeMillis(), mvcc, scopes), cols, true);
     }
     log.sync();
     assertEquals(COL_COUNT, visitor.increments);
@@ -627,8 +658,8 @@ public class TestWALFactory {
     cols.add(new KeyValue(row, Bytes.toBytes("column"),
       Bytes.toBytes(Integer.toString(11)),
       timestamp, new byte[]{(byte) (11 + '0')}));
-    log.append(htd, hri, new WALKey(hri.getEncodedNameAsBytes(), tableName,
-        System.currentTimeMillis(), mvcc), cols, true);
+    log.append(hri, new WALKey(hri.getEncodedNameAsBytes(), tableName,
+        System.currentTimeMillis(), mvcc, scopes), cols, true);
     log.sync();
     assertEquals(COL_COUNT, visitor.increments);
   }
@@ -722,8 +753,9 @@ public class TestWALFactory {
     }
 
     @Override
-    public void visitLogEntryBeforeWrite(HTableDescriptor htd, WALKey logKey, WALEdit logEdit) {
-      //To change body of implemented methods use File | Settings | File Templates.
+    public void visitLogEntryBeforeWrite(WALKey logKey, WALEdit logEdit) {
+      // To change body of implemented methods use File | Settings | File
+      // Templates.
       increments++;
     }
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/8f2bd060/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALReaderOnSecureWAL.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALReaderOnSecureWAL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALReaderOnSecureWAL.java
index 9ae98c6..beac9e2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALReaderOnSecureWAL.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALReaderOnSecureWAL.java
@@ -21,6 +21,8 @@ import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
 import java.io.IOException;
+import java.util.NavigableMap;
+import java.util.TreeMap;
 
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.logging.LogFactory;
@@ -96,6 +98,11 @@ public class TestWALReaderOnSecureWAL {
     TableName tableName = TableName.valueOf(tblName);
     HTableDescriptor htd = new HTableDescriptor(tableName);
     htd.addFamily(new HColumnDescriptor(tableName.getName()));
+    NavigableMap<byte[], Integer> scopes = new TreeMap<byte[], Integer>(
+        Bytes.BYTES_COMPARATOR);
+    for(byte[] fam : htd.getFamiliesKeys()) {
+      scopes.put(fam, 0);
+    }
     HRegionInfo regioninfo = new HRegionInfo(tableName,
         HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW, false);
     final int total = 10;
@@ -109,8 +116,8 @@ public class TestWALReaderOnSecureWAL {
     for (int i = 0; i < total; i++) {
       WALEdit kvs = new WALEdit();
       kvs.add(new KeyValue(row, family, Bytes.toBytes(i), value));
-      wal.append(htd, regioninfo, new WALKey(regioninfo.getEncodedNameAsBytes(), tableName,
-        System.currentTimeMillis(), mvcc), kvs, true);
+      wal.append(regioninfo, new WALKey(regioninfo.getEncodedNameAsBytes(), tableName,
+        System.currentTimeMillis(), mvcc, scopes), kvs, true);
     }
     wal.sync();
     final Path walPath = DefaultWALProvider.getCurrentFileName(wal);

http://git-wip-us.apache.org/repos/asf/hbase/blob/8f2bd060/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.java
index e138174..4a15d3c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.java
@@ -23,8 +23,10 @@ import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
+import java.util.NavigableMap;
 import java.util.Random;
 import java.util.Set;
+import java.util.TreeMap;
 import java.util.concurrent.TimeUnit;
 
 import org.apache.commons.logging.Log;
@@ -128,6 +130,7 @@ public final class WALPerformanceEvaluation extends Configured implements Tool {
     private final int syncInterval;
     private final HTableDescriptor htd;
     private final Sampler loopSampler;
+    private final NavigableMap<byte[], Integer> scopes;
 
     WALPutBenchmark(final HRegion region, final HTableDescriptor htd,
         final long numIterations, final boolean noSync, final int syncInterval,
@@ -138,6 +141,11 @@ public final class WALPerformanceEvaluation extends Configured implements Tool {
       this.numFamilies = htd.getColumnFamilies().length;
       this.region = region;
       this.htd = htd;
+      scopes = new TreeMap<byte[], Integer>(
+          Bytes.BYTES_COMPARATOR);
+      for(byte[] fam : htd.getFamiliesKeys()) {
+        scopes.put(fam, 0);
+      }
       String spanReceivers = getConf().get("hbase.trace.spanreceiver.classes");
       if (spanReceivers == null || spanReceivers.isEmpty()) {
         loopSampler = Sampler.NEVER;
@@ -180,8 +188,8 @@ public final class WALPerformanceEvaluation extends Configured implements Tool {
           addFamilyMapToWALEdit(put.getFamilyCellMap(), walEdit);
           HRegionInfo hri = region.getRegionInfo();
           final WALKey logkey =
-              new WALKey(hri.getEncodedNameAsBytes(), hri.getTable(), now, mvcc);
-          wal.append(htd, hri, logkey, walEdit, true);
+              new WALKey(hri.getEncodedNameAsBytes(), hri.getTable(), now, mvcc, scopes);
+          wal.append(hri, logkey, walEdit, true);
           if (!this.noSync) {
             if (++lastSync >= this.syncInterval) {
               wal.sync();
@@ -498,8 +506,7 @@ public final class WALPerformanceEvaluation extends Configured implements Tool {
     private int appends = 0;
 
     @Override
-    public void visitLogEntryBeforeWrite(HTableDescriptor htd, WALKey logKey,
-        WALEdit logEdit) {
+    public void visitLogEntryBeforeWrite(WALKey logKey, WALEdit logEdit) {
       this.appends++;
       if (this.appends % whenToRoll == 0) {
         LOG.info("Rolling after " + appends + " edits");
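
Note (illustrative, not part of the patch): every hunk in this change applies the same caller-side pattern -- the per-column-family replication scope is computed once from the table descriptor, handed to the WALKey, and WAL#append is then called without the HTableDescriptor. The sketch below restates that pattern as a standalone helper. The class and method names (ScopedWalAppendExample, appendWithScopes) and the wal/region/htd/mvcc/row/family/qualifier/value parameters are placeholders invented for this example, not identifiers from the commit.

    import java.io.IOException;
    import java.util.NavigableMap;
    import java.util.TreeMap;

    import org.apache.hadoop.hbase.HConstants;
    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.KeyValue;
    import org.apache.hadoop.hbase.regionserver.HRegion;
    import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl;
    import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
    import org.apache.hadoop.hbase.util.Bytes;
    import org.apache.hadoop.hbase.wal.WAL;
    import org.apache.hadoop.hbase.wal.WALKey;

    public final class ScopedWalAppendExample {
      private ScopedWalAppendExample() {}

      /** Sketch of the post-HBASE-15205 append path; all names here are hypothetical. */
      public static long appendWithScopes(WAL wal, HRegion region, HTableDescriptor htd,
          MultiVersionConcurrencyControl mvcc, byte[] row, byte[] family,
          byte[] qualifier, byte[] value) throws IOException {
        // Build the family -> replication scope map once per table instead of
        // deriving it from the HTableDescriptor inside every WAL#append call.
        NavigableMap<byte[], Integer> scopes =
            new TreeMap<byte[], Integer>(Bytes.BYTES_COMPARATOR);
        for (byte[] fam : htd.getFamiliesKeys()) {
          scopes.put(fam, HConstants.REPLICATION_SCOPE_LOCAL); // 0, as in the tests above
        }
        // The scopes travel with the key ...
        WALKey key = new WALKey(region.getRegionInfo().getEncodedNameAsBytes(),
            htd.getTableName(), System.currentTimeMillis(), mvcc, scopes);
        WALEdit edit = new WALEdit();
        edit.add(new KeyValue(row, family, qualifier, System.currentTimeMillis(), value));
        // ... so append no longer takes the table descriptor.
        long txid = wal.append(region.getRegionInfo(), key, edit, true);
        wal.sync(txid);
        return txid;
      }
    }

The tests in this patch always use scope 0 (local); a family whose scope were set to HConstants.REPLICATION_SCOPE_GLOBAL instead would have its edits marked for replication.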