hbase-commits mailing list archives

From zhang...@apache.org
Subject [1/4] hbase git commit: HBASE-16225 Refactor ScanQueryMatcher
Date Tue, 02 Aug 2016 06:08:06 GMT
Repository: hbase
Updated Branches:
  refs/heads/branch-1 c8903cc20 -> dc56aa2d4


http://git-wip-us.apache.org/repos/asf/hbase/blob/dc56aa2d/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java
index ba4ad3c..30ffe0b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java
@@ -28,11 +28,9 @@ import java.util.List;
 import java.util.NavigableSet;
 import java.util.TreeSet;
 
-import junit.framework.TestCase;
-
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeepDeletedCells;
 import org.apache.hadoop.hbase.KeyValue;
@@ -44,6 +42,8 @@ import org.apache.hadoop.hbase.util.EnvironmentEdge;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper;
 import org.junit.experimental.categories.Category;
 
+import junit.framework.TestCase;
+
 // Can't be small as it plays with EnvironmentEdgeManager
 @Category(MediumTests.class)
 public class TestStoreScanner extends TestCase {

http://git-wip-us.apache.org/repos/asf/hbase/blob/dc56aa2d/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactor.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactor.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactor.java
index 328f7d9..0de3dbf 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactor.java
@@ -62,8 +62,8 @@ public class TestCompactor {
     when(r.length()).thenReturn(1L);
     when(r.getBloomFilterType()).thenReturn(BloomType.NONE);
     when(r.getHFileReader()).thenReturn(mock(HFile.Reader.class));
-    when(r.getStoreFileScanner(anyBoolean(), anyBoolean(), anyBoolean(), anyLong(), anyLong()))
-        .thenReturn(mock(StoreFileScanner.class));
+    when(r.getStoreFileScanner(anyBoolean(), anyBoolean(), anyBoolean(), anyLong(), anyLong(),
+      anyBoolean())).thenReturn(mock(StoreFileScanner.class));
     when(sf.getReader()).thenReturn(r);
     when(sf.createReader()).thenReturn(r);
     when(sf.createReader(anyBoolean())).thenReturn(r);

http://git-wip-us.apache.org/repos/asf/hbase/blob/dc56aa2d/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactionPolicy.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactionPolicy.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactionPolicy.java
index a9528a9..00fc6a7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactionPolicy.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactionPolicy.java
@@ -738,9 +738,8 @@ public class TestStripeCompactionPolicy {
     when(r.length()).thenReturn(size);
     when(r.getBloomFilterType()).thenReturn(BloomType.NONE);
     when(r.getHFileReader()).thenReturn(mock(HFile.Reader.class));
-    when(r.getStoreFileScanner(anyBoolean(), anyBoolean(), anyBoolean(), anyLong(), anyLong()))
-        .thenReturn(
-      mock(StoreFileScanner.class));
+    when(r.getStoreFileScanner(anyBoolean(), anyBoolean(), anyBoolean(), anyLong(), anyLong(),
+      anyBoolean())).thenReturn(mock(StoreFileScanner.class));
     when(sf.getReader()).thenReturn(r);
     when(sf.createReader(anyBoolean())).thenReturn(r);
     when(sf.createReader()).thenReturn(r);
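Editor's note: the two hunks above change because the reader's getStoreFileScanner gained a sixth boolean argument in this refactor, so the Mockito stubs must match the new arity; a stub written against the old five-argument form would never match and the mocked call would return null. Below is a minimal, self-contained sketch of that stubbing pattern. The Reader and Scanner interfaces and the parameter names are stand-ins invented for illustration, not the real HBase types.

import static org.mockito.Mockito.anyBoolean;
import static org.mockito.Mockito.anyLong;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

public class StoreFileScannerStubSketch {

  // Stand-ins so the sketch compiles without the HBase test classpath.
  interface Scanner {
  }

  interface Reader {
    // Parameter names are assumptions; only the arity matters for the stub.
    Scanner getStoreFileScanner(boolean cacheBlocks, boolean pread, boolean isCompaction,
        long readPt, long order, boolean extraFlag);
  }

  public static void main(String[] args) {
    Reader r = mock(Reader.class);
    // All six parameters are covered by matchers, mirroring the updated test stubs.
    when(r.getStoreFileScanner(anyBoolean(), anyBoolean(), anyBoolean(), anyLong(), anyLong(),
        anyBoolean())).thenReturn(mock(Scanner.class));
    // Any six-argument call now hits the stub instead of returning null.
    System.out.println(r.getStoreFileScanner(true, false, false, 0L, 0L, true) != null);
  }
}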

http://git-wip-us.apache.org/repos/asf/hbase/blob/dc56aa2d/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/AbstractTestScanQueryMatcher.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/AbstractTestScanQueryMatcher.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/AbstractTestScanQueryMatcher.java
new file mode 100644
index 0000000..db309a9
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/AbstractTestScanQueryMatcher.java
@@ -0,0 +1,78 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.regionserver.querymatcher;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValue.KVComparator;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.Before;
+
+public class AbstractTestScanQueryMatcher {
+
+  protected Configuration conf;
+
+  protected byte[] row1;
+  protected byte[] row2;
+  protected byte[] row3;
+  protected byte[] fam1;
+  protected byte[] fam2;
+  protected byte[] col1;
+  protected byte[] col2;
+  protected byte[] col3;
+  protected byte[] col4;
+  protected byte[] col5;
+
+  protected byte[] data;
+
+  protected Get get;
+
+  protected long ttl = Long.MAX_VALUE;
+  protected KVComparator rowComparator;
+  protected Scan scan;
+
+  @Before
+  public void setUp() throws Exception {
+    this.conf = HBaseConfiguration.create();
+    row1 = Bytes.toBytes("row1");
+    row2 = Bytes.toBytes("row2");
+    row3 = Bytes.toBytes("row3");
+    fam1 = Bytes.toBytes("fam1");
+    fam2 = Bytes.toBytes("fam2");
+    col1 = Bytes.toBytes("col1");
+    col2 = Bytes.toBytes("col2");
+    col3 = Bytes.toBytes("col3");
+    col4 = Bytes.toBytes("col4");
+    col5 = Bytes.toBytes("col5");
+
+    data = Bytes.toBytes("data");
+
+    // Create Get
+    get = new Get(row1);
+    get.addFamily(fam1);
+    get.addColumn(fam2, col2);
+    get.addColumn(fam2, col4);
+    get.addColumn(fam2, col5);
+    this.scan = new Scan(get);
+
+    rowComparator = KeyValue.COMPARATOR;
+  }
+}
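Editor's note: the abstract class above carries only shared @Before fixtures (rows, families, columns, a Get/Scan pair, the comparator and TTL); the concrete matcher tests added later in this patch extend it. A minimal, hypothetical subclass (not part of the commit) showing that a test inherits those fixtures with no setup of its own:

package org.apache.hadoop.hbase.regionserver.querymatcher;

import static org.junit.Assert.assertNotNull;

import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Test;
import org.junit.experimental.categories.Category;

// Hypothetical example only; illustrates reuse of the inherited @Before setUp().
@Category(SmallTests.class)
public class ExampleScanQueryMatcherTest extends AbstractTestScanQueryMatcher {

  @Test
  public void fixturesAreInitialized() {
    assertNotNull(conf);
    assertNotNull(scan);
    // The Get built in setUp() requested fam2:col2, so the family map has an entry for fam2.
    assertNotNull(get.getFamilyMap().get(fam2));
  }
}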

http://git-wip-us.apache.org/repos/asf/hbase/blob/dc56aa2d/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestCompactionScanQueryMatcher.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestCompactionScanQueryMatcher.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestCompactionScanQueryMatcher.java
new file mode 100644
index 0000000..055fe1c
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestCompactionScanQueryMatcher.java
@@ -0,0 +1,99 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.regionserver.querymatcher;
+
+import static org.apache.hadoop.hbase.regionserver.querymatcher.ScanQueryMatcher.MatchCode.INCLUDE;
+import static org.apache.hadoop.hbase.regionserver.querymatcher.ScanQueryMatcher.MatchCode.SKIP;
+import static org.junit.Assert.assertEquals;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.KeepDeletedCells;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValue.Type;
+import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.regionserver.ScanInfo;
+import org.apache.hadoop.hbase.regionserver.ScanType;
+import org.apache.hadoop.hbase.regionserver.querymatcher.ScanQueryMatcher.MatchCode;
+import org.apache.hadoop.hbase.testclassification.RegionServerTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category({ RegionServerTests.class, SmallTests.class })
+public class TestCompactionScanQueryMatcher extends AbstractTestScanQueryMatcher {
+
+  private static final Log LOG = LogFactory.getLog(TestCompactionScanQueryMatcher.class);
+
+  @Test
+  public void testMatch_PartialRangeDropDeletes() throws Exception {
+    // Some ranges.
+    testDropDeletes(row2, row3, new byte[][] { row1, row2, row2, row3 }, INCLUDE, SKIP, SKIP,
+      INCLUDE);
+    testDropDeletes(row2, row3, new byte[][] { row1, row1, row2 }, INCLUDE, INCLUDE, SKIP);
+    testDropDeletes(row2, row3, new byte[][] { row2, row3, row3 }, SKIP, INCLUDE, INCLUDE);
+    testDropDeletes(row1, row3, new byte[][] { row1, row2, row3 }, SKIP, SKIP, INCLUDE);
+    // Open ranges.
+    testDropDeletes(HConstants.EMPTY_START_ROW, row3, new byte[][] { row1, row2, row3 }, SKIP, SKIP,
+      INCLUDE);
+    testDropDeletes(row2, HConstants.EMPTY_END_ROW, new byte[][] { row1, row2, row3 }, INCLUDE,
+      SKIP, SKIP);
+    testDropDeletes(HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW,
+      new byte[][] { row1, row2, row3, row3 }, SKIP, SKIP, SKIP, SKIP);
+
+    // No KVs in range.
+    testDropDeletes(row2, row3, new byte[][] { row1, row1, row3 }, INCLUDE, INCLUDE, INCLUDE);
+    testDropDeletes(row2, row3, new byte[][] { row3, row3 }, INCLUDE, INCLUDE);
+    testDropDeletes(row2, row3, new byte[][] { row1, row1 }, INCLUDE, INCLUDE);
+  }
+
+  private void testDropDeletes(byte[] from, byte[] to, byte[][] rows, MatchCode... expected)
+      throws IOException {
+    long now = EnvironmentEdgeManager.currentTime();
+    // Set time to purge deletes to negative value to avoid it ever happening.
+    ScanInfo scanInfo = new ScanInfo(this.conf, fam2, 0, 1, ttl, KeepDeletedCells.FALSE, -1L,
+        rowComparator);
+
+    CompactionScanQueryMatcher qm = CompactionScanQueryMatcher.create(scanInfo,
+      ScanType.COMPACT_RETAIN_DELETES, Long.MAX_VALUE, HConstants.OLDEST_TIMESTAMP,
+      HConstants.OLDEST_TIMESTAMP, now, from, to, null);
+    List<ScanQueryMatcher.MatchCode> actual = new ArrayList<ScanQueryMatcher.MatchCode>(
+        rows.length);
+    byte[] prevRow = null;
+    for (byte[] row : rows) {
+      if (prevRow == null || !Bytes.equals(prevRow, row)) {
+        qm.setToNewRow(KeyValueUtil.createFirstOnRow(row));
+        prevRow = row;
+      }
+      actual.add(qm.match(new KeyValue(row, fam2, null, now, Type.Delete)));
+    }
+
+    assertEquals(expected.length, actual.size());
+    for (int i = 0; i < expected.length; i++) {
+      LOG.debug("expected " + expected[i] + ", actual " + actual.get(i));
+      assertEquals(expected[i], actual.get(i));
+    }
+  }
+}
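Editor's note: reading the expectations in testMatch_PartialRangeDropDeletes above, the drop-deletes range is half-open: delete markers on rows in [from, to) are dropped (SKIP), while markers on rows before from or at/after to are kept (INCLUDE). A hypothetical extra case, written as it would appear inside that test method, restating this:

// Hypothetical illustration only: with dropDeletesFrom = row2 and dropDeletesTo = row3,
// only the marker on row2 falls in the half-open range and is dropped.
testDropDeletes(row2, row3, new byte[][] { row1, row2, row3 },
  INCLUDE, // row1 is before the range: marker kept
  SKIP,    // row2 is inside [row2, row3): marker dropped
  INCLUDE  // row3 is at the exclusive end: marker kept
);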

http://git-wip-us.apache.org/repos/asf/hbase/blob/dc56aa2d/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestExplicitColumnTracker.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestExplicitColumnTracker.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestExplicitColumnTracker.java
new file mode 100644
index 0000000..e571aa0
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestExplicitColumnTracker.java
@@ -0,0 +1,184 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.regionserver.querymatcher;
+
+import static org.junit.Assert.assertEquals;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.TreeSet;
+
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.regionserver.querymatcher.ScanQueryMatcher.MatchCode;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category(SmallTests.class)
+public class TestExplicitColumnTracker {
+
+  private final byte[] col1 = Bytes.toBytes("col1");
+  private final byte[] col2 = Bytes.toBytes("col2");
+  private final byte[] col3 = Bytes.toBytes("col3");
+  private final byte[] col4 = Bytes.toBytes("col4");
+  private final byte[] col5 = Bytes.toBytes("col5");
+
+  private void runTest(int maxVersions, TreeSet<byte[]> trackColumns, List<byte[]> scannerColumns,
+      List<MatchCode> expected) throws IOException {
+    ColumnTracker exp = new ExplicitColumnTracker(trackColumns, 0, maxVersions, Long.MIN_VALUE);
+
+    // Initialize result
+    List<ScanQueryMatcher.MatchCode> result = new ArrayList<ScanQueryMatcher.MatchCode>();
+
+    long timestamp = 0;
+    // "Match"
+    for (byte[] col : scannerColumns) {
+      result.add(ScanQueryMatcher.checkColumn(exp, col, 0, col.length, ++timestamp,
+        KeyValue.Type.Put.getCode(), false));
+    }
+
+    assertEquals(expected.size(), result.size());
+    for (int i = 0; i < expected.size(); i++) {
+      assertEquals(expected.get(i), result.get(i));
+    }
+  }
+
+  @Test
+  public void testGetSingleVersion() throws IOException {
+    // Create tracker
+    TreeSet<byte[]> columns = new TreeSet<byte[]>(Bytes.BYTES_COMPARATOR);
+    // Looking for every other
+    columns.add(col2);
+    columns.add(col4);
+    List<MatchCode> expected = new ArrayList<ScanQueryMatcher.MatchCode>();
+    expected.add(ScanQueryMatcher.MatchCode.SEEK_NEXT_COL); // col1
+    expected.add(ScanQueryMatcher.MatchCode.INCLUDE_AND_SEEK_NEXT_COL); // col2
+    expected.add(ScanQueryMatcher.MatchCode.SEEK_NEXT_COL); // col3
+    expected.add(ScanQueryMatcher.MatchCode.INCLUDE_AND_SEEK_NEXT_ROW); // col4
+    expected.add(ScanQueryMatcher.MatchCode.SEEK_NEXT_ROW); // col5
+    int maxVersions = 1;
+
+    // Create "Scanner"
+    List<byte[]> scanner = new ArrayList<byte[]>();
+    scanner.add(col1);
+    scanner.add(col2);
+    scanner.add(col3);
+    scanner.add(col4);
+    scanner.add(col5);
+
+    runTest(maxVersions, columns, scanner, expected);
+  }
+
+  @Test
+  public void testGetMultiVersion() throws IOException {
+    // Create tracker
+    TreeSet<byte[]> columns = new TreeSet<byte[]>(Bytes.BYTES_COMPARATOR);
+    // Looking for every other
+    columns.add(col2);
+    columns.add(col4);
+
+    List<ScanQueryMatcher.MatchCode> expected = new ArrayList<ScanQueryMatcher.MatchCode>();
+    expected.add(ScanQueryMatcher.MatchCode.SEEK_NEXT_COL);
+    expected.add(ScanQueryMatcher.MatchCode.SEEK_NEXT_COL);
+    expected.add(ScanQueryMatcher.MatchCode.SEEK_NEXT_COL);
+
+    expected.add(ScanQueryMatcher.MatchCode.INCLUDE); // col2; 1st version
+    expected.add(ScanQueryMatcher.MatchCode.INCLUDE_AND_SEEK_NEXT_COL); // col2; 2nd version
+    expected.add(ScanQueryMatcher.MatchCode.SEEK_NEXT_COL);
+
+    expected.add(ScanQueryMatcher.MatchCode.SEEK_NEXT_COL);
+    expected.add(ScanQueryMatcher.MatchCode.SEEK_NEXT_COL);
+    expected.add(ScanQueryMatcher.MatchCode.SEEK_NEXT_COL);
+
+    expected.add(ScanQueryMatcher.MatchCode.INCLUDE); // col4; 1st version
+    expected.add(ScanQueryMatcher.MatchCode.INCLUDE_AND_SEEK_NEXT_ROW); // col4; 2nd version
+    expected.add(ScanQueryMatcher.MatchCode.SEEK_NEXT_ROW);
+
+    expected.add(ScanQueryMatcher.MatchCode.SEEK_NEXT_ROW);
+    expected.add(ScanQueryMatcher.MatchCode.SEEK_NEXT_ROW);
+    expected.add(ScanQueryMatcher.MatchCode.SEEK_NEXT_ROW);
+    int maxVersions = 2;
+
+    // Create "Scanner"
+    List<byte[]> scanner = new ArrayList<byte[]>();
+    scanner.add(col1);
+    scanner.add(col1);
+    scanner.add(col1);
+    scanner.add(col2);
+    scanner.add(col2);
+    scanner.add(col2);
+    scanner.add(col3);
+    scanner.add(col3);
+    scanner.add(col3);
+    scanner.add(col4);
+    scanner.add(col4);
+    scanner.add(col4);
+    scanner.add(col5);
+    scanner.add(col5);
+    scanner.add(col5);
+
+    // Initialize result
+    runTest(maxVersions, columns, scanner, expected);
+  }
+
+  /**
+   * hbase-2259
+   */
+  @Test
+  public void testStackOverflow() throws IOException {
+    int maxVersions = 1;
+    TreeSet<byte[]> columns = new TreeSet<byte[]>(Bytes.BYTES_COMPARATOR);
+    for (int i = 0; i < 100000; i++) {
+      columns.add(Bytes.toBytes("col" + i));
+    }
+
+    ColumnTracker explicit = new ExplicitColumnTracker(columns, 0, maxVersions, Long.MIN_VALUE);
+    for (int i = 0; i < 100000; i += 2) {
+      byte[] col = Bytes.toBytes("col" + i);
+      ScanQueryMatcher.checkColumn(explicit, col, 0, col.length, 1, KeyValue.Type.Put.getCode(),
+        false);
+    }
+    explicit.reset();
+
+    for (int i = 1; i < 100000; i += 2) {
+      byte[] col = Bytes.toBytes("col" + i);
+      ScanQueryMatcher.checkColumn(explicit, col, 0, col.length, 1, KeyValue.Type.Put.getCode(),
+        false);
+    }
+  }
+
+  /**
+   * Regression test for HBASE-2545
+   */
+  @Test
+  public void testInfiniteLoop() throws IOException {
+    TreeSet<byte[]> columns = new TreeSet<byte[]>(Bytes.BYTES_COMPARATOR);
+    columns.addAll(Arrays.asList(new byte[][] { col2, col3, col5 }));
+    List<byte[]> scanner = Arrays.<byte[]> asList(new byte[][] { col1, col4 });
+    List<ScanQueryMatcher.MatchCode> expected =
+        Arrays.<ScanQueryMatcher.MatchCode> asList(new ScanQueryMatcher.MatchCode[] {
+            ScanQueryMatcher.MatchCode.SEEK_NEXT_COL, ScanQueryMatcher.MatchCode.SEEK_NEXT_COL });
+    runTest(1, columns, scanner, expected);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/dc56aa2d/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestScanDeleteTracker.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestScanDeleteTracker.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestScanDeleteTracker.java
new file mode 100644
index 0000000..fce35bd
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestScanDeleteTracker.java
@@ -0,0 +1,185 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.regionserver.querymatcher;
+
+import static org.junit.Assert.*;
+
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.regionserver.querymatcher.DeleteTracker.DeleteResult;
+import org.apache.hadoop.hbase.testclassification.RegionServerTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category({ RegionServerTests.class, SmallTests.class })
+public class TestScanDeleteTracker {
+
+  private ScanDeleteTracker sdt;
+
+  private long timestamp = 10L;
+
+  @Before
+  public void setUp() throws Exception {
+    sdt = new ScanDeleteTracker();
+  }
+
+  @Test
+  public void testDeletedByDelete() {
+    KeyValue kv = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("f"), Bytes.toBytes("qualifier"),
+        timestamp, KeyValue.Type.Delete);
+    sdt.add(kv);
+    DeleteResult ret = sdt.isDeleted(kv);
+    assertEquals(DeleteResult.VERSION_DELETED, ret);
+  }
+
+  @Test
+  public void testDeletedByDeleteColumn() {
+    KeyValue kv = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("f"), Bytes.toBytes("qualifier"),
+        timestamp, KeyValue.Type.DeleteColumn);
+    sdt.add(kv);
+    timestamp -= 5;
+    kv = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("f"), Bytes.toBytes("qualifier"),
+        timestamp, KeyValue.Type.DeleteColumn);
+    DeleteResult ret = sdt.isDeleted(kv);
+    assertEquals(DeleteResult.COLUMN_DELETED, ret);
+  }
+
+  @Test
+  public void testDeletedByDeleteFamily() {
+    KeyValue kv = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("f"), Bytes.toBytes("qualifier"),
+        timestamp, KeyValue.Type.DeleteFamily);
+    sdt.add(kv);
+    timestamp -= 5;
+    kv = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("f"), Bytes.toBytes("qualifier"),
+        timestamp, KeyValue.Type.DeleteColumn);
+    DeleteResult ret = sdt.isDeleted(kv);
+    assertEquals(DeleteResult.FAMILY_DELETED, ret);
+  }
+
+  @Test
+  public void testDeletedByDeleteFamilyVersion() {
+    byte[] qualifier1 = Bytes.toBytes("qualifier1");
+    byte[] qualifier2 = Bytes.toBytes("qualifier2");
+    byte[] qualifier3 = Bytes.toBytes("qualifier3");
+    byte[] qualifier4 = Bytes.toBytes("qualifier4");
+    KeyValue kv = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("f"), null, timestamp,
+        KeyValue.Type.DeleteFamilyVersion);
+    sdt.add(kv);
+    kv = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("f"), qualifier1, timestamp,
+        KeyValue.Type.DeleteFamilyVersion);
+    DeleteResult ret = sdt.isDeleted(kv);
+    assertEquals(DeleteResult.FAMILY_VERSION_DELETED, ret);
+    kv = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("f"), qualifier2, timestamp,
+        KeyValue.Type.DeleteFamilyVersion);
+    ret = sdt.isDeleted(kv);
+    assertEquals(DeleteResult.FAMILY_VERSION_DELETED, ret);
+    kv = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("f"), qualifier3, timestamp,
+        KeyValue.Type.DeleteFamilyVersion);
+    ret = sdt.isDeleted(kv);
+    assertEquals(DeleteResult.FAMILY_VERSION_DELETED, ret);
+    kv = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("f"), qualifier4, timestamp,
+        KeyValue.Type.DeleteFamilyVersion);
+    ret = sdt.isDeleted(kv);
+    assertEquals(DeleteResult.FAMILY_VERSION_DELETED, ret);
+    kv = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("f"), qualifier1, timestamp + 3,
+        KeyValue.Type.DeleteFamilyVersion);
+    ret = sdt.isDeleted(kv);
+    assertEquals(DeleteResult.NOT_DELETED, ret);
+    kv = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("f"), qualifier2, timestamp - 2,
+        KeyValue.Type.DeleteFamilyVersion);
+    ret = sdt.isDeleted(kv);
+    assertEquals(DeleteResult.NOT_DELETED, ret);
+    kv = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("f"), qualifier3, timestamp - 5,
+        KeyValue.Type.DeleteFamilyVersion);
+    ret = sdt.isDeleted(kv);
+    assertEquals(DeleteResult.NOT_DELETED, ret);
+    kv = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("f"), qualifier4, timestamp + 8,
+        KeyValue.Type.DeleteFamilyVersion);
+    ret = sdt.isDeleted(kv);
+    assertEquals(DeleteResult.NOT_DELETED, ret);
+  }
+
+  @Test
+  public void testDeleteDeleteColumn() {
+    byte[] qualifier = Bytes.toBytes("qualifier");
+    KeyValue kv = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("f"), qualifier, timestamp,
+        KeyValue.Type.Delete);
+    sdt.add(kv);
+
+    timestamp -= 5;
+    kv = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("f"), qualifier, timestamp,
+        KeyValue.Type.DeleteColumn);
+    sdt.add(kv);
+
+    timestamp -= 5;
+    kv = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("f"), qualifier, timestamp,
+        KeyValue.Type.DeleteColumn);
+    DeleteResult ret = sdt.isDeleted(kv);
+    assertEquals(DeleteResult.COLUMN_DELETED, ret);
+  }
+
+  @Test
+  public void testDeleteColumnDelete() {
+    byte[] qualifier = Bytes.toBytes("qualifier");
+    KeyValue kv = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("f"), qualifier, timestamp,
+        KeyValue.Type.DeleteColumn);
+    sdt.add(kv);
+
+    qualifier = Bytes.toBytes("qualifier1");
+    kv = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("f"), qualifier, timestamp,
+        KeyValue.Type.Delete);
+    sdt.add(kv);
+
+    DeleteResult ret = sdt.isDeleted(kv);
+    assertEquals(DeleteResult.VERSION_DELETED, ret);
+  }
+
+  // Testing new way where we save the Delete in case of a Delete for specific
+  // ts, could have just added the last line to the first test, but rather keep
+  // them separated
+  @Test
+  public void testDeleteKeepDelete() {
+    byte[] qualifier = Bytes.toBytes("qualifier");
+    KeyValue kv = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("f"), qualifier, timestamp,
+        KeyValue.Type.Delete);
+    sdt.add(kv);
+    sdt.isDeleted(kv);
+    assertEquals(false, sdt.isEmpty());
+  }
+
+  @Test
+  public void testDeleteKeepVersionZero() {
+    byte[] qualifier = Bytes.toBytes("qualifier");
+
+    long deleteTimestamp = 10;
+    long valueTimestamp = 0;
+
+    sdt.reset();
+    KeyValue kv = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("f"), qualifier, deleteTimestamp,
+        KeyValue.Type.Delete);
+    sdt.add(kv);
+    kv = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("f"), qualifier, valueTimestamp,
+        KeyValue.Type.Delete);
+    DeleteResult ret = sdt.isDeleted(kv);
+    assertEquals(DeleteResult.NOT_DELETED, ret);
+  }
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/dc56aa2d/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestScanWildcardColumnTracker.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestScanWildcardColumnTracker.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestScanWildcardColumnTracker.java
new file mode 100644
index 0000000..01e9b31
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestScanWildcardColumnTracker.java
@@ -0,0 +1,125 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.regionserver.querymatcher;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.regionserver.querymatcher.ScanQueryMatcher.MatchCode;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category(SmallTests.class)
+public class TestScanWildcardColumnTracker {
+
+  final static int VERSIONS = 2;
+
+  @Test
+  public void testCheckColumnOk() throws IOException {
+    ScanWildcardColumnTracker tracker = new ScanWildcardColumnTracker(0, VERSIONS, Long.MIN_VALUE);
+
+    // Create list of qualifiers
+    List<byte[]> qualifiers = new ArrayList<byte[]>();
+    qualifiers.add(Bytes.toBytes("qualifier1"));
+    qualifiers.add(Bytes.toBytes("qualifier2"));
+    qualifiers.add(Bytes.toBytes("qualifier3"));
+    qualifiers.add(Bytes.toBytes("qualifier4"));
+
+    // Setting up expected result
+    List<MatchCode> expected = new ArrayList<MatchCode>();
+    expected.add(ScanQueryMatcher.MatchCode.INCLUDE);
+    expected.add(ScanQueryMatcher.MatchCode.INCLUDE);
+    expected.add(ScanQueryMatcher.MatchCode.INCLUDE);
+    expected.add(ScanQueryMatcher.MatchCode.INCLUDE);
+
+    List<ScanQueryMatcher.MatchCode> actual = new ArrayList<MatchCode>();
+
+    for (byte[] qualifier : qualifiers) {
+      ScanQueryMatcher.MatchCode mc = ScanQueryMatcher.checkColumn(tracker, qualifier, 0,
+        qualifier.length, 1, KeyValue.Type.Put.getCode(), false);
+      actual.add(mc);
+    }
+
+    // Compare actual with expected
+    for (int i = 0; i < expected.size(); i++) {
+      assertEquals(expected.get(i), actual.get(i));
+    }
+  }
+
+  @Test
+  public void testCheckColumnEnforceVersions() throws IOException {
+    ScanWildcardColumnTracker tracker = new ScanWildcardColumnTracker(0, VERSIONS, Long.MIN_VALUE);
+
+    // Create list of qualifiers
+    List<byte[]> qualifiers = new ArrayList<byte[]>();
+    qualifiers.add(Bytes.toBytes("qualifier1"));
+    qualifiers.add(Bytes.toBytes("qualifier1"));
+    qualifiers.add(Bytes.toBytes("qualifier1"));
+    qualifiers.add(Bytes.toBytes("qualifier2"));
+
+    // Setting up expected result
+    List<ScanQueryMatcher.MatchCode> expected = new ArrayList<MatchCode>();
+    expected.add(ScanQueryMatcher.MatchCode.INCLUDE);
+    expected.add(ScanQueryMatcher.MatchCode.INCLUDE);
+    expected.add(ScanQueryMatcher.MatchCode.SEEK_NEXT_COL);
+    expected.add(ScanQueryMatcher.MatchCode.INCLUDE);
+
+    List<MatchCode> actual = new ArrayList<ScanQueryMatcher.MatchCode>();
+
+    long timestamp = 0;
+    for (byte[] qualifier : qualifiers) {
+      MatchCode mc = ScanQueryMatcher.checkColumn(tracker, qualifier, 0, qualifier.length,
+        ++timestamp, KeyValue.Type.Put.getCode(), false);
+      actual.add(mc);
+    }
+
+    // Compare actual with expected
+    for (int i = 0; i < expected.size(); i++) {
+      assertEquals(expected.get(i), actual.get(i));
+    }
+  }
+
+  @Test
+  public void DisabledTestCheckColumnWrongOrder() {
+    ScanWildcardColumnTracker tracker = new ScanWildcardColumnTracker(0, VERSIONS, Long.MIN_VALUE);
+
+    // Create list of qualifiers
+    List<byte[]> qualifiers = new ArrayList<byte[]>();
+    qualifiers.add(Bytes.toBytes("qualifier2"));
+    qualifiers.add(Bytes.toBytes("qualifier1"));
+
+    try {
+      for (byte[] qualifier : qualifiers) {
+        ScanQueryMatcher.checkColumn(tracker, qualifier, 0, qualifier.length, 1,
+          KeyValue.Type.Put.getCode(), false);
+      }
+      fail();
+    } catch (IOException e) {
+      // expected
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/dc56aa2d/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestUserScanQueryMatcher.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestUserScanQueryMatcher.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestUserScanQueryMatcher.java
new file mode 100644
index 0000000..ec38d71
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestUserScanQueryMatcher.java
@@ -0,0 +1,236 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.regionserver.querymatcher;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.KeepDeletedCells;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.regionserver.ScanInfo;
+import org.apache.hadoop.hbase.regionserver.querymatcher.ScanQueryMatcher.MatchCode;
+import org.apache.hadoop.hbase.testclassification.RegionServerTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category({ RegionServerTests.class, SmallTests.class })
+public class TestUserScanQueryMatcher extends AbstractTestScanQueryMatcher {
+
+  private static final Log LOG = LogFactory.getLog(TestUserScanQueryMatcher.class);
+
+  /**
+   * This is a cryptic test. It is checking that we don't include a fake cell, one that has a
+   * timestamp of {@link HConstants#OLDEST_TIMESTAMP}. See HBASE-16074 for background.
+   * @throws IOException
+   */
+  @Test
+  public void testNeverIncludeFakeCell() throws IOException {
+    long now = EnvironmentEdgeManager.currentTime();
+    // Do with fam2 which has a col2 qualifier.
+    UserScanQueryMatcher qm = UserScanQueryMatcher.create(scan,
+      new ScanInfo(this.conf, fam2, 10, 1, ttl, KeepDeletedCells.FALSE, 0, rowComparator),
+      get.getFamilyMap().get(fam2), now - ttl, now, null);
+    Cell kv = new KeyValue(row1, fam2, col2, 1, data);
+    Cell cell = KeyValueUtil.createLastOnRowCol(kv);
+    qm.setToNewRow(kv);
+    MatchCode code = qm.match(cell);
+    assertFalse(code.compareTo(MatchCode.SEEK_NEXT_COL) != 0);
+  }
+
+  @Test
+  public void testMatchExplicitColumns() throws IOException {
+    // Moving up from the Tracker by using Gets and List<KeyValue> instead
+    // of just byte []
+
+    // Expected result
+    List<MatchCode> expected = new ArrayList<ScanQueryMatcher.MatchCode>();
+    expected.add(ScanQueryMatcher.MatchCode.SEEK_NEXT_COL);
+    expected.add(ScanQueryMatcher.MatchCode.INCLUDE_AND_SEEK_NEXT_COL);
+    expected.add(ScanQueryMatcher.MatchCode.SEEK_NEXT_COL);
+    expected.add(ScanQueryMatcher.MatchCode.INCLUDE_AND_SEEK_NEXT_COL);
+    expected.add(ScanQueryMatcher.MatchCode.INCLUDE_AND_SEEK_NEXT_ROW);
+    expected.add(ScanQueryMatcher.MatchCode.DONE);
+
+    long now = EnvironmentEdgeManager.currentTime();
+    // 2,4,5
+    UserScanQueryMatcher qm = UserScanQueryMatcher.create(scan,
+      new ScanInfo(this.conf, fam2, 0, 1, ttl, KeepDeletedCells.FALSE, 0, rowComparator),
+      get.getFamilyMap().get(fam2), now - ttl, now, null);
+
+    List<KeyValue> memstore = new ArrayList<KeyValue>();
+    memstore.add(new KeyValue(row1, fam2, col1, 1, data));
+    memstore.add(new KeyValue(row1, fam2, col2, 1, data));
+    memstore.add(new KeyValue(row1, fam2, col3, 1, data));
+    memstore.add(new KeyValue(row1, fam2, col4, 1, data));
+    memstore.add(new KeyValue(row1, fam2, col5, 1, data));
+
+    memstore.add(new KeyValue(row2, fam1, col1, data));
+
+    List<ScanQueryMatcher.MatchCode> actual = new ArrayList<ScanQueryMatcher.MatchCode>();
+    KeyValue k = memstore.get(0);
+    qm.setToNewRow(k);
+
+    for (KeyValue kv : memstore) {
+      actual.add(qm.match(kv));
+    }
+
+    assertEquals(expected.size(), actual.size());
+    for (int i = 0; i < expected.size(); i++) {
+      LOG.debug("expected " + expected.get(i) + ", actual " + actual.get(i));
+      assertEquals(expected.get(i), actual.get(i));
+    }
+  }
+
+  @Test
+  public void testMatch_Wildcard() throws IOException {
+    // Moving up from the Tracker by using Gets and List<KeyValue> instead
+    // of just byte []
+
+    // Expected result
+    List<MatchCode> expected = new ArrayList<ScanQueryMatcher.MatchCode>();
+    expected.add(ScanQueryMatcher.MatchCode.INCLUDE);
+    expected.add(ScanQueryMatcher.MatchCode.INCLUDE);
+    expected.add(ScanQueryMatcher.MatchCode.INCLUDE);
+    expected.add(ScanQueryMatcher.MatchCode.INCLUDE);
+    expected.add(ScanQueryMatcher.MatchCode.INCLUDE);
+    expected.add(ScanQueryMatcher.MatchCode.DONE);
+
+    long now = EnvironmentEdgeManager.currentTime();
+    UserScanQueryMatcher qm = UserScanQueryMatcher.create(scan,
+      new ScanInfo(this.conf, fam2, 0, 1, ttl, KeepDeletedCells.FALSE, 0, rowComparator), null,
+      now - ttl, now, null);
+
+    List<KeyValue> memstore = new ArrayList<KeyValue>();
+    memstore.add(new KeyValue(row1, fam2, col1, 1, data));
+    memstore.add(new KeyValue(row1, fam2, col2, 1, data));
+    memstore.add(new KeyValue(row1, fam2, col3, 1, data));
+    memstore.add(new KeyValue(row1, fam2, col4, 1, data));
+    memstore.add(new KeyValue(row1, fam2, col5, 1, data));
+    memstore.add(new KeyValue(row2, fam1, col1, 1, data));
+
+    List<ScanQueryMatcher.MatchCode> actual = new ArrayList<ScanQueryMatcher.MatchCode>();
+
+    KeyValue k = memstore.get(0);
+    qm.setToNewRow(k);
+
+    for (KeyValue kv : memstore) {
+      actual.add(qm.match(kv));
+    }
+
+    assertEquals(expected.size(), actual.size());
+    for (int i = 0; i < expected.size(); i++) {
+      LOG.debug("expected " + expected.get(i) + ", actual " + actual.get(i));
+      assertEquals(expected.get(i), actual.get(i));
+    }
+  }
+
+  /**
+   * Verify that {@link ScanQueryMatcher} only skips expired KeyValue instances and does not exit
+   * early from the row (skipping later non-expired KeyValues). This version mimics a Get with
+   * explicitly specified column qualifiers.
+   * @throws IOException
+   */
+  @Test
+  public void testMatch_ExpiredExplicit() throws IOException {
+
+    long testTTL = 1000;
+    MatchCode[] expected = new MatchCode[] { ScanQueryMatcher.MatchCode.SEEK_NEXT_COL,
+        ScanQueryMatcher.MatchCode.INCLUDE_AND_SEEK_NEXT_COL,
+        ScanQueryMatcher.MatchCode.SEEK_NEXT_COL,
+        ScanQueryMatcher.MatchCode.INCLUDE_AND_SEEK_NEXT_COL,
+        ScanQueryMatcher.MatchCode.SEEK_NEXT_ROW, ScanQueryMatcher.MatchCode.DONE };
+
+    long now = EnvironmentEdgeManager.currentTime();
+    UserScanQueryMatcher qm = UserScanQueryMatcher.create(scan,
+      new ScanInfo(this.conf, fam2, 0, 1, testTTL, KeepDeletedCells.FALSE, 0, rowComparator),
+      get.getFamilyMap().get(fam2), now - testTTL, now, null);
+
+    KeyValue[] kvs = new KeyValue[] { new KeyValue(row1, fam2, col1, now - 100, data),
+        new KeyValue(row1, fam2, col2, now - 50, data),
+        new KeyValue(row1, fam2, col3, now - 5000, data),
+        new KeyValue(row1, fam2, col4, now - 500, data),
+        new KeyValue(row1, fam2, col5, now - 10000, data),
+        new KeyValue(row2, fam1, col1, now - 10, data) };
+
+    KeyValue k = kvs[0];
+    qm.setToNewRow(k);
+
+    List<MatchCode> actual = new ArrayList<MatchCode>(kvs.length);
+    for (KeyValue kv : kvs) {
+      actual.add(qm.match(kv));
+    }
+
+    assertEquals(expected.length, actual.size());
+    for (int i = 0; i < expected.length; i++) {
+      LOG.debug("expected " + expected[i] + ", actual " + actual.get(i));
+      assertEquals(expected[i], actual.get(i));
+    }
+  }
+
+  /**
+   * Verify that {@link ScanQueryMatcher} only skips expired KeyValue instances and does not exit
+   * early from the row (skipping later non-expired KeyValues). This version mimics a Get with
+   * wildcard-inferred column qualifiers.
+   * @throws IOException
+   */
+  @Test
+  public void testMatch_ExpiredWildcard() throws IOException {
+
+    long testTTL = 1000;
+    MatchCode[] expected = new MatchCode[] { ScanQueryMatcher.MatchCode.INCLUDE,
+        ScanQueryMatcher.MatchCode.INCLUDE, ScanQueryMatcher.MatchCode.SEEK_NEXT_COL,
+        ScanQueryMatcher.MatchCode.INCLUDE, ScanQueryMatcher.MatchCode.SEEK_NEXT_COL,
+        ScanQueryMatcher.MatchCode.DONE };
+
+    long now = EnvironmentEdgeManager.currentTime();
+    UserScanQueryMatcher qm = UserScanQueryMatcher.create(scan,
+      new ScanInfo(this.conf, fam2, 0, 1, testTTL, KeepDeletedCells.FALSE, 0, rowComparator), null,
+      now - testTTL, now, null);
+
+    KeyValue[] kvs = new KeyValue[] { new KeyValue(row1, fam2, col1, now - 100, data),
+        new KeyValue(row1, fam2, col2, now - 50, data),
+        new KeyValue(row1, fam2, col3, now - 5000, data),
+        new KeyValue(row1, fam2, col4, now - 500, data),
+        new KeyValue(row1, fam2, col5, now - 10000, data),
+        new KeyValue(row2, fam1, col1, now - 10, data) };
+    KeyValue k = kvs[0];
+    qm.setToNewRow(k);
+
+    List<ScanQueryMatcher.MatchCode> actual = new ArrayList<ScanQueryMatcher.MatchCode>(kvs.length);
+    for (KeyValue kv : kvs) {
+      actual.add(qm.match(kv));
+    }
+
+    assertEquals(expected.length, actual.size());
+    for (int i = 0; i < expected.length; i++) {
+      LOG.debug("expected " + expected[i] + ", actual " + actual.get(i));
+      assertEquals(expected[i], actual.get(i));
+    }
+  }
+}

