hbase-commits mailing list archives

From: jg...@apache.org
Subject: svn commit: r1033777 - in /hbase/trunk: ./ src/main/java/org/apache/hadoop/hbase/ src/main/java/org/apache/hadoop/hbase/io/ src/test/java/org/apache/hadoop/hbase/ src/test/java/org/apache/hadoop/hbase/filter/
Date: Wed, 10 Nov 2010 23:35:10 GMT
Author: jgray
Date: Wed Nov 10 23:35:09 2010
New Revision: 1033777

URL: http://svn.apache.org/viewvc?rev=1033777&view=rev
Log:
HBASE-3211  Key (Index) Only Fetches

Modified:
    hbase/trunk/CHANGES.txt
    hbase/trunk/src/main/java/org/apache/hadoop/hbase/KeyValue.java
    hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java
    hbase/trunk/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java
    hbase/trunk/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java

Modified: hbase/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hbase/trunk/CHANGES.txt?rev=1033777&r1=1033776&r2=1033777&view=diff
==============================================================================
--- hbase/trunk/CHANGES.txt (original)
+++ hbase/trunk/CHANGES.txt Wed Nov 10 23:35:09 2010
@@ -1205,6 +1205,7 @@ Release 0.90.0 - Unreleased
    HBASE-3013  Tool to verify data in two clusters
    HBASE-2896  Retain assignment information between cluster
                shutdown/startup
+   HBASE-3211  Key (Index) Only Fetches
 
 
   OPTIMIZATIONS

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/KeyValue.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/KeyValue.java?rev=1033777&r1=1033776&r2=1033777&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/KeyValue.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/KeyValue.java Wed Nov 10 23:35:09 2010
@@ -1189,6 +1189,25 @@ public class KeyValue implements Writabl
   }
 
   /**
+   * Converts this KeyValue to contain only the key portion; the value is
+   * replaced with a zero-length (empty) value.  This method makes a full copy
+   * into a new backing byte array and does not modify the original byte array.
+   * <p>
+   * This method is used by {@link KeyOnlyFilter} and is an advanced feature
+   * of KeyValue; proceed with caution.
+   */
+  public void convertToKeyOnly() {
+    // KV format:  <keylen/4><valuelen/4><key/keylen><value/valuelen>
+    // Rebuild as: <keylen/4><0/4><key/keylen>
+    byte [] newBuffer = new byte[getKeyLength() + (2 * Bytes.SIZEOF_INT)];
+    System.arraycopy(this.bytes, this.offset, newBuffer, 0, newBuffer.length);
+    Bytes.putInt(newBuffer, Bytes.SIZEOF_INT, 0);
+    this.bytes = newBuffer;
+    this.offset = 0;
+    this.length = newBuffer.length;
+  }
+
+  /**
    * Splits a column in family:qualifier form into separate byte arrays.
    * <p>
    * Not recommend to be used as this is old-style API.
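
For reference, convertToKeyOnly() relies on the serialized KeyValue layout <keylen:4><valuelen:4><key:keylen><value:valuelen>: it copies the two length ints plus the key bytes into a new buffer and overwrites the value-length field with 0. The standalone sketch below (illustration only, not HBase code; class and method names are invented) performs the same buffer rewrite with plain java.nio:

    import java.nio.ByteBuffer;

    // Illustration only; mirrors the arithmetic in convertToKeyOnly() above.
    public class KeyOnlySketch {
      static byte[] toKeyOnly(byte[] kv, int offset) {
        // First int of a serialized KeyValue is the key length.
        int keyLen = ByteBuffer.wrap(kv, offset, 4).getInt();
        // New buffer holds <keylen:4><valuelen:4><key:keylen> and no value bytes.
        byte[] out = new byte[2 * Integer.BYTES + keyLen];
        System.arraycopy(kv, offset, out, 0, out.length);
        // Second int is the value length; overwrite it with 0.
        ByteBuffer.wrap(out, Integer.BYTES, Integer.BYTES).putInt(0);
        return out;
      }
    }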

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java?rev=1033777&r1=1033776&r2=1033777&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java Wed Nov 10 23:35:09 2010
@@ -61,6 +61,7 @@ import org.apache.hadoop.hbase.filter.Co
 import org.apache.hadoop.hbase.filter.DependentColumnFilter;
 import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
 import org.apache.hadoop.hbase.filter.InclusiveStopFilter;
+import org.apache.hadoop.hbase.filter.KeyOnlyFilter;
 import org.apache.hadoop.hbase.filter.PageFilter;
 import org.apache.hadoop.hbase.filter.PrefixFilter;
 import org.apache.hadoop.hbase.filter.QualifierFilter;
@@ -201,6 +202,8 @@ public class HbaseObjectWritable impleme
 
     addToMap(Increment.class, code++);
 
+    addToMap(KeyOnlyFilter.class, code++);
+
   }
 
   private Class<?> declaredClass;
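
Filters attached to a Scan travel to the region servers over HBase RPC, and HbaseObjectWritable serializes them by a small integer class code rather than by class name, so each new filter class has to be registered; appending the new entry with code++ at the end keeps the codes of previously registered classes stable on the wire. A minimal sketch of that code-to-class registry pattern (hypothetical names, not the actual HbaseObjectWritable internals):

    import java.util.HashMap;
    import java.util.Map;

    // Illustration of the code<->class registry idea; names are invented.
    final class ClassCodeRegistry {
      private static final Map<Integer, Class<?>> CODE_TO_CLASS =
          new HashMap<Integer, Class<?>>();
      private static final Map<Class<?>, Integer> CLASS_TO_CODE =
          new HashMap<Class<?>, Integer>();

      // Register a class under a stable integer code (mirrors addToMap above).
      static void addToMap(Class<?> clazz, int code) {
        CODE_TO_CLASS.put(code, clazz);
        CLASS_TO_CODE.put(clazz, code);
      }

      static Integer codeFor(Class<?> clazz) {
        return CLASS_TO_CODE.get(clazz);  // null if the class was never registered
      }

      static Class<?> classFor(int code) {
        return CODE_TO_CLASS.get(code);
      }
    }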

Modified: hbase/trunk/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java?rev=1033777&r1=1033776&r2=1033777&view=diff
==============================================================================
--- hbase/trunk/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java (original)
+++ hbase/trunk/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java Wed Nov 10 23:35:09 2010
@@ -322,4 +322,34 @@ public class TestKeyValue extends TestCa
     assertKVLess(c, kvA_1, lastOnRowA);
     assertKVLess(c, firstOnRowA, lastOnRowA);
   }
+
+  public void testConvertToKeyOnly() throws Exception {
+    long ts = 1;
+    byte [] value = Bytes.toBytes("a real value");
+    byte [] evalue = new byte[0]; // empty value
+
+    // verify key with a non-empty value works
+    KeyValue kv1 = new KeyValue(rowA, family, qualA, ts, value);
+    KeyValue kv1ko = kv1.clone();
+    assertTrue(kv1.equals(kv1ko));
+    kv1ko.convertToKeyOnly();
+    // keys are still the same
+    assertTrue(kv1.equals(kv1ko));
+    // but values are not
+    assertTrue(kv1.getValue().length != 0);
+    assertTrue(kv1ko.getValue().length == 0);
+
+    // verify key with an already-empty value works
+    KeyValue kv2 = new KeyValue(rowA, family, qualA, ts, evalue);
+    KeyValue kv2ko = kv2.clone();
+    assertTrue(kv2.equals(kv2ko));
+    kv2ko.convertToKeyOnly();
+    // they should still be equal
+    assertTrue(kv2.equals(kv2ko));
+    // but they should have different underlying byte arrays
+    assertFalse(kv2.getBuffer() == kv2ko.getBuffer());
+    // both with 0 length values
+    assertTrue(kv2.getValue().length == 0);
+    assertTrue(kv2ko.getValue().length == 0);
+  }
 }

Modified: hbase/trunk/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java?rev=1033777&r1=1033776&r2=1033777&view=diff
==============================================================================
--- hbase/trunk/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java (original)
+++ hbase/trunk/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java Wed Nov 10 23:35:09 2010
@@ -1255,8 +1255,6 @@ public class TestFilter extends HBaseTes
         " rows", expectedRows, i);
   }
 
-
-
   private void verifyScanNoEarlyOut(Scan s, long expectedRows,
       long expectedKeys)
   throws IOException {
@@ -1314,6 +1312,42 @@ public class TestFilter extends HBaseTes
         kvs.length, idx);
   }
 
+  private void verifyScanFullNoValues(Scan s, KeyValue [] kvs)
+  throws IOException {
+    InternalScanner scanner = this.region.getScanner(s);
+    List<KeyValue> results = new ArrayList<KeyValue>();
+    int row = 0;
+    int idx = 0;
+    for (boolean done = true; done; row++) {
+      done = scanner.next(results);
+      Arrays.sort(results.toArray(new KeyValue[results.size()]),
+          KeyValue.COMPARATOR);
+      if(results.isEmpty()) break;
+      assertTrue("Scanned too many keys! Only expected " + kvs.length +
+          " total but already scanned " + (results.size() + idx) +
+          (results.isEmpty() ? "" : "(" + results.get(0).toString() + ")"),
+          kvs.length >= idx + results.size());
+      for(KeyValue kv : results) {
+        LOG.info("row=" + row + ", result=" + kv.toString() +
+            ", match=" + kvs[idx].toString());
+        assertTrue("Row mismatch",
+            Bytes.equals(kv.getRow(), kvs[idx].getRow()));
+        assertTrue("Family mismatch",
+            Bytes.equals(kv.getFamily(), kvs[idx].getFamily()));
+        assertTrue("Qualifier mismatch",
+            Bytes.equals(kv.getQualifier(), kvs[idx].getQualifier()));
+        assertFalse("Value match (expecting no value in result)",
+            Bytes.equals(kv.getValue(), kvs[idx].getValue()));
+        assertTrue("Value in result is not empty", kv.getValue().length == 0);
+        idx++;
+      }
+      results.clear();
+    }
+    LOG.info("Looked at " + row + " rows with " + idx + " keys");
+    assertEquals("Expected " + kvs.length + " total keys but scanned " + idx,
+        kvs.length, idx);
+  }
+
 
   public void testColumnPaginationFilter() throws Exception {
 
@@ -1406,4 +1440,61 @@ public class TestFilter extends HBaseTes
       verifyScan(s, expectedRows, 0);
       this.verifyScanFull(s, expectedKVs4);
     }
+
+  public void testKeyOnlyFilter() throws Exception {
+
+    // KVs in first 6 rows
+    KeyValue [] expectedKVs = {
+      // testRowOne-0
+      new KeyValue(ROWS_ONE[0], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]),
+      new KeyValue(ROWS_ONE[0], FAMILIES[0], QUALIFIERS_ONE[2], VALUES[0]),
+      new KeyValue(ROWS_ONE[0], FAMILIES[0], QUALIFIERS_ONE[3], VALUES[0]),
+      new KeyValue(ROWS_ONE[0], FAMILIES[1], QUALIFIERS_ONE[0], VALUES[0]),
+      new KeyValue(ROWS_ONE[0], FAMILIES[1], QUALIFIERS_ONE[2], VALUES[0]),
+      new KeyValue(ROWS_ONE[0], FAMILIES[1], QUALIFIERS_ONE[3], VALUES[0]),
+      // testRowOne-2
+      new KeyValue(ROWS_ONE[2], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]),
+      new KeyValue(ROWS_ONE[2], FAMILIES[0], QUALIFIERS_ONE[2], VALUES[0]),
+      new KeyValue(ROWS_ONE[2], FAMILIES[0], QUALIFIERS_ONE[3], VALUES[0]),
+      new KeyValue(ROWS_ONE[2], FAMILIES[1], QUALIFIERS_ONE[0], VALUES[0]),
+      new KeyValue(ROWS_ONE[2], FAMILIES[1], QUALIFIERS_ONE[2], VALUES[0]),
+      new KeyValue(ROWS_ONE[2], FAMILIES[1], QUALIFIERS_ONE[3], VALUES[0]),
+      // testRowOne-3
+      new KeyValue(ROWS_ONE[3], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]),
+      new KeyValue(ROWS_ONE[3], FAMILIES[0], QUALIFIERS_ONE[2], VALUES[0]),
+      new KeyValue(ROWS_ONE[3], FAMILIES[0], QUALIFIERS_ONE[3], VALUES[0]),
+      new KeyValue(ROWS_ONE[3], FAMILIES[1], QUALIFIERS_ONE[0], VALUES[0]),
+      new KeyValue(ROWS_ONE[3], FAMILIES[1], QUALIFIERS_ONE[2], VALUES[0]),
+      new KeyValue(ROWS_ONE[3], FAMILIES[1], QUALIFIERS_ONE[3], VALUES[0]),
+      // testRowTwo-0
+      new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
+      new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]),
+      new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
+      new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
+      new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]),
+      new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]),
+      // testRowTwo-2
+      new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
+      new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]),
+      new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
+      new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
+      new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]),
+      new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]),
+      // testRowTwo-3
+      new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
+      new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]),
+      new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
+      new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
+      new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]),
+      new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1])
+    };
+
+    // Grab all 6 rows
+    long expectedRows = 6;
+    long expectedKeys = this.colsPerRow;
+    Scan s = new Scan();
+    s.setFilter(new KeyOnlyFilter());
+    verifyScan(s, expectedRows, expectedKeys);
+    verifyScanFullNoValues(s, expectedKVs);
+  }
 }
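
On the client side, the new filter lets a scan return keys (row, family, qualifier, timestamp) while skipping value bytes entirely, which is handy for counting rows or checking existence. A hedged usage sketch against the 0.90-era client API (the table name and the example class name are placeholders, not from this commit):

    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.KeyValue;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.client.ResultScanner;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.filter.KeyOnlyFilter;

    // Placeholder example class; "mytable" is an assumed table name.
    public class KeyOnlyScanExample {
      public static void main(String[] args) throws Exception {
        HTable table = new HTable(HBaseConfiguration.create(), "mytable");
        Scan scan = new Scan();
        scan.setFilter(new KeyOnlyFilter());  // server strips values before returning
        ResultScanner scanner = table.getScanner(scan);
        try {
          for (Result r : scanner) {
            for (KeyValue kv : r.raw()) {
              // Keys come back intact; values are zero length.
              System.out.println(kv + " valueLength=" + kv.getValue().length);
            }
          }
        } finally {
          scanner.close();
          table.close();
        }
      }
    }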


