hbase-commits mailing list archives

From apurt...@apache.org
Subject svn commit: r787871 - in /hadoop/hbase/trunk_on_hadoop-0.18.3: ./ conf/ src/java/org/apache/hadoop/hbase/client/ src/java/org/apache/hadoop/hbase/filter/ src/java/org/apache/hadoop/hbase/master/ src/java/org/apache/hadoop/hbase/regionserver/ src/test/o...
Date Tue, 23 Jun 2009 23:30:38 GMT
Author: apurtell
Date: Tue Jun 23 23:30:37 2009
New Revision: 787871

URL: http://svn.apache.org/viewvc?rev=787871&view=rev
Log:
HBASE-1561, HBASE-1558, HBASE-1508, HBASE-1568, HBASE-1564, HBASE-1532, HBASE-1572

Added:
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/webapps/master/zk.jsp
Modified:
    hadoop/hbase/trunk_on_hadoop-0.18.3/CHANGES.txt
    hadoop/hbase/trunk_on_hadoop-0.18.3/conf/log4j.properties
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/Delete.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/Get.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/HTable.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/RowWhileMatchFilter.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/package-info.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/master/BaseScanner.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/master/HMaster.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/HBaseTestCase.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestHRegion.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/webapps/master/master.jsp
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/webapps/master/table.jsp

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/CHANGES.txt?rev=787871&r1=787870&r2=787871&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/CHANGES.txt (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/CHANGES.txt Tue Jun 23 23:30:37 2009
@@ -211,6 +211,15 @@
    HBASE-1545  atomicIncrements creating new values with Long.MAX_VALUE
    HBASE-1547  atomicIncrement doesnt increase hregion.memcacheSize
    HBASE-1553  ClassSize missing in trunk
+   HBASE-1561  HTable Mismatch between javadoc and what it actually does
+   HBASE-1558  deletes use 'HConstants.LATEST_TIMESTAMP' but no one translates
+               that into 'now'
+   HBASE-1508  Shell "close_region" reveals a Master<>HRS problem, regions are
+               not reassigned
+   HBASE-1568  Client doesnt consult old row filter interface in
+               filterSaysStop() - could result in NPE or excessive scanning
+   HBASE-1564  in UI make host addresses all look the same -- not IP sometimes
+               and host at others
 
   IMPROVEMENTS
    HBASE-1089  Add count of regions on filesystem to master UI; add percentage
@@ -380,6 +389,9 @@
    HBASE-1550  hbase-daemon.sh stop should provide more information when stop
                command fails
    HBASE-1515  Address part of config option hbase.regionserver unnecessary
+   HBASE-1532  UI Visibility into ZooKeeper
+   HBASE-1572  Zookeeper log4j property set to ERROR on default, same output
+               when cluster working and not working (Jon Gray via Stack)
 
   OPTIMIZATIONS
    HBASE-1412  Change values for delete column and column family in KeyValue

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/conf/log4j.properties
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/conf/log4j.properties?rev=787871&r1=787870&r2=787871&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/conf/log4j.properties (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/conf/log4j.properties Tue Jun 23 23:30:37 2009
@@ -40,7 +40,7 @@
 
 # Custom Logging levels
 
-log4j.logger.org.apache.zookeeper=ERROR
+log4j.logger.org.apache.zookeeper=INFO
 #log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
 #log4j.logger.org.apache.hadoop.hbase=DEBUG
 #log4j.logger.org.apache.hadoop.dfs=DEBUG
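
The HBASE-1572 change above raises the ZooKeeper logger from ERROR to INFO so a healthy cluster and a broken one no longer produce the same (silent) output. If INFO proves too chatty for a given install, the level can be dialed back locally in conf/log4j.properties, e.g. (WARN is an illustrative choice, not part of this commit):

log4j.logger.org.apache.zookeeper=WARN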

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/Delete.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/Delete.java?rev=787871&r1=787870&r2=787871&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/Delete.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/Delete.java Tue Jun 23 23:30:37 2009
@@ -87,13 +87,16 @@
 
   /**
    * Create a Delete operation for the specified row and timestamp, using
-   * an optional row lock.
-   * <p>
+   * an optional row lock.<p>
+   * 
    * If no further operations are done, this will delete all columns in all
    * families of the specified row with a timestamp less than or equal to the 
-   * specified timestamp.
+   * specified timestamp.<p>
+   * 
+   * This timestamp is ONLY used for a delete row operation.  If specifying 
+   * families or columns, you must specify each timestamp individually.
    * @param row row key
-   * @param timestamp maximum version timestamp
+   * @param timestamp maximum version timestamp (only for delete row)
    * @param rowLock previously acquired row lock, or null
    */
   public Delete(byte [] row, long timestamp, RowLock rowLock) {
@@ -170,6 +173,18 @@
   }
   
   /**
+   * Delete all versions of the specified column, given in 
+   * <code>family:qualifier</code> notation, and with a timestamp less than
+   * or equal to the specified timestamp. 
+   * @param column colon-delimited family and qualifier
+   * @param timestamp maximum version timestamp 
+   */
+  public void deleteColumns(byte [] column, long timestamp) {
+    byte [][] parts = KeyValue.parseColumn(column);
+    this.deleteColumns(parts[0], parts[1], timestamp);
+  }
+  
+  /**
    * Delete the latest version of the specified column.
    * This is an expensive call in that on the server-side, it first does a
    * get to find the latest versions timestamp.  Then it adds a delete using
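
For context, the new deleteColumns(byte [] column, long timestamp) overload above splits a colon-delimited column name with KeyValue.parseColumn and delegates to the (family, qualifier, timestamp) form. A minimal client-side sketch, assuming the usual HTable(conf, tableName) constructor; the table, row, and column names are illustrative, not from this commit:

import java.io.IOException;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.util.Bytes;

public class DeleteColumnsExample {
  public static void main(String[] args) throws IOException {
    HTable table = new HTable(new HBaseConfiguration(), "mytable");
    Delete d = new Delete(Bytes.toBytes("row1"));
    // Deletes every version of info:name with a timestamp at or before now.
    d.deleteColumns(Bytes.toBytes("info:name"), System.currentTimeMillis());
    table.delete(d);
  }
}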

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/Get.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/Get.java?rev=787871&r1=787870&r2=787871&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/Get.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/Get.java Tue Jun 23 23:30:37 2009
@@ -57,7 +57,7 @@
  * To limit the number of versions of each column to be returned, execute
  * {@link #setMaxVersions(int) setMaxVersions}.
  * <p>
- * To add a filter, execute {@link #setFilter(RowFilterInterface) setFilter}.
+ * To add a filter, execute {@link #setFilter(Filter) setFilter}.
  */
 public class Get implements Writable {
   private byte [] row = null;
@@ -402,4 +402,4 @@
       }
     }
   }
-}
\ No newline at end of file
+}

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/HTable.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/HTable.java?rev=787871&r1=787870&r2=787871&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/HTable.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/HTable.java Tue Jun 23 23:30:37 2009
@@ -1411,7 +1411,9 @@
       final RowLock rl)
   throws IOException {
     Delete d = new Delete(row, ts, rl);
-    d.deleteColumn(column);
+    if(column != null) {
+      d.deleteColumns(column, ts);
+    }
     delete(d);
   }
   
@@ -1544,9 +1546,8 @@
   public void deleteFamily(final byte [] row, final byte [] family, 
     final long timestamp, final RowLock rl)
   throws IOException {
-    // Is this right?  LATEST_TS? St.Ack
     Delete d = new Delete(row, HConstants.LATEST_TIMESTAMP, rl);
-    d.deleteFamily(family);
+    d.deleteFamily(stripColon(family), timestamp);
     delete(d);
   }
   
@@ -1865,9 +1866,16 @@
       if(!scan.hasFilter()) {
         return false;
       }
-      // Let the filter see current row.
-      scan.getFilter().filterRowKey(endKey, 0, endKey.length);
-      return scan.getFilter().filterAllRemaining();
+      if (scan.getFilter() != null) {
+        // Let the filter see current row.
+        scan.getFilter().filterRowKey(endKey, 0, endKey.length);
+        return scan.getFilter().filterAllRemaining();
+      }
+      if (scan.getOldFilter() != null) {
+        scan.getOldFilter().filterRowKey(endKey, 0, endKey.length);
+        return scan.getOldFilter().filterAllRemaining();
+      }
+      return false; //unlikely.
     }
 
     public Result next() throws IOException {
@@ -2071,4 +2079,14 @@
       };
     }
   }
+  
+  private static byte [] stripColon(final byte [] n) {
+    byte col = n[n.length-1];
+    if (col == ':') {
+      byte [] res = new byte[n.length-1];
+      System.arraycopy(n, 0, res, 0, n.length-1);
+      return res;
+    }
+    return n;
+  }
 }
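
Two behavior notes on the HTable changes above: deleteColumn now forwards to the new deleteColumns with the caller's timestamp (and skips a null column instead of passing it through), and deleteFamily finally honors the passed timestamp rather than LATEST_TIMESTAMP, accepting the family name with or without the legacy trailing colon via stripColon. A hedged usage sketch, with illustrative names not from this commit:

import java.io.IOException;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.util.Bytes;

public class DeleteFamilyExample {
  public static void main(String[] args) throws IOException {
    HTable table = new HTable(new HBaseConfiguration(), "mytable");
    // "fam1:" and "fam1" are now equivalent; the given timestamp is used,
    // and null means no previously acquired row lock.
    table.deleteFamily(Bytes.toBytes("row1"), Bytes.toBytes("fam1:"),
        System.currentTimeMillis(), null);
  }
}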

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/RowWhileMatchFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/RowWhileMatchFilter.java?rev=787871&r1=787870&r2=787871&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/RowWhileMatchFilter.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/RowWhileMatchFilter.java Tue Jun 23 23:30:37 2009
@@ -93,4 +93,4 @@
       throw new RuntimeException("Failed deserialize.", e);
     }
   }
-}
\ No newline at end of file
+}

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/package-info.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/package-info.java?rev=787871&r1=787870&r2=787871&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/package-info.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/package-info.java Tue Jun 23 23:30:37 2009
@@ -20,11 +20,11 @@
 /**Provides row-level filters applied to HRegion scan results during calls to
  * {@link org.apache.hadoop.hbase.client.ResultScanner#next()}. 
 
-<p>Since HBase 0.20.0, {@link Filter} is the new Interface used filtering.
-It replaces the deprecated {@link RowFilterInterface}.
+<p>Since HBase 0.20.0, {@link org.apache.hadoop.hbase.filter.Filter} is the new interface used for filtering.
+It replaces the deprecated {@link org.apache.hadoop.hbase.filter.RowFilterInterface}.
 Filters run the extent of a table unless you wrap your filter in a
-{@link RowWhileMatchFilter}.  The latter returns as soon as the filter
-stops matching.
+{@link org.apache.hadoop.hbase.filter.RowWhileMatchFilter}.
+The latter returns as soon as the filter stops matching.
 </p>
 */
 package org.apache.hadoop.hbase.filter;
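
Per the package docs above, code on 0.20 should implement Filter rather than the deprecated RowFilterInterface, wrapping the filter in a RowWhileMatchFilter when a scan should stop at the first non-match. A sketch, assuming RowWhileMatchFilter takes the wrapped Filter in its constructor and Scan exposes setFilter(Filter), as the surrounding code implies:

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.RowWhileMatchFilter;

public class WhileMatchExample {
  // Wraps any Filter so the scan returns as soon as it stops matching,
  // instead of running the full extent of the table.
  static Scan scanWhileMatching(Filter inner) {
    Scan scan = new Scan();
    scan.setFilter(new RowWhileMatchFilter(inner));
    return scan;
  }
}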

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/master/BaseScanner.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/master/BaseScanner.java?rev=787871&r1=787870&r2=787871&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/master/BaseScanner.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/master/BaseScanner.java Tue Jun 23 23:30:37 2009
@@ -349,14 +349,15 @@
     }
     HServerInfo storedInfo = null;
     synchronized (this.master.regionManager) {
-      /*
-       * We don't assign regions that are offline, in transition or were on
-       * a dead server. Regions that were on a dead server will get reassigned
-       * by ProcessServerShutdown
+      /* We don't assign regions that are offline, in transition or were on
+       * a dead server (unless they have an empty serverName which would imply
+       * they haven't been assigned in the first place OR it was closed from
+       * the shell with 'close_region' which deletes server and startcode
+       * from .META. so region gets reassigned). Regions that were on a dead
+       * server will get reassigned by ProcessServerShutdown
        */
       if (info.isOffline() ||
-        this.master.regionManager.
-            regionIsInTransition(info.getRegionNameAsString()) ||
+        (serverName != null && this.master.regionManager.regionIsInTransition(info.getRegionNameAsString())) ||
           (serverName != null && this.master.serverManager.isDead(serverName))) {
         return;
       }

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/master/HMaster.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/master/HMaster.java?rev=787871&r1=787870&r2=787871&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/master/HMaster.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/master/HMaster.java Tue Jun 23 23:30:37 2009
@@ -168,7 +168,7 @@
         conf.get("hbase.master.dns.interface","default"),
         conf.get("hbase.master.dns.nameserver","default"));
     addressStr += ":" + 
-      conf.get("hbase.master.port", Integer.toString(DEFAULT_MASTER_PORT));
+      conf.get(MASTER_PORT, Integer.toString(DEFAULT_MASTER_PORT));
     HServerAddress address = new HServerAddress(addressStr);
     LOG.info("My address is " + address);
 

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java?rev=787871&r1=787870&r2=787871&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java Tue Jun 23 23:30:37 2009
@@ -1128,8 +1128,10 @@
   public void delete(byte [] family, List<KeyValue> kvs, boolean writeToWAL)
   throws IOException {
     long now = System.currentTimeMillis();
+    byte [] byteNow = Bytes.toBytes(now);
     boolean flush = false;
     this.updatesLock.readLock().lock();
+
     try {
       if (writeToWAL) {
         this.log.append(regionInfo.getRegionName(),
@@ -1158,7 +1160,10 @@
           KeyValue getkv = result.get(0);
           Bytes.putBytes(kv.getBuffer(), kv.getTimestampOffset(),
             getkv.getBuffer(), getkv.getTimestampOffset(), Bytes.SIZEOF_LONG);
+        } else {
+          kv.updateLatestStamp(byteNow);
         }
+
         size = this.memcacheSize.addAndGet(store.delete(kv));
       }
       flush = isFlushSize(size);
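
The new else branch is the heart of the HBASE-1558 fix: a delete KeyValue still carrying HConstants.LATEST_TIMESTAMP (the client gave no explicit timestamp and no matching cell supplied one) is now stamped with the region server's current time. A small illustration of the translation, using only calls that appear in this diff and in the TestHRegion changes below:

import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class LatestStampExample {
  public static void main(String[] args) {
    // No timestamp given, so the KeyValue carries LATEST_TIMESTAMP,
    // as in testDelete_CheckTimestampUpdated below.
    KeyValue kv = new KeyValue(Bytes.toBytes("row1"),
        Bytes.toBytes("fam1"), Bytes.toBytes("col1"), null);
    // The server-side delete path now rewrites that sentinel to 'now'.
    kv.updateLatestStamp(Bytes.toBytes(System.currentTimeMillis()));
    System.out.println(kv.getTimestamp());
  }
}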

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/HBaseTestCase.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/HBaseTestCase.java?rev=787871&r1=787870&r2=787871&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/HBaseTestCase.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/HBaseTestCase.java Tue Jun 23 23:30:37 2009
@@ -29,6 +29,7 @@
 import java.util.SortedMap;
 
 import junit.framework.TestCase;
+import junit.framework.AssertionFailedError;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -658,4 +659,13 @@
       root.getLog().closeAndDelete();
     }
   }
+
+  public void assertByteEquals(byte[] expected,
+                               byte[] actual) {
+    if (Bytes.compareTo(expected, actual) != 0) {
+      throw new AssertionFailedError("expected:<" +
+      Bytes.toString(expected) + "> but was:<" +
+      Bytes.toString(actual) + ">");
+    }
+  }
 }
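
The new assertByteEquals helper fails with both sides rendered as strings, which reads far better than a raw byte[] comparison; the TestHRegion changes below use it as, e.g.:

assertByteEquals(value2, r.getValue(fam1, qual1));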

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestHRegion.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestHRegion.java?rev=787871&r1=787870&r2=787871&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestHRegion.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestHRegion.java Tue Jun 23 23:30:37 2009
@@ -23,6 +23,7 @@
 import java.util.ArrayList;
 import java.util.List;
 import java.util.TreeMap;
+import java.util.Iterator;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -57,7 +58,14 @@
   private final String DIR = "test/build/data/TestHRegion/";
   
   private final int MAX_VERSIONS = 2;
-  
+
+  // Test names
+  private final byte[] tableName = Bytes.toBytes("testtable");
+  private final byte[] qual1 = Bytes.toBytes("qual1");
+  private final byte[] value1 = Bytes.toBytes("value1");
+  private final byte[] value2 = Bytes.toBytes("value2");
+  private final byte [] row = Bytes.toBytes("rowA");
+
   /**
    * @see org.apache.hadoop.hbase.HBaseTestCase#setUp()
    */
@@ -325,13 +333,68 @@
     assertTrue(Bytes.equals(rowB, results.get(0).getRow()));
 
   }
+
+  public void testDeleteColumns_PostInsert() throws IOException,
+      InterruptedException {
+    Delete delete = new Delete(row);
+    delete.deleteColumns(fam1, qual1);
+    doTestDelete_AndPostInsert(delete);
+  }
+
+  public void testDeleteFamily_PostInsert() throws IOException, InterruptedException {
+    Delete delete = new Delete(row);
+    delete.deleteFamily(fam1);
+    doTestDelete_AndPostInsert(delete);
+  }
+
+  public void doTestDelete_AndPostInsert(Delete delete)
+      throws IOException, InterruptedException {
+    initHRegion(tableName, getName(), fam1);
+    Put put = new Put(row);
+    put.add(fam1, qual1, value1);
+    region.put(put);
+
+    Thread.sleep(10);
+    
+    // now delete the value:
+    region.delete(delete, null, true);
+
+    Thread.sleep(10);
+
+    // ok put data:
+    put = new Put(row);
+    put.add(fam1, qual1, value2);
+    region.put(put);
+
+    // ok get:
+    Get get = new Get(row);
+    get.addColumn(fam1, qual1);
+
+    Result r = region.get(get, null);
+    assertEquals(1, r.size());
+    assertByteEquals(value2, r.getValue(fam1, qual1));
+
+    // next:
+    Scan scan = new Scan(row);
+    scan.addColumn(fam1, qual1);
+    InternalScanner s = region.getScanner(scan);
+
+    List<KeyValue> results = new ArrayList<KeyValue>();
+    assertEquals(false, s.next(results));
+    assertEquals(1, results.size());
+    KeyValue kv = results.get(0);
+
+    assertByteEquals(value2, kv.getValue());
+    assertByteEquals(fam1, kv.getFamily());
+    assertByteEquals(qual1, kv.getQualifier());
+    assertByteEquals(row, kv.getRow());
+  }
+
   
-  //Visual test, since the method doesn't return anything
   public void testDelete_CheckTimestampUpdated()
   throws IOException {
-    byte [] tableName = Bytes.toBytes("testtable");
     byte [] row1 = Bytes.toBytes("row1");
-    byte [] fam1 = Bytes.toBytes("fam1");
     byte [] col1 = Bytes.toBytes("col1");
     byte [] col2 = Bytes.toBytes("col2");
     byte [] col3 = Bytes.toBytes("col3");
@@ -345,8 +408,19 @@
     kvs.add(new KeyValue(row1, fam1, col1, null));
     kvs.add(new KeyValue(row1, fam1, col2, null));
     kvs.add(new KeyValue(row1, fam1, col3, null));
-    
+
     region.delete(fam1, kvs, true);
+
+    // extract the key values out the memcache:
+    // This is kinda hacky, but better than nothing...
+    long now = System.currentTimeMillis();
+    KeyValue firstKv = region.getStore(fam1).memcache.memcache.first();
+    assertTrue(firstKv.getTimestamp() <= now);
+    now = firstKv.getTimestamp();
+    for (KeyValue kv : region.getStore(fam1).memcache.memcache) {
+      assertTrue(kv.getTimestamp() <= now);
+      now = kv.getTimestamp();
+    }
   }
   
   //////////////////////////////////////////////////////////////////////////////
@@ -1054,15 +1128,14 @@
     byte [] qf1 = Bytes.toBytes("qualifier1");
     byte [] qf2 = Bytes.toBytes("qualifier2");
     byte [] fam1 = Bytes.toBytes("fam1");
-    byte [][] families = {fam1};
-    
+
     long ts1 = 1; //System.currentTimeMillis();
     long ts2 = ts1 + 1;
     long ts3 = ts1 + 2;
     
     //Setting up region
     String method = this.getName();
-    initHRegion(tableName, method, families);
+    initHRegion(tableName, method, fam1);
     
     //Putting data in Region
     Put put = null;
@@ -1105,15 +1178,64 @@
       assertEquals(expected.get(i), actual.get(i));
     }
   }
+
+  public void testScanner_StopRow1542() throws IOException {
+    byte [] tableName = Bytes.toBytes("test_table");
+    byte [] family = Bytes.toBytes("testFamily");
+    initHRegion(tableName, getName(), family);
+
+    byte [] row1 = Bytes.toBytes("row111");
+    byte [] row2 = Bytes.toBytes("row222");
+    byte [] row3 = Bytes.toBytes("row333");
+    byte [] row4 = Bytes.toBytes("row444");
+    byte [] row5 = Bytes.toBytes("row555");
+
+    byte [] col1 = Bytes.toBytes("Pub111");
+    byte [] col2 = Bytes.toBytes("Pub222");
+
+    Put put = new Put(row1);
+    put.add(family, col1, Bytes.toBytes(10L));
+    region.put(put);
+
+    put = new Put(row2);
+    put.add(family, col1, Bytes.toBytes(15L));
+    region.put(put);
+
+    put = new Put(row3);
+    put.add(family, col2, Bytes.toBytes(20L));
+    region.put(put);
+
+    put = new Put(row4);
+    put.add(family, col2, Bytes.toBytes(30L));
+    region.put(put);
+
+    put = new Put(row5);
+    put.add(family, col1, Bytes.toBytes(40L));
+    region.put(put);
+
+    Scan scan = new Scan(row3, row4);
+    scan.setMaxVersions();
+    scan.addColumn(family, col1);
+    InternalScanner s = region.getScanner(scan);
+
+    List<KeyValue> results = new ArrayList<KeyValue>();
+    assertEquals(false, s.next(results));
+    assertEquals(0, results.size());
+
+  }
   
   public void testScanner_Wildcard_FromMemcacheAndFiles_EnforceVersions()
   throws IOException {
     byte [] tableName = Bytes.toBytes("testtable");
     byte [] row1 = Bytes.toBytes("row1");
     byte [] fam1 = Bytes.toBytes("fam1");
-    byte [][] families = {fam1};
     byte [] qf1 = Bytes.toBytes("qualifier1");
-    byte [] qf2 = Bytes.toBytes("qualifier2");
+    byte [] qf2 = Bytes.toBytes("quateslifier2");
     
     long ts1 = 1;
     long ts2 = ts1 + 1;
@@ -1122,7 +1244,7 @@
     
     //Setting up region
     String method = this.getName();
-    initHRegion(tableName, method, families);
+    initHRegion(tableName, method, fam1);
     
     //Putting data in Region
     KeyValue kv14 = new KeyValue(row1, fam1, qf1, ts4, KeyValue.Type.Put, null);

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/webapps/master/master.jsp
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/webapps/master/master.jsp?rev=787871&r1=787870&r2=787871&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/webapps/master/master.jsp (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/webapps/master/master.jsp Tue Jun 23 23:30:37 2009
@@ -61,7 +61,7 @@
 <tr><td>HBase Root Directory</td><td><%= master.getRootDir().toString() %></td><td>Location of HBase home directory</td></tr>
 <tr><td>Load average</td><td><%= master.getAverageLoad() %></td><td>Average number of regions per regionserver. Naive computation.</td></tr>
 <tr><td>Regions On FS</td><td><%= master.countRegionsOnFS() %></td><td>Number of regions on FileSystem. Rough count.</td></tr>
-<tr><td>Zookeeper Quorum</td><td><%= master.getZooKeeperWrapper().getQuorumServers() %></td><td>Addresses of all registered ZK servers.</td></tr>
+<tr><td>Zookeeper Quorum</td><td><%= master.getZooKeeperWrapper().getQuorumServers() %></td><td>Addresses of all registered ZK servers. For more, see <a href="/zk.jsp">zk dump</a>.</td></tr>
 </table>
 
 <h2>Catalog Tables</h2>
@@ -147,7 +147,7 @@
      Arrays.sort(serverNames);
      for (String serverName: serverNames) {
        HServerInfo hsi = serverToServerInfos.get(serverName);
-       String hostname = hsi.getServerAddress().getInetSocketAddress().getAddress().getHostAddress() + ":" + hsi.getInfoPort();
+       String hostname = hsi.getServerAddress().getHostname() + ":" + hsi.getInfoPort();
        String url = "http://" + hostname + "/";
        totalRegions += hsi.getLoad().getNumberOfRegions();
        totalRequests += hsi.getLoad().getNumberOfRequests() / interval;

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/webapps/master/table.jsp
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/webapps/master/table.jsp?rev=787871&r1=787870&r2=787871&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/webapps/master/table.jsp (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/webapps/master/table.jsp Tue Jun 23 23:30:37 2009
@@ -110,7 +110,7 @@
 %>
 <tr>
   <td><%= Bytes.toString(meta.getRegionName()) %></td>
-    <td><a href="<%= url %>"><%= meta.getServer().toString() %></a></td>
+    <td><a href="<%= url %>"><%= meta.getServer().getHostname().toString() + ":" + infoPort %></a></td>
     <td>-</td><td><%= Bytes.toString(meta.getStartKey()) %></td><td>-</td>
 </tr>
 <%  } %>
@@ -141,7 +141,7 @@
 %>
 <tr>
   <td><a href="<%= urlRegionHistorian %>"><%= Bytes.toStringBinary(hriEntry.getKey().getRegionName())%></a></td>
-  <td><a href="<%= urlRegionServer %>"><%= hriEntry.getValue().toString() %></a></td>
+  <td><a href="<%= urlRegionServer %>"><%= hriEntry.getValue().getHostname().toString() + ":" + infoPort %></a></td>
   <td><%= hriEntry.getKey().getEncodedName()%></td> <td><%= Bytes.toStringBinary(hriEntry.getKey().getStartKey())%></td>
   <td><%= Bytes.toStringBinary(hriEntry.getKey().getEndKey())%></td>
 </tr>

Added: hadoop/hbase/trunk_on_hadoop-0.18.3/src/webapps/master/zk.jsp
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/webapps/master/zk.jsp?rev=787871&view=auto
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/webapps/master/zk.jsp (added)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/webapps/master/zk.jsp Tue Jun 23 23:30:37 2009
@@ -0,0 +1,35 @@
+<%@ page contentType="text/html;charset=UTF-8"
+  import="java.io.IOException"
+  import="org.apache.hadoop.hbase.client.HBaseAdmin"
+  import="org.apache.hadoop.hbase.client.HConnection"
+  import="org.apache.hadoop.hbase.HRegionInfo"
+  import="org.apache.hadoop.hbase.zookeeper.ZooKeeperWrapper"
+  import="org.apache.hadoop.hbase.HBaseConfiguration"
+  import="org.apache.hadoop.hbase.master.HMaster" 
+  import="org.apache.hadoop.hbase.HConstants"%><%
+  HMaster master = (HMaster)getServletContext().getAttribute(HMaster.MASTER);
+  HBaseConfiguration conf = master.getConfiguration();
+  HBaseAdmin hbadmin = new HBaseAdmin(conf);
+  HConnection connection = hbadmin.getConnection();
+  ZooKeeperWrapper wrapper = connection.getZooKeeperWrapper();
+%>
+
+<?xml version="1.0" encoding="UTF-8" ?>
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" 
+  "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"> 
+<html xmlns="http://www.w3.org/1999/xhtml">
+<head><meta http-equiv="Content-Type" content="text/html;charset=UTF-8"/>
+<title>ZooKeeper Dump</title>
+<link rel="stylesheet" type="text/css" href="/static/hbase.css" />
+</head>
+<body>
+<a id="logo" href="http://hbase.org"><img src="/static/hbase_logo_med.gif" alt="HBase Logo" title="HBase Logo" /></a>
+<h1 id="page_title">ZooKeeper Dump</h1>
+<p id="links_menu"><a href="/master.jsp">Master</a>, <a href="/logs/">Local logs</a>, <a href="/stacks">Thread Dump</a>, <a href="/logLevel">Log Level</a></p>
+<hr id="head_rule" />
+<pre>
+<%= wrapper.dump() %>
+</pre>
+
+</body>
+</html>
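
The new zk.jsp reaches the master's ZooKeeperWrapper through an HBaseAdmin connection and renders wrapper.dump() inside a <pre> block. The same dump can be taken programmatically; a sketch using exactly the calls the JSP makes:

import java.io.IOException;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HConnection;
import org.apache.hadoop.hbase.zookeeper.ZooKeeperWrapper;

public class ZkDumpExample {
  public static void main(String[] args) throws IOException {
    HBaseAdmin hbadmin = new HBaseAdmin(new HBaseConfiguration());
    HConnection connection = hbadmin.getConnection();
    ZooKeeperWrapper wrapper = connection.getZooKeeperWrapper();
    // Prints the same string the /zk.jsp page shows.
    System.out.println(wrapper.dump());
  }
}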


