hbase-commits mailing list archives

From: st...@apache.org
Subject: svn commit: r656868 [1/10] - in /hadoop/hbase/trunk: ./ src/java/org/apache/hadoop/hbase/ src/java/org/apache/hadoop/hbase/client/ src/java/org/apache/hadoop/hbase/filter/ src/java/org/apache/hadoop/hbase/hql/ src/java/org/apache/hadoop/hbase/io/ src/j...
Date: Thu, 15 May 2008 22:10:50 GMT
Author: stack
Date: Thu May 15 15:10:47 2008
New Revision: 656868

URL: http://svn.apache.org/viewvc?rev=656868&view=rev
Log:
HBASE-82 row keys should be array of bytes

Added:
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/Bytes.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestHRegionInfo.java
Removed:
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/TextSequence.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/RowMap.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/hql/TestHQL.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/io/TestTextSequence.java
Modified:
    hadoop/hbase/trunk/CHANGES.txt
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HColumnDescriptor.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HConstants.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HMerge.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HMsg.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HRegionInfo.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HStoreKey.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HTableDescriptor.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/LocalHBaseCluster.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/NotServingRegionException.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HConnection.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HConnectionManager.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HTable.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/RetriesExhaustedException.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/ScannerCallable.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/ServerCallable.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/InclusiveStopRowFilter.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/PageRowFilter.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/RegExpRowFilter.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/RowFilterInterface.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/RowFilterSet.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/StopRowFilter.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/WhileMatchRowFilter.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/AlterCommand.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/CreateCommand.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/DeleteCommand.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/DescCommand.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/InsertCommand.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/SchemaModificationCommand.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/SelectCommand.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/TruncateCommand.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/BatchOperation.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/BatchUpdate.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/Cell.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/HbaseMapWritable.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/RowResult.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ipc/HMasterInterface.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ipc/HMasterRegionInterface.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ipc/HRegionInterface.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IdentityTableMap.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IdentityTableReduce.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IndexOutputFormat.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IndexTableReduce.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/RowCounter.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableInputFormatBase.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableMap.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableReduce.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableSplit.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/AddColumn.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/BaseScanner.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ChangeTableState.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ColumnOperation.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/DeleteColumn.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/HMaster.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/MetaRegion.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/MetaScanner.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ModifyColumn.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ProcessRegionOpen.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ProcessRegionStatusChange.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ProcessServerShutdown.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/RegionManager.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/RootScanner.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ServerManager.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/TableDelete.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/TableOperation.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/CompactSplitThread.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HAbstractScanner.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HLog.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HLogEdit.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HLogKey.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HStore.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HStoreScanner.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/InternalScanner.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/Memcache.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/RegionUnavailableListener.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/StoreFileScanner.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/rest/GenericHandler.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/rest/ScannerHandler.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/rest/TableHandler.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/thrift/ThriftServer.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/JenkinsHash.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/Merge.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/MetaUtils.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/Migrate.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/SoftSortedMap.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/Writables.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/AbstractMergeTestBase.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/DisabledTestScanner2.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/HBaseClusterTestCase.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/HBaseTestCase.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/MapFilePerformanceEvaluation.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/MiniHBaseCluster.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/MultiRegionTable.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/PerformanceEvaluation.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestBloomFilters.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestCompare.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestEmptyMetaInfo.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestGlobalMemcacheLimit.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestHBaseCluster.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestInfoServers.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestMasterAdmin.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestRegionRebalancing.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestScannerAPI.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestSerialization.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestTable.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestToString.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TimestampTestBase.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/client/TestBatchUpdate.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/client/TestHTable.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/client/TestListTables.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/client/TestTimestamp.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/filter/TestInclusiveStopRowFilter.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/filter/TestPageRowFilter.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/filter/TestRegExpRowFilter.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/filter/TestRowFilterAfterWrite.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/filter/TestRowFilterOnMultipleFamilies.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/filter/TestRowFilterSet.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/filter/TestStopRowFilter.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/filter/TestWhileMatchRowFilter.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/io/TestHbaseObjectWritable.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/mapred/TestTableIndex.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/OOMERegionServer.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestCompaction.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestDeleteAll.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestDeleteFamily.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestGet.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestGet2.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestHLog.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestHMemcache.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestHRegion.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestLogRolling.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestRegionServerExit.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestScanner.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestSplit.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestTimestamp.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/util/TestMergeTool.java
    hadoop/hbase/trunk/src/webapps/master/master.jsp
    hadoop/hbase/trunk/src/webapps/master/table.jsp
    hadoop/hbase/trunk/src/webapps/regionserver/regionserver.jsp
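
The heart of the change announced in the log above is the move from org.apache.hadoop.io.Text to raw byte [] for row keys, column names, and table names, mediated by the new org.apache.hadoop.hbase.util.Bytes helper added in this commit. A minimal sketch of what keyed access looks like after the change, using only constructors and Bytes methods that appear in the diffs below; the row, column, and timestamp values are illustrative, and the full Bytes source is not part of this message [1/10]:

import org.apache.hadoop.hbase.HStoreKey;
import org.apache.hadoop.hbase.util.Bytes;

public class ByteKeysSketch {
  public static void main(String[] args) {
    // Rows and columns are plain byte arrays; Bytes.toBytes does the UTF-8
    // conversion that new Text(String) used to do.
    byte [] row = Bytes.toBytes("row-0001");
    byte [] column = Bytes.toBytes("info:server");
    HStoreKey key = new HStoreKey(row, column, System.currentTimeMillis());

    // Comparisons now go through Bytes rather than Text.compareTo.
    System.out.println(key + " matches row: " + Bytes.equals(key.getRow(), row));
  }
}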

Modified: hadoop/hbase/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/CHANGES.txt?rev=656868&r1=656867&r2=656868&view=diff
==============================================================================
--- hadoop/hbase/trunk/CHANGES.txt (original)
+++ hadoop/hbase/trunk/CHANGES.txt Thu May 15 15:10:47 2008
@@ -5,6 +5,8 @@
                Jim Kellerman) (API change for filters)
    HBASE-601   Just remove deprecated methods in HTable; 0.2 is not backward
                compatible anyways
+   HBASE-82    Row keys should be array of bytes
+   HBASE-76    Purge servers of Text (Done as part of HBASE-82 commit).
 
   BUG FIXES
    HBASE-574   HBase does not load hadoop native libs (Rong-En Fan via Stack)
@@ -28,6 +30,9 @@
    HBASE-614   Retiring regions is not used; exploit or remove
    HBASE-538   Improve exceptions that come out on client-side
    HBASE-569   DemoClient.php (Jim R. Wilson via Stack)
+   HBASE-522   Where new Text(string) might be used in client side method calls,
+               add an overload that takes string (Done as part of HBASE-82)
+   HBASE-570   Remove HQL unit test (Done as part of HBASE-82 commit).
 
 
 Release 0.1.2 - 05/13/2008

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HColumnDescriptor.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HColumnDescriptor.java?rev=656868&r1=656867&r2=656868&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HColumnDescriptor.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HColumnDescriptor.java Thu May 15 15:10:47 2008
@@ -22,14 +22,11 @@
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
 
+import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.WritableComparable;
 
-import org.apache.hadoop.hbase.io.TextSequence;
-
 /**
  * An HColumnDescriptor contains information about a column family such as the
  * number of versions, compression settings, etc.
@@ -40,12 +37,11 @@
  * deleted when the column is deleted.
  */
 public class HColumnDescriptor implements WritableComparable {
-  
   // For future backward compatibility
-  private static final byte COLUMN_DESCRIPTOR_VERSION = (byte)3;
-  
-  /** Legal family names can only contain 'word characters' and end in a colon. */
-  public static final Pattern LEGAL_FAMILY_NAME = Pattern.compile("\\w+:");
+
+  // Version 3 was when we picked up the time-to-live feature.  Version 4 is
+  // when column names became byte arrays, HBASE-82.
+  private static final byte COLUMN_DESCRIPTOR_VERSION = (byte)4;
 
   /** 
    * The type of compression.
@@ -65,22 +61,22 @@
    */
   public static final CompressionType DEFAULT_COMPRESSION_TYPE =
     CompressionType.NONE;
-  
+
   /**
    * Default number of versions of a record to keep.
    */
   public static final int DEFAULT_N_VERSIONS = 3;
-  
+
   /**
    * Default setting for whether to serve from memory or not.
    */
   public static final boolean DEFAULT_IN_MEMORY = false;
-  
+
   /**
    * Default setting for whether to use a block cache or not.
    */
   public static final boolean DEFAULT_BLOCK_CACHE_ENABLED = false;
-  
+
   /**
    * Default maximum length of cell contents.
    */
@@ -96,56 +92,71 @@
    */
   public static final BloomFilterDescriptor DEFAULT_BLOOM_FILTER_DESCRIPTOR =
     null;
-  
+
   // Column family name
-  private Text name;
+  private byte [] name;
   // Number of versions to keep
-  private int maxVersions;
+  private int maxVersions = DEFAULT_N_VERSIONS;
   // Compression setting if any
-  private CompressionType compressionType;
+  private CompressionType compressionType = DEFAULT_COMPRESSION_TYPE;
   // Serve reads from in-memory cache
-  private boolean inMemory;
+  private boolean inMemory = DEFAULT_IN_MEMORY;
   // Serve reads from in-memory block cache
-  private boolean blockCacheEnabled;
+  private boolean blockCacheEnabled = DEFAULT_BLOCK_CACHE_ENABLED;
   // Maximum value size
-  private int maxValueLength;
+  private int maxValueLength = Integer.MAX_VALUE;
   // Time to live of cell contents, in seconds from last timestamp
-  private int timeToLive;
+  private int timeToLive = HConstants.FOREVER;
   // True if bloom filter was specified
-  private boolean bloomFilterSpecified;
+  private boolean bloomFilterSpecified = false;
   // Descriptor of bloom filter
-  private BloomFilterDescriptor bloomFilter;
-  // Version number of this class
-  private byte versionNumber;
-  // Family name without the ':'
-  private transient Text familyName = null;
-  
+  private BloomFilterDescriptor bloomFilter = DEFAULT_BLOOM_FILTER_DESCRIPTOR;
+
   /**
    * Default constructor. Must be present for Writable.
    */
   public HColumnDescriptor() {
-    this(null);
+    this.name = null;
   }
-  
+
   /**
    * Construct a column descriptor specifying only the family name 
    * The other attributes are defaulted.
    * 
    * @param columnName - column family name
    */
-  public HColumnDescriptor(String columnName) {
-    this(columnName == null || columnName.length() <= 0?
-      new Text(): new Text(columnName),
+  public HColumnDescriptor(final String columnName) {
+    this(Bytes.toBytes(columnName));
+  }
+
+  /**
+   * Construct a column descriptor specifying only the family name 
+   * The other attributes are defaulted.
+   * 
+   * @param columnName - column family name
+   */
+  public HColumnDescriptor(final Text columnName) {
+    this(columnName.getBytes());
+  }
+  
+  /**
+   * Construct a column descriptor specifying only the family name 
+   * The other attributes are defaulted.
+   * 
+   * @param columnName Column family name.  Must have the ':' ending.
+   */
+  public HColumnDescriptor(final byte [] columnName) {
+    this (columnName == null || columnName.length <= 0?
+      HConstants.EMPTY_BYTE_ARRAY: columnName,
       DEFAULT_N_VERSIONS, DEFAULT_COMPRESSION_TYPE, DEFAULT_IN_MEMORY,
       DEFAULT_BLOCK_CACHE_ENABLED, 
       Integer.MAX_VALUE, DEFAULT_TIME_TO_LIVE,
       DEFAULT_BLOOM_FILTER_DESCRIPTOR);
   }
-  
+
   /**
    * Constructor
-   * Specify all parameters.
-   * @param name Column family name
+   * @param columnName Column family name.  Must have the ':' ending.
    * @param maxVersions Maximum number of versions to keep
    * @param compression Compression type
    * @param inMemory If true, column data should be kept in an HRegionServer's
@@ -161,25 +172,14 @@
    * end in a <code>:</code>
    * @throws IllegalArgumentException if the number of versions is &lt;= 0
    */
-  public HColumnDescriptor(final Text name, final int maxVersions,
+  public HColumnDescriptor(final byte [] columnName, final int maxVersions,
       final CompressionType compression, final boolean inMemory,
       final boolean blockCacheEnabled,
       final int maxValueLength, final int timeToLive,
       final BloomFilterDescriptor bloomFilter) {
-    String familyStr = name.toString();
-    // Test name if not null (It can be null when deserializing after
-    // construction but before we've read in the fields);
-    if (familyStr.length() > 0) {
-      Matcher m = LEGAL_FAMILY_NAME.matcher(familyStr);
-      if(m == null || !m.matches()) {
-        throw new IllegalArgumentException("Illegal family name <" + name +
-          ">. Family names can only contain " +
-          "'word characters' and must end with a ':'");
-      }
-    }
-    this.name = name;
-
-    if(maxVersions <= 0) {
+    isLegalFamilyName(columnName);
+    this.name = stripColon(columnName);
+    if (maxVersions <= 0) {
       // TODO: Allow maxVersion of 0 to be the way you say "Keep all versions".
       // Until there is support, consider 0 or < 0 -- a configuration error.
       throw new IllegalArgumentException("Maximum versions must be positive");
@@ -191,26 +191,49 @@
     this.timeToLive = timeToLive;
     this.bloomFilter = bloomFilter;
     this.bloomFilterSpecified = this.bloomFilter == null ? false : true;
-    this.versionNumber = COLUMN_DESCRIPTOR_VERSION;
     this.compressionType = compression;
   }
-
-  /** @return name of column family */
-  public Text getName() {
-    return name;
+  
+  private static byte [] stripColon(final byte [] n) {
+    byte [] result = new byte [n.length - 1];
+    // Have the stored family name be absent the colon delimiter
+    System.arraycopy(n, 0, result, 0, n.length - 1);
+    return result;
   }
-
-  /** @return name of column family without trailing ':' */
-  public synchronized Text getFamilyName() {
-    if (name != null) {
-      if (familyName == null) {
-        familyName = new TextSequence(name, 0, name.getLength() - 1).toText();
+  
+  /**
+   * @param b Family name.
+   * @return <code>b</code>
+   * @throws IllegalArgumentException If not null and not a legitimate family
+   * name: i.e. 'printable' and ends in a ':' (Null passes are allowed because
+   * <code>b</code> can be null when deserializing).
+   */
+  public static byte [] isLegalFamilyName(final byte [] b) {
+    if (b == null) {
+      return b;
+    }
+    if (b[b.length - 1] != ':') {
+      throw new IllegalArgumentException("Family names must end in a colon: " +
+        Bytes.toString(b));
+    }
+    for (int i = 0; i < (b.length - 1); i++) {
+      if (Character.isLetterOrDigit(b[i]) || b[i] == '_' || b[i] == '.') {
+        continue;
       }
-      return familyName;
+      throw new IllegalArgumentException("Illegal character <" + b[i] +
+        ">. Family names  can only contain  'word characters' and must end" +
+        "with a colon: " + Bytes.toString(b));
     }
-    return null;
+    return b;
   }
-  
+
+  /**
+   * @return Name of this column family
+   */
+  public byte [] getName() {
+    return name;
+  }
+
   /** @return compression type being used for the column family */
   public CompressionType getCompression() {
     return this.compressionType;
@@ -266,9 +289,7 @@
   /** {@inheritDoc} */
   @Override
   public String toString() {
-    // Output a name minus ':'.
-    String tmp = name.toString();
-    return "{name: " + tmp.substring(0, tmp.length() - 1) +
+    return "{name: " + Bytes.toString(name) +
       ", max versions: " + maxVersions +
       ", compression: " + this.compressionType + ", in memory: " + inMemory +
       ", block cache enabled: " + blockCacheEnabled +
@@ -290,7 +311,7 @@
   /** {@inheritDoc} */
   @Override
   public int hashCode() {
-    int result = this.name.hashCode();
+    int result = Bytes.hashCode(this.name);
     result ^= Integer.valueOf(this.maxVersions).hashCode();
     result ^= this.compressionType.hashCode();
     result ^= Boolean.valueOf(this.inMemory).hashCode();
@@ -298,8 +319,8 @@
     result ^= Integer.valueOf(this.maxValueLength).hashCode();
     result ^= Integer.valueOf(this.timeToLive).hashCode();
     result ^= Boolean.valueOf(this.bloomFilterSpecified).hashCode();
-    result ^= Byte.valueOf(this.versionNumber).hashCode();
-    if(this.bloomFilterSpecified) {
+    result ^= Byte.valueOf(COLUMN_DESCRIPTOR_VERSION).hashCode();
+    if (this.bloomFilterSpecified) {
       result ^= this.bloomFilter.hashCode();
     }
     return result;
@@ -309,8 +330,14 @@
 
   /** {@inheritDoc} */
   public void readFields(DataInput in) throws IOException {
-    this.versionNumber = in.readByte();
-    this.name.readFields(in);
+    int versionNumber = in.readByte();
+    if (versionNumber <= 2) {
+      Text t = new Text();
+      t.readFields(in);
+      this.name = t.getBytes();
+    } else {
+      this.name = Bytes.readByteArray(in);
+    }
     this.maxVersions = in.readInt();
     int ordinal = in.readInt();
     this.compressionType = CompressionType.values()[ordinal];
@@ -323,19 +350,19 @@
       bloomFilter.readFields(in);
     }
     
-    if (this.versionNumber > 1) {
+    if (versionNumber > 1) {
       this.blockCacheEnabled = in.readBoolean();
     }
 
-    if (this.versionNumber > 2) {
+    if (versionNumber > 2) {
       this.timeToLive = in.readInt();
     }
   }
 
   /** {@inheritDoc} */
   public void write(DataOutput out) throws IOException {
-    out.writeByte(this.versionNumber);
-    this.name.write(out);
+    out.writeByte(COLUMN_DESCRIPTOR_VERSION);
+    Bytes.writeByteArray(out, this.name);
     out.writeInt(this.maxVersions);
     out.writeInt(this.compressionType.ordinal());
     out.writeBoolean(this.inMemory);
@@ -345,28 +372,16 @@
     if(bloomFilterSpecified) {
       bloomFilter.write(out);
     }
-
-    if (this.versionNumber > 1) {
-      out.writeBoolean(this.blockCacheEnabled);
-    }
-
-    if (this.versionNumber > 2) {
-      out.writeInt(this.timeToLive);
-    }
+    out.writeBoolean(this.blockCacheEnabled);
+    out.writeInt(this.timeToLive);
   }
 
   // Comparable
 
   /** {@inheritDoc} */
   public int compareTo(Object o) {
-    // NOTE: we don't do anything with the version number yet.
-    // Version numbers will come into play when we introduce an incompatible
-    // change in the future such as the addition of access control lists.
-    
     HColumnDescriptor other = (HColumnDescriptor)o;
-    
-    int result = this.name.compareTo(other.getName());
-    
+    int result = Bytes.compareTo(this.name, other.getName());
     if(result == 0) {
       result = Integer.valueOf(this.maxVersions).compareTo(
           Integer.valueOf(other.maxVersions));
@@ -426,4 +441,4 @@
     
     return result;
   }
-}
+}
\ No newline at end of file
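
Family-name validation in HColumnDescriptor moves from the removed LEGAL_FAMILY_NAME regex to the byte-level isLegalFamilyName check, and the stored name now drops its trailing colon via stripColon. A hedged sketch of that behaviour, using only methods shown in the diff above; the family names are illustrative:

import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.util.Bytes;

public class FamilyNameSketch {
  public static void main(String[] args) {
    // Accepted: 'word characters' followed by the mandatory trailing colon.
    HColumnDescriptor.isLegalFamilyName(Bytes.toBytes("anchor:"));

    try {
      // Rejected: the trailing ':' delimiter is missing.
      HColumnDescriptor.isLegalFamilyName(Bytes.toBytes("anchor"));
    } catch (IllegalArgumentException e) {
      System.out.println("rejected: " + e.getMessage());
    }

    // The constructor strips the colon, so getName() returns the bytes of "anchor".
    HColumnDescriptor family = new HColumnDescriptor("anchor:");
    System.out.println(Bytes.toString(family.getName()));
  }
}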

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HConstants.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HConstants.java?rev=656868&r1=656867&r2=656868&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HConstants.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HConstants.java Thu May 15 15:10:47 2008
@@ -19,8 +19,8 @@
  */
 package org.apache.hadoop.hbase;
 
-import org.apache.hadoop.io.Text;
 import org.apache.hadoop.hbase.ipc.HRegionInterface;
+import org.apache.hadoop.hbase.util.Bytes;
 
 /**
  * HConstants holds a bunch of HBase-related constants
@@ -30,6 +30,8 @@
   /** long constant for zero */
   static final Long ZERO_L = Long.valueOf(0L);
   
+  static final String NINES = "99999999999999";
+  
   // For migration
 
   /** name of version file */
@@ -117,62 +119,69 @@
   // should go down.
 
   /** The root table's name.*/
-  static final Text ROOT_TABLE_NAME = new Text("-ROOT-");
+  static final byte [] ROOT_TABLE_NAME = Bytes.toBytes("-ROOT-");
 
   /** The META table's name. */
-  static final Text META_TABLE_NAME = new Text(".META.");
+  static final byte [] META_TABLE_NAME = Bytes.toBytes(".META.");
 
   // Defines for the column names used in both ROOT and META HBase 'meta' tables.
   
   /** The ROOT and META column family (string) */
   static final String COLUMN_FAMILY_STR = "info:";
 
-  /** The ROOT and META column family (Text) */
-  static final Text COLUMN_FAMILY = new Text(COLUMN_FAMILY_STR);
+  /** The ROOT and META column family */
+  static final byte [] COLUMN_FAMILY = Bytes.toBytes(COLUMN_FAMILY_STR);
 
   /** Array of meta column names */
-  static final Text [] COLUMN_FAMILY_ARRAY = new Text [] {COLUMN_FAMILY};
+  static final byte[][] COLUMN_FAMILY_ARRAY = new byte[][] {COLUMN_FAMILY};
   
   /** ROOT/META column family member - contains HRegionInfo */
-  static final Text COL_REGIONINFO = new Text(COLUMN_FAMILY + "regioninfo");
+  static final byte [] COL_REGIONINFO =
+    Bytes.toBytes(COLUMN_FAMILY_STR + "regioninfo");
 
   /** Array of column - contains HRegionInfo */
-  static final Text[] COL_REGIONINFO_ARRAY = new Text [] {COL_REGIONINFO};
+  static final byte[][] COL_REGIONINFO_ARRAY = new byte[][] {COL_REGIONINFO};
   
   /** ROOT/META column family member - contains HServerAddress.toString() */
-  static final Text COL_SERVER = new Text(COLUMN_FAMILY + "server");
+  static final byte[] COL_SERVER = Bytes.toBytes(COLUMN_FAMILY_STR + "server");
   
   /** ROOT/META column family member - contains server start code (a long) */
-  static final Text COL_STARTCODE = new Text(COLUMN_FAMILY + "serverstartcode");
+  static final byte [] COL_STARTCODE =
+    Bytes.toBytes(COLUMN_FAMILY_STR + "serverstartcode");
 
   /** the lower half of a split region */
-  static final Text COL_SPLITA = new Text(COLUMN_FAMILY_STR + "splitA");
+  static final byte [] COL_SPLITA = Bytes.toBytes(COLUMN_FAMILY_STR + "splitA");
   
   /** the upper half of a split region */
-  static final Text COL_SPLITB = new Text(COLUMN_FAMILY_STR + "splitB");
+  static final byte [] COL_SPLITB = Bytes.toBytes(COLUMN_FAMILY_STR + "splitB");
   
   /** All the columns in the catalog -ROOT- and .META. tables.
    */
-  static final Text[] ALL_META_COLUMNS = {COL_REGIONINFO, COL_SERVER,
+  static final byte[][] ALL_META_COLUMNS = {COL_REGIONINFO, COL_SERVER,
     COL_STARTCODE, COL_SPLITA, COL_SPLITB};
 
   // Other constants
 
   /**
-   * An empty instance of Text.
+   * An empty instance.
    */
-  static final Text EMPTY_TEXT = new Text();
+  static final byte [] EMPTY_BYTE_ARRAY = new byte [0];
   
   /**
    * Used by scanners, etc when they want to start at the beginning of a region
    */
-  static final Text EMPTY_START_ROW = EMPTY_TEXT;
+  static final byte [] EMPTY_START_ROW = EMPTY_BYTE_ARRAY;
+  
+  /**
+   * Last row in a table.
+   */
+  static final byte [] EMPTY_END_ROW = EMPTY_START_ROW;
 
   /** 
     * Used by scanners and others when they're trying to detect the end of a 
     * table 
     */
-  static final Text LAST_ROW = EMPTY_TEXT;
+  static final byte [] LAST_ROW = EMPTY_BYTE_ARRAY;
   
   /** When we encode strings, we always specify UTF8 encoding */
   static final String UTF8_ENCODING = "UTF-8";

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HMerge.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HMerge.java?rev=656868&r1=656867&r2=656868&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HMerge.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HMerge.java Thu May 15 15:10:47 2008
@@ -22,28 +22,25 @@
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
-import java.util.Map;
 import java.util.NoSuchElementException;
 import java.util.Random;
-import java.util.SortedMap;
 import java.util.TreeMap;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.Text;
-
 import org.apache.hadoop.hbase.client.HConnection;
 import org.apache.hadoop.hbase.client.HConnectionManager;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Scanner;
 import org.apache.hadoop.hbase.io.BatchUpdate;
-import org.apache.hadoop.hbase.io.RowResult;
 import org.apache.hadoop.hbase.io.Cell;
+import org.apache.hadoop.hbase.io.RowResult;
 import org.apache.hadoop.hbase.regionserver.HLog;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.InternalScanner;
+import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Writables;
 
 /** 
@@ -57,7 +54,9 @@
   /*
    * Not instantiable
    */
-  private HMerge() {}
+  private HMerge() {
+    super();
+  }
   
   /**
    * Scans the table and merges two adjacent regions if they are small. This
@@ -73,13 +72,13 @@
    * @throws IOException
    */
   public static void merge(HBaseConfiguration conf, FileSystem fs,
-    Text tableName)
+    final byte [] tableName)
   throws IOException {
     HConnection connection = HConnectionManager.getConnection(conf);
     boolean masterIsRunning = connection.isMasterRunning();
     HConnectionManager.deleteConnection(conf);
-    if(tableName.equals(META_TABLE_NAME)) {
-      if(masterIsRunning) {
+    if (Bytes.equals(tableName, META_TABLE_NAME)) {
+      if (masterIsRunning) {
         throw new IllegalStateException(
             "Can not compact META table if instance is on-line");
       }
@@ -101,9 +100,9 @@
     private final long maxFilesize;
 
     
-    protected Merger(HBaseConfiguration conf, FileSystem fs, Text tableName)
-        throws IOException {
-      
+    protected Merger(HBaseConfiguration conf, FileSystem fs,
+      final byte [] tableName)
+    throws IOException {
       this.conf = conf;
       this.fs = fs;
       this.maxFilesize =
@@ -184,19 +183,21 @@
     
     protected abstract HRegionInfo[] next() throws IOException;
     
-    protected abstract void updateMeta(Text oldRegion1, Text oldRegion2,
-        HRegion newRegion) throws IOException;
+    protected abstract void updateMeta(final byte [] oldRegion1,
+      final byte [] oldRegion2, HRegion newRegion)
+    throws IOException;
     
   }
 
   /** Instantiated to compact a normal user table */
   private static class OnlineMerger extends Merger {
-    private final Text tableName;
+    private final byte [] tableName;
     private final HTable table;
     private final Scanner metaScanner;
     private HRegionInfo latestRegion;
     
-    OnlineMerger(HBaseConfiguration conf, FileSystem fs, Text tableName)
+    OnlineMerger(HBaseConfiguration conf, FileSystem fs,
+      final byte [] tableName)
     throws IOException {
       super(conf, fs, tableName);
       this.tableName = tableName;
@@ -217,7 +218,7 @@
             COL_REGIONINFO);
         }
         HRegionInfo region = Writables.getHRegionInfo(regionInfo.getValue());
-        if (!region.getTableDesc().getName().equals(this.tableName)) {
+        if (!Bytes.equals(region.getTableDesc().getName(), this.tableName)) {
           return null;
         }
         checkOfflined(region);
@@ -276,16 +277,16 @@
     }
 
     @Override
-    protected void updateMeta(Text oldRegion1, Text oldRegion2, 
+    protected void updateMeta(final byte [] oldRegion1,
+        final byte [] oldRegion2, 
       HRegion newRegion)
     throws IOException {
-      Text[] regionsToDelete = {oldRegion1, oldRegion2};
-      for(int r = 0; r < regionsToDelete.length; r++) {
-        if(regionsToDelete[r].equals(latestRegion.getRegionName())) {
+      byte[][] regionsToDelete = {oldRegion1, oldRegion2};
+      for (int r = 0; r < regionsToDelete.length; r++) {
+        if(Bytes.equals(regionsToDelete[r], latestRegion.getRegionName())) {
           latestRegion = null;
         }
         table.deleteAll(regionsToDelete[r]);
-
         if(LOG.isDebugEnabled()) {
           LOG.debug("updated columns in row: " + regionsToDelete[r]);
         }
@@ -321,15 +322,16 @@
       // Scan root region to find all the meta regions
       
       root = new HRegion(rootTableDir, hlog, fs, conf,
-          HRegionInfo.rootRegionInfo, null, null);
+          HRegionInfo.ROOT_REGIONINFO, null, null);
 
       InternalScanner rootScanner = 
-        root.getScanner(COL_REGIONINFO_ARRAY, new Text(), 
+        root.getScanner(COL_REGIONINFO_ARRAY, HConstants.EMPTY_START_ROW, 
         HConstants.LATEST_TIMESTAMP, null);
       
       try {
         HStoreKey key = new HStoreKey();
-        TreeMap<Text, byte[]> results = new TreeMap<Text, byte[]>();
+        TreeMap<byte [], byte[]> results =
+          new TreeMap<byte [], byte[]>(Bytes.BYTES_COMPARATOR);
         while(rootScanner.next(key, results)) {
           for(byte [] b: results.values()) {
             HRegionInfo info = Writables.getHRegionInfoOrNull(b);
@@ -360,13 +362,10 @@
     }
 
     @Override
-    protected void updateMeta(Text oldRegion1, Text oldRegion2,
-        HRegion newRegion) throws IOException {
-      
-      Text[] regionsToDelete = {
-          oldRegion1,
-          oldRegion2
-      };
+    protected void updateMeta(final byte [] oldRegion1,
+      final byte [] oldRegion2, HRegion newRegion)
+    throws IOException {
+      byte[][] regionsToDelete = {oldRegion1, oldRegion2};
       for(int r = 0; r < regionsToDelete.length; r++) {
         BatchUpdate b = new BatchUpdate(regionsToDelete[r]);
         b.delete(COL_REGIONINFO);
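
Because byte [] has no content-based equals or compareTo, sorted collections keyed by rows now need an explicit comparator, as in the TreeMap built with Bytes.BYTES_COMPARATOR when scanning the root region above. A hedged sketch of that pattern; the row keys and values are illustrative:

import java.util.TreeMap;

import org.apache.hadoop.hbase.util.Bytes;

public class RowKeyMapSketch {
  public static void main(String[] args) {
    TreeMap<byte [], byte []> results =
      new TreeMap<byte [], byte []>(Bytes.BYTES_COMPARATOR);
    results.put(Bytes.toBytes("row-b"), Bytes.toBytes("value-b"));
    results.put(Bytes.toBytes("row-a"), Bytes.toBytes("value-a"));
    // Iteration order is the lexicographic byte order of the row keys.
    for (byte [] row : results.keySet()) {
      System.out.println(Bytes.toString(row));
    }
  }
}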

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HMsg.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HMsg.java?rev=656868&r1=656867&r2=656868&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HMsg.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HMsg.java Thu May 15 15:10:47 2008
@@ -188,7 +188,7 @@
       message.append(") : ");
       break;
     }
-    message.append(info == null ? "null" : info.getRegionName());
+    message.append(info == null ? "null": info.getRegionNameAsString());
     return message.toString();
   }
   
@@ -211,4 +211,4 @@
      this.msg = in.readByte();
      this.info.readFields(in);
    }
-}
+}
\ No newline at end of file

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HRegionInfo.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HRegionInfo.java?rev=656868&r1=656867&r2=656868&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HRegionInfo.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HRegionInfo.java Thu May 15 15:10:47 2008
@@ -23,10 +23,9 @@
 import java.io.DataOutput;
 import java.io.IOException;
 
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.WritableComparable;
-
+import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.JenkinsHash;
+import org.apache.hadoop.io.WritableComparable;
 
 /**
  * HRegion information.
@@ -38,21 +37,20 @@
    * @param regionName
    * @return the encodedName
    */
-  public static String encodeRegionName(final Text regionName) {
-    return String.valueOf(Math.abs(
-        JenkinsHash.hash(regionName.getBytes(), regionName.getLength(), 0)));
+  public static int encodeRegionName(final byte [] regionName) {
+    return Math.abs(JenkinsHash.hash(regionName, regionName.length, 0));
   }
 
   /** delimiter used between portions of a region name */
-  private static final String DELIMITER = ",";
+  public static final int DELIMITER = ',';
 
   /** HRegionInfo for root region */
-  public static final HRegionInfo rootRegionInfo =
-    new HRegionInfo(0L, HTableDescriptor.rootTableDesc);
+  public static final HRegionInfo ROOT_REGIONINFO =
+    new HRegionInfo(0L, HTableDescriptor.ROOT_TABLEDESC);
 
   /** HRegionInfo for first meta region */
-  public static final HRegionInfo firstMetaRegionInfo =
-    new HRegionInfo(1L, HTableDescriptor.metaTableDesc);
+  public static final HRegionInfo FIRST_META_REGIONINFO =
+    new HRegionInfo(1L, HTableDescriptor.META_TABLEDESC);
   
   /**
    * Extracts table name prefix from a region name.
@@ -60,26 +58,34 @@
    * @param regionName A region name.
    * @return The table prefix of a region name.
    */
-  public static Text getTableNameFromRegionName(final Text regionName) {
-    int offset = regionName.find(DELIMITER);
+  public static byte [] getTableNameFromRegionName(final byte [] regionName) {
+    int offset = -1;
+    for (int i = 0; i < regionName.length; i++) {
+      if (regionName[i] == DELIMITER) {
+        offset = i;
+        break;
+      }
+    }
     if (offset == -1) {
-      throw new IllegalArgumentException(regionName.toString() + " does not " +
-        "contain '" + DELIMITER + "' character");
+      throw new IllegalArgumentException(Bytes.toString(regionName) +
+        " does not contain '" + DELIMITER + "' character");
     }
     byte [] tableName = new byte[offset];
-    System.arraycopy(regionName.getBytes(), 0, tableName, 0, offset);
-    return new Text(tableName);
+    System.arraycopy(regionName, 0, tableName, 0, offset);
+    return tableName;
   }
 
-  private Text endKey;
-  private boolean offLine;
-  private long regionId;
-  private Text regionName;
-  private boolean split;
-  private Text startKey;
-  private HTableDescriptor tableDesc;
-  private int hashCode;
-  private transient String encodedName = null;
+  private byte [] endKey = HConstants.EMPTY_BYTE_ARRAY;
+  private boolean offLine = false;
+  private long regionId = -1;
+  private byte [] regionName = HConstants.EMPTY_BYTE_ARRAY;
+  private String regionNameStr = "";
+  private boolean split = false;
+  private byte [] startKey = HConstants.EMPTY_BYTE_ARRAY;
+  private HTableDescriptor tableDesc = null;
+  private int hashCode = -1;
+  public static final int NO_HASH = -1;
+  private volatile int encodedName = NO_HASH;
   
   private void setHashCode() {
     int result = this.regionName.hashCode();
@@ -91,29 +97,21 @@
     this.hashCode = result;
   }
   
-  /** Used to construct the HRegionInfo for the root and first meta regions */
+  /**
+   * Private constructor used constructing HRegionInfo for the catalog root and
+   * first meta regions
+   */
   private HRegionInfo(long regionId, HTableDescriptor tableDesc) {
     this.regionId = regionId;
     this.tableDesc = tableDesc;
-    this.endKey = new Text();
-    this.offLine = false;
-    this.regionName = new Text(tableDesc.getName().toString() + DELIMITER +
-        DELIMITER + regionId);
-    this.split = false;
-    this.startKey = new Text();
+    this.regionName = createRegionName(tableDesc.getName(), null, regionId);
+    this.regionNameStr = Bytes.toString(this.regionName);
     setHashCode();
   }
 
   /** Default constructor - creates empty object */
   public HRegionInfo() {
-    this.endKey = new Text();
-    this.offLine = false;
-    this.regionId = 0;
-    this.regionName = new Text();
-    this.split = false;
-    this.startKey = new Text();
     this.tableDesc = new HTableDescriptor();
-    this.hashCode = 0;
   }
   
   /**
@@ -124,8 +122,9 @@
    * @param endKey end of key range
    * @throws IllegalArgumentException
    */
-  public HRegionInfo(HTableDescriptor tableDesc, Text startKey, Text endKey)
-    throws IllegalArgumentException {
+  public HRegionInfo(final HTableDescriptor tableDesc, final byte [] startKey,
+      final byte [] endKey)
+  throws IllegalArgumentException {
     this(tableDesc, startKey, endKey, false);
   }
 
@@ -139,38 +138,65 @@
    * regions that may or may not hold references to this region.
    * @throws IllegalArgumentException
    */
-  public HRegionInfo(HTableDescriptor tableDesc, Text startKey, Text endKey,
-      final boolean split) throws IllegalArgumentException {
-
-    if(tableDesc == null) {
+  public HRegionInfo(HTableDescriptor tableDesc, final byte [] startKey,
+      final byte [] endKey, final boolean split)
+  throws IllegalArgumentException {
+    if (tableDesc == null) {
       throw new IllegalArgumentException("tableDesc cannot be null");
     }
-
-    this.endKey = new Text();
-    if(endKey != null) {
-      this.endKey.set(endKey);
-    }
-    
     this.offLine = false;
     this.regionId = System.currentTimeMillis();
-    
-    this.regionName = new Text(tableDesc.getName().toString() + DELIMITER +
-        (startKey == null ? "" : startKey.toString()) + DELIMITER +
-        regionId);
-      
+    this.regionName = createRegionName(tableDesc.getName(), startKey, regionId);
+    this.regionNameStr = Bytes.toString(this.regionName);
     this.split = split;
-
-    this.startKey = new Text();
-    if(startKey != null) {
-      this.startKey.set(startKey);
-    }
-    
+    this.endKey = endKey == null? HConstants.EMPTY_END_ROW: endKey.clone();
+    this.startKey = startKey == null?
+      HConstants.EMPTY_START_ROW: startKey.clone();
     this.tableDesc = tableDesc;
     setHashCode();
   }
   
+  private static byte [] createRegionName(final byte [] tableName,
+      final byte [] startKey, final long regionid) {
+    return createRegionName(tableName, startKey, Long.toString(regionid));
+  }
+
+  /**
+   * Make a region name of passed parameters.
+   * @param tableName
+   * @param startKey Can be null
+   * @param id Region id.
+   * @return Region name made of passed tableName, startKey and id
+   */
+  public static byte [] createRegionName(final byte [] tableName,
+      final byte [] startKey, final String id) {
+    return createRegionName(tableName, startKey, Bytes.toBytes(id));
+  }
+  /**
+   * Make a region name of passed parameters.
+   * @param tableName
+   * @param startKey Can be null
+   * @param id Region id
+   * @return Region name made of passed tableName, startKey and id
+   */
+  public static byte [] createRegionName(final byte [] tableName,
+      final byte [] startKey, final byte [] id) {
+    byte [] b = new byte [tableName.length + 2 + id.length +
+       (startKey == null? 0: startKey.length)];
+    int offset = tableName.length;
+    System.arraycopy(tableName, 0, b, 0, offset);
+    b[offset++] = DELIMITER;
+    if (startKey != null && startKey.length > 0) {
+      System.arraycopy(startKey, 0, b, offset, startKey.length);
+      offset += startKey.length;
+    }
+    b[offset++] = DELIMITER;
+    System.arraycopy(id, 0, b, offset, id.length);
+    return b;
+  }
+  
   /** @return the endKey */
-  public Text getEndKey(){
+  public byte [] getEndKey(){
     return endKey;
   }
 
@@ -179,21 +205,31 @@
     return regionId;
   }
 
-  /** @return the regionName */
-  public Text getRegionName(){
+  /**
+   * @return the regionName as an array of bytes.
+   * @see #getRegionNameAsString()
+   */
+  public byte [] getRegionName(){
     return regionName;
   }
+
+  /**
+   * @return Region name as a String for use in logging, etc.
+   */
+  public String getRegionNameAsString() {
+    return this.regionNameStr;
+  }
   
   /** @return the encoded region name */
-  public synchronized String getEncodedName() {
-    if (encodedName == null) {
-      encodedName = encodeRegionName(regionName);
+  public synchronized int getEncodedName() {
+    if (this.encodedName == NO_HASH) {
+      this.encodedName = encodeRegionName(this.regionName);
     }
-    return encodedName;
+    return this.encodedName;
   }
 
   /** @return the startKey */
-  public Text getStartKey(){
+  public byte [] getStartKey(){
     return startKey;
   }
 
@@ -250,8 +286,9 @@
    */
   @Override
   public String toString() {
-    return "regionname: " + this.regionName.toString() + ", startKey: <" +
-      this.startKey.toString() + ">, endKey: <" + this.endKey.toString() + 
+    return "regionname: " + this.regionNameStr + ", startKey: <" +
+      Bytes.toString(this.startKey) + ">, endKey: <" +
+      Bytes.toString(this.endKey) + 
       ">, encodedName: " + getEncodedName() + "," +
       (isOffline()? " offline: true,": "") + (isSplit()? " split: true,": "") +
       " tableDesc: {" + this.tableDesc.toString() + "}";
@@ -281,12 +318,12 @@
    * {@inheritDoc}
    */
   public void write(DataOutput out) throws IOException {
-    endKey.write(out);
+    Bytes.writeByteArray(out, endKey);
     out.writeBoolean(offLine);
     out.writeLong(regionId);
-    regionName.write(out);
+    Bytes.writeByteArray(out, regionName);
     out.writeBoolean(split);
-    startKey.write(out);
+    Bytes.writeByteArray(out, startKey);
     tableDesc.write(out);
     out.writeInt(hashCode);
   }
@@ -295,12 +332,13 @@
    * {@inheritDoc}
    */
   public void readFields(DataInput in) throws IOException {
-    this.endKey.readFields(in);
+    this.endKey = Bytes.readByteArray(in);
     this.offLine = in.readBoolean();
     this.regionId = in.readLong();
-    this.regionName.readFields(in);
+    this.regionName = Bytes.readByteArray(in);
+    this.regionNameStr = Bytes.toString(this.regionName);
     this.split = in.readBoolean();
-    this.startKey.readFields(in);
+    this.startKey = Bytes.readByteArray(in);
     this.tableDesc.readFields(in);
     this.hashCode = in.readInt();
   }
@@ -322,12 +360,12 @@
     }
 
     // Compare start keys.
-    result = this.startKey.compareTo(other.startKey);
+    result = Bytes.compareTo(this.startKey, other.startKey);
     if (result != 0) {
       return result;
     }
     
     // Compare end keys.
-    return this.endKey.compareTo(other.endKey);
+    return Bytes.compareTo(this.endKey, other.endKey);
   }
 }
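
Region names are now assembled byte-for-byte by the new HRegionInfo.createRegionName as <tableName>,<startKey>,<regionId>, and the table prefix can be recovered with getTableNameFromRegionName. A hedged sketch using only the public overloads shown above; the table name, start key, and region id are illustrative:

import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.util.Bytes;

public class RegionNameSketch {
  public static void main(String[] args) {
    byte [] table = Bytes.toBytes("mytable");
    byte [] startKey = Bytes.toBytes("row-0500");
    byte [] name = HRegionInfo.createRegionName(table, startKey, "1210889447000");

    // Prints: mytable,row-0500,1210889447000
    System.out.println(Bytes.toString(name));
    // Prints: mytable
    System.out.println(Bytes.toString(HRegionInfo.getTableNameFromRegionName(name)));
  }
}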

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HStoreKey.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HStoreKey.java?rev=656868&r1=656867&r2=656868&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HStoreKey.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HStoreKey.java Thu May 15 15:10:47 2008
@@ -19,14 +19,16 @@
  */
 package org.apache.hadoop.hbase;
 
-import org.apache.hadoop.hbase.io.TextSequence;
-import org.apache.hadoop.io.*;
 
-import java.io.*;
-import java.nio.ByteBuffer;
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.io.WritableComparable;
 
 /**
- * A Key for a stored row
+ * A Key for a stored row.
  */
 public class HStoreKey implements WritableComparable {
   /**
@@ -34,14 +36,13 @@
    */
   public static final char COLUMN_FAMILY_DELIMITER = ':';
   
-  private Text row;
-  private Text column;
-  private long timestamp;
-
+  private byte [] row = HConstants.EMPTY_BYTE_ARRAY;
+  private byte [] column = HConstants.EMPTY_BYTE_ARRAY;
+  private long timestamp = Long.MAX_VALUE;
 
   /** Default constructor used in conjunction with Writable interface */
   public HStoreKey() {
-    this(new Text());
+    super();
   }
   
   /**
@@ -51,10 +52,21 @@
    * 
    * @param row - row key
    */
-  public HStoreKey(Text row) {
+  public HStoreKey(final byte [] row) {
     this(row, Long.MAX_VALUE);
   }
-  
+
+  /**
+   * Create an HStoreKey specifying only the row
+   * The column defaults to the empty string and the time stamp defaults to
+   * Long.MAX_VALUE
+   * 
+   * @param row - row key
+   */
+  public HStoreKey(final String row) {
+    this(row, Long.MAX_VALUE);
+  }
+
   /**
    * Create an HStoreKey specifying the row and timestamp
    * The column name defaults to the empty string
@@ -62,10 +74,21 @@
    * @param row row key
    * @param timestamp timestamp value
    */
-  public HStoreKey(Text row, long timestamp) {
-    this(row, new Text(), timestamp);
+  public HStoreKey(final byte [] row, long timestamp) {
+    this(row, HConstants.EMPTY_BYTE_ARRAY, timestamp);
   }
-  
+
+  /**
+   * Create an HStoreKey specifying the row and timestamp
+   * The column name defaults to the empty string
+   * 
+   * @param row row key
+   * @param timestamp timestamp value
+   */
+  public HStoreKey(final String row, long timestamp) {
+    this (row, "", timestamp);
+  }
+
   /**
    * Create an HStoreKey specifying the row and column names
    * The timestamp defaults to LATEST_TIMESTAMP
@@ -73,28 +96,51 @@
    * @param row row key
    * @param column column key
    */
-  public HStoreKey(Text row, Text column) {
+  public HStoreKey(final String row, final String column) {
     this(row, column, HConstants.LATEST_TIMESTAMP);
   }
-  
+
   /**
-   * Create an HStoreKey specifying all the fields
+   * Create an HStoreKey specifying the row and column names
+   * The timestamp defaults to LATEST_TIMESTAMP
    * 
    * @param row row key
    * @param column column key
+   */
+  public HStoreKey(final byte [] row, final byte [] column) {
+    this(row, column, HConstants.LATEST_TIMESTAMP);
+  }
+
+  /**
+   * Create an HStoreKey specifying all the fields
+   * Does not make copies of the passed byte arrays. Presumes the passed 
+   * arrays immutable.
+   * @param row row key
+   * @param column column key
    * @param timestamp timestamp value
    */
-  public HStoreKey(Text row, Text column, long timestamp) {
-    // Make copies by doing 'new Text(arg)'.
-    this.row = new Text(row);
-    this.column = new Text(column);
+  public HStoreKey(final String row, final String column, long timestamp) {
+    this (Bytes.toBytes(row), Bytes.toBytes(column), timestamp);
+  }
+
+  /**
+   * Create an HStoreKey specifying all the fields
+   * Does not make copies of the passed byte arrays. Presumes the passed 
+   * arrays immutable.
+   * @param row row key
+   * @param column column key
+   * @param timestamp timestamp value
+   */
+  public HStoreKey(final byte [] row, final byte [] column, long timestamp) {
+    // Does not make copies; presumes the passed arrays are immutable.
+    this.row = row;
+    this.column = column;
     this.timestamp = timestamp;
   }
   
   /** @return Approximate size in bytes of this key. */
   public long getSize() {
-    return this.row.getLength() + this.column.getLength() +
-      8 /* There is no sizeof in java. Presume long is 8 (64bit machine)*/;
+    return this.row.length + this.column.length + Bytes.SIZEOF_LONG;
   }
   
   /**
@@ -111,19 +157,19 @@
    * 
    * @param newrow new row key value
    */
-  public void setRow(Text newrow) {
-    this.row.set(newrow);
+  public void setRow(byte [] newrow) {
+    this.row = newrow;
   }
   
   /**
-   * Change the value of the column key
+   * Change the value of the column in this key
    * 
-   * @param newcol new column key value
+   * @param c new column family value
    */
-  public void setColumn(Text newcol) {
-    this.column.set(newcol);
+  public void setColumn(byte [] c) {
+    this.column = c;
   }
-  
+
   /**
    * Change the value of the timestamp field
    * 
@@ -145,18 +191,18 @@
   }
   
   /** @return value of row key */
-  public Text getRow() {
+  public byte [] getRow() {
     return row;
   }
   
-  /** @return value of column key */
-  public Text getColumn() {
-    return column;
+  /** @return value of column */
+  public byte [] getColumn() {
+    return this.column;
   }
-  
+
   /** @return value of timestamp */
   public long getTimestamp() {
-    return timestamp;
+    return this.timestamp;
   }
   
   /**
@@ -167,8 +213,8 @@
    * @see #matchesRowFamily(HStoreKey)
    */ 
   public boolean matchesRowCol(HStoreKey other) {
-    return this.row.compareTo(other.row) == 0
-      && this.column.compareTo(other.column) == 0;
+    return Bytes.equals(this.row, other.row) &&
+      Bytes.equals(column, other.column);
   }
   
   /**
@@ -181,8 +227,8 @@
    * @see #matchesRowFamily(HStoreKey)
    */
   public boolean matchesWithoutColumn(HStoreKey other) {
-    return this.row.compareTo(other.row) == 0
-      && this.timestamp >= other.getTimestamp();
+    return Bytes.equals(this.row, other.row) &&
+      this.timestamp >= other.getTimestamp();
   }
   
   /**
@@ -191,21 +237,21 @@
    * @param that Key to compare against. Compares row and column family
    * 
    * @return true if same row and column family
-   * @throws InvalidColumnNameException 
    * @see #matchesRowCol(HStoreKey)
    * @see #matchesWithoutColumn(HStoreKey)
    */
-  public boolean matchesRowFamily(HStoreKey that)
-  throws InvalidColumnNameException {
-    return this.row.compareTo(that.row) == 0 &&
-      extractFamily(this.column).
-        compareTo(extractFamily(that.getColumn())) == 0;
+  public boolean matchesRowFamily(HStoreKey that) {
+    int delimiterIndex = getFamilyDelimiterIndex(this.column);
+    return Bytes.equals(this.row, that.row) &&
+      Bytes.compareTo(this.column, 0, delimiterIndex, that.column, 0,
+        delimiterIndex) == 0;
   }
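
The three match flavours can be contrasted with a small sketch like the following (illustration only, not part of the patch):

import org.apache.hadoop.hbase.HStoreKey;
import org.apache.hadoop.hbase.util.Bytes;

public class HStoreKeyMatchSketch {
  public static void main(String[] args) {
    HStoreKey k1 = new HStoreKey(Bytes.toBytes("row1"), Bytes.toBytes("info:server"), 2L);
    HStoreKey k2 = new HStoreKey(Bytes.toBytes("row1"), Bytes.toBytes("info:regioninfo"), 1L);

    System.out.println(k1.matchesRowCol(k2));        // false: columns differ
    System.out.println(k1.matchesWithoutColumn(k2)); // true: same row and 2L >= 1L
    System.out.println(k1.matchesRowFamily(k2));     // true: same row, same 'info' family
  }
}
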
   
   /** {@inheritDoc} */
   @Override
   public String toString() {
-    return row.toString() + "/" + column.toString() + "/" + timestamp;
+    return Bytes.toString(this.row) + "/" + Bytes.toString(this.column) + "/" +
+      timestamp;
   }
   
   /** {@inheritDoc} */
@@ -228,11 +274,14 @@
   /** {@inheritDoc} */
   public int compareTo(Object o) {
     HStoreKey other = (HStoreKey)o;
-    int result = this.row.compareTo(other.row);
+    int result = Bytes.compareTo(this.row, other.row);
     if (result != 0) {
       return result;
     }
-    result = this.column.compareTo(other.column);
+    result = this.column == null && other.column == null? 0:
+      this.column == null && other.column != null? -1:
+      this.column != null && other.column == null? 1:
+      Bytes.compareTo(this.column, other.column);
     if (result != 0) {
       return result;
     }
@@ -248,108 +297,136 @@
     return result;
   }
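
The resulting natural ordering compares rows first and then columns, both as raw byte arrays, with a null column sorting ahead of any non-null one; the timestamp tie-break falls outside the hunks shown here. A rough illustration (not part of the patch):

import org.apache.hadoop.hbase.HStoreKey;
import org.apache.hadoop.hbase.util.Bytes;

public class HStoreKeyOrderSketch {
  public static void main(String[] args) {
    HStoreKey a = new HStoreKey(Bytes.toBytes("row1"), Bytes.toBytes("info:a"), 1L);
    HStoreKey b = new HStoreKey(Bytes.toBytes("row1"), Bytes.toBytes("info:b"), 1L);
    HStoreKey c = new HStoreKey(Bytes.toBytes("row2"), Bytes.toBytes("info:a"), 1L);

    System.out.println(a.compareTo(b) < 0); // true: same row, "info:a" before "info:b"
    System.out.println(b.compareTo(c) < 0); // true: "row1" before "row2", column not consulted
  }
}
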
 
-  // Writable
-
-  /** {@inheritDoc} */
-  public void write(DataOutput out) throws IOException {
-    row.write(out);
-    column.write(out);
-    out.writeLong(timestamp);
+  /**
+   * @param column
+   * @return New byte array holding the family prefix of <code>column</code>.
+   * @see #parseColumn(byte[])
+   */
+  public static byte [] getFamily(final byte [] column) {
+    int index = getFamilyDelimiterIndex(column);
+    if (index <= 0) {
+      throw new IllegalArgumentException("No ':' delimiter between " +
+        "column family and qualifier in the passed column name <" +
+        Bytes.toString(column) + ">");
+    }
+    byte [] result = new byte[index];
+    System.arraycopy(column, 0, result, 0, index);
+    return result;
   }
-
-  /** {@inheritDoc} */
-  public void readFields(DataInput in) throws IOException {
-    row.readFields(in);
-    column.readFields(in);
-    timestamp = in.readLong();
-  }
-  
-  // Statics
-  // TODO: Move these utility methods elsewhere (To a Column class?).
-  
-  /**
-   * Extracts the column family name from a column
-   * For example, returns 'info' if the specified column was 'info:server'
-   * @param col name of column
-   * @return column famile as a TextSequence based on the passed
-   * <code>col</code>.  If <code>col</code> is reused, make a new Text of
-   * the result by calling {@link TextSequence#toText()}.
-   * @throws InvalidColumnNameException 
-   */
-  public static TextSequence extractFamily(final Text col)
-  throws InvalidColumnNameException {
-    return extractFamily(col, false);
+  
+  /**
+   * @param column
+   * @return Return hash of family portion of passed column.
+   */
+  public static Integer getFamilyMapKey(final byte [] column) {
+    int index = getFamilyDelimiterIndex(column);
+    // If no delimiter was found (index is -1), presume the passed column is a
+    // family name absent the colon delimiter.
+    return Bytes.mapKey(column, index > 0? index: column.length);
   }
   
   /**
-   * Extracts the column family name from a column
-   * For example, returns 'info' if the specified column was 'info:server'
-   * @param col name of column
-   * @param withColon set to true if colon separator should be returned
-   * @return column famile as a TextSequence based on the passed
-   * <code>col</code>.  If <code>col</code> is reused, make a new Text of
-   * the result by calling {@link TextSequence#toText()}.
-   * @throws InvalidColumnNameException 
-   */
-  public static TextSequence extractFamily(final Text col,
-    final boolean withColon)
-  throws InvalidColumnNameException {
-    int offset = getColonOffset(col);
-    // Include ':' in copy?
-    offset += (withColon)? 1: 0;
-    if (offset == col.getLength()) {
-      return new TextSequence(col);
+   * @param family
+   * @param column
+   * @return True if <code>column</code> has a family of <code>family</code>.
+   */
+  public static boolean matchingFamily(final byte [] family,
+      final byte [] column) {
+    // Make sure index of the ':' is at same offset.
+    int index = getFamilyDelimiterIndex(column);
+    if (index != family.length) {
+      return false;
     }
-    return new TextSequence(col, 0, offset);
+    return Bytes.compareTo(family, 0, index, column, 0, index) == 0;
   }
   
   /**
-   * Extracts the column qualifier, the portion that follows the colon (':')
-   * family/qualifier separator.
-   * For example, returns 'server' if the specified column was 'info:server'
-   * @param col name of column
-   * @return column qualifier as a TextSequence based on the passed
-   * <code>col</code>.  If <code>col</code> is reused, make a new Text of
-   * the result by calling {@link TextSequence#toText()}.
-   * @throws InvalidColumnNameException 
-   */
-  public static TextSequence extractQualifier(final Text col)
-  throws InvalidColumnNameException {
-    int offset = getColonOffset(col);
-    if (offset + 1 == col.getLength()) {
-      return null;
+   * @param family
+   * @return Return <code>family</code> plus the family delimiter.
+   */
+  public static byte [] addDelimiter(final byte [] family) {
+    // Manufacture key by adding delimiter to the passed in colFamily.
+    byte [] familyPlusDelimiter = new byte [family.length + 1];
+    System.arraycopy(family, 0, familyPlusDelimiter, 0, family.length);
+    familyPlusDelimiter[family.length] = HStoreKey.COLUMN_FAMILY_DELIMITER;
+    return familyPlusDelimiter;
+  }
+
+  /**
+   * @param column
+   * @return New byte array holding the qualifier suffix of <code>column</code>.
+   * @see #parseColumn(byte[])
+   */
+  public static byte [] getQualifier(final byte [] column) {
+    int index = getFamilyDelimiterIndex(column);
+    int len = column.length - (index + 1);
+    byte [] result = new byte[len];
+    System.arraycopy(column, index + 1, result, 0, len);
+    return result;
+  }
+
+  /**
+   * @param c Column name
+   * @return Return array of size two whose first element has the family
+   * prefix of passed column <code>c</code> and whose second element is the
+   * column qualifier.
+   */
+  public static byte [][] parseColumn(final byte [] c) {
+    byte [][] result = new byte [2][];
+    int index = getFamilyDelimiterIndex(c);
+    if (index == -1) {
+      throw new IllegalArgumentException("Impossible column name: " + c);
     }
-    return new TextSequence(col, offset + 1);
+    result[0] = new byte [index];
+    System.arraycopy(c, 0, result[0], 0, index);
+    int len = c.length - (index + 1);
+    result[1] = new byte[len];
+    System.arraycopy(c, index + 1 /*Skip delimiter*/, result[1], 0,
+      len);
+    return result;
   }
   
-  private static int getColonOffset(final Text col)
-  throws InvalidColumnNameException {
-    int offset = -1;
-    ByteBuffer bb = ByteBuffer.wrap(col.getBytes());
-    for (int lastPosition = bb.position(); bb.hasRemaining();
-        lastPosition = bb.position()) {
-      if (Text.bytesToCodePoint(bb) == COLUMN_FAMILY_DELIMITER) {
-        offset = lastPosition;
+  /**
+   * @param b
+   * @return Index of the family-qualifier colon delimiter character in the
+   * passed buffer, or -1 if the buffer contains no delimiter.
+   */
+  public static int getFamilyDelimiterIndex(final byte [] b) {
+    if (b == null) {
+      throw new NullPointerException();
+    }
+    int result = -1;
+    for (int i = 0; i < b.length; i++) {
+      if (b[i] == COLUMN_FAMILY_DELIMITER) {
+        result = i;
         break;
       }
     }
-    if(offset < 0) {
-      throw new InvalidColumnNameException(col + " is missing the colon " +
-        "family/qualifier separator");
-    }
-    return offset;
+    return result;
   }
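
Taken together, these statics replace the old Text-based extractFamily/extractQualifier helpers. A quick sketch (illustration only) of how they decompose a column name such as info:server:

import org.apache.hadoop.hbase.HStoreKey;
import org.apache.hadoop.hbase.util.Bytes;

public class ColumnParsingSketch {
  public static void main(String[] args) {
    byte [] column = Bytes.toBytes("info:server");

    byte [] family = HStoreKey.getFamily(column);           // "info"
    byte [] qualifier = HStoreKey.getQualifier(column);     // "server"
    byte [][] parts = HStoreKey.parseColumn(column);        // { "info", "server" }
    int index = HStoreKey.getFamilyDelimiterIndex(column);  // 4, offset of the ':'
    boolean match = HStoreKey.matchingFamily(Bytes.toBytes("info"), column); // true

    System.out.println(Bytes.toString(family) + " " + Bytes.toString(qualifier) +
      " " + parts.length + " " + index + " " + match);
  }
}
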
 
   /**
    * Returns row and column bytes out of an HStoreKey.
    * @param hsk Store key.
    * @return byte array encoding of HStoreKey
-   * @throws UnsupportedEncodingException
    */
-  public static byte[] getBytes(final HStoreKey hsk)
-  throws UnsupportedEncodingException {
-    StringBuilder s = new StringBuilder(hsk.getRow().toString());
-    s.append(hsk.getColumn().toString());
-    return s.toString().getBytes(HConstants.UTF8_ENCODING);
+  public static byte[] getBytes(final HStoreKey hsk) {
+    return Bytes.add(hsk.getRow(), hsk.getColumn());
+  }
+  
+  // Writable
+
+  /** {@inheritDoc} */
+  public void write(DataOutput out) throws IOException {
+    Bytes.writeByteArray(out, this.row);
+    Bytes.writeByteArray(out, this.column);
+    out.writeLong(timestamp);
+  }
+
+  /** {@inheritDoc} */
+  public void readFields(DataInput in) throws IOException {
+    this.row = Bytes.readByteArray(in);
+    this.column = Bytes.readByteArray(in);
+    this.timestamp = in.readLong();
   }
 }
\ No newline at end of file
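
Because row and column are now written with Bytes.writeByteArray, a Writable round trip looks roughly like the sketch below. It assumes HStoreKey keeps a no-argument constructor for deserialization, which is not shown in these hunks:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.hbase.HStoreKey;
import org.apache.hadoop.hbase.util.Bytes;

public class HStoreKeyWritableSketch {
  public static void main(String[] args) throws IOException {
    HStoreKey original =
      new HStoreKey(Bytes.toBytes("row1"), Bytes.toBytes("info:server"), 1L);

    // Serialize.
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    original.write(new DataOutputStream(bos));

    // Deserialize into a fresh key (assumes a no-arg constructor exists).
    HStoreKey copy = new HStoreKey();
    copy.readFields(new DataInputStream(new ByteArrayInputStream(bos.toByteArray())));

    System.out.println(copy);                     // row1/info:server/1
    System.out.println(original.compareTo(copy)); // 0

    // Row and column bytes concatenated, per the static getBytes helper.
    byte [] rowAndColumn = HStoreKey.getBytes(original);
    System.out.println(rowAndColumn.length);      // 4 + 11
  }
}
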

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HTableDescriptor.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HTableDescriptor.java?rev=656868&r1=656867&r2=656868&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HTableDescriptor.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HTableDescriptor.java Thu May 15 15:10:47 2008
@@ -22,16 +22,14 @@
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
+import java.util.Collection;
 import java.util.Collections;
+import java.util.HashMap;
 import java.util.Iterator;
 import java.util.Map;
-import java.util.SortedMap;
-import java.util.TreeMap;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
 
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.Text;
+import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.io.WritableComparable;
 
 /**
@@ -39,74 +37,98 @@
  * column families.
  */
 public class HTableDescriptor implements WritableComparable {
-  /** table descriptor for root table */
-  public static final HTableDescriptor rootTableDesc =
+  /** Table descriptor for <code>-ROOT-</code> catalog table */
+  public static final HTableDescriptor ROOT_TABLEDESC =
     new HTableDescriptor(HConstants.ROOT_TABLE_NAME,
         new HColumnDescriptor(HConstants.COLUMN_FAMILY, 1,
             HColumnDescriptor.CompressionType.NONE, false, false,
             Integer.MAX_VALUE, HConstants.FOREVER, null));
   
-  /** table descriptor for meta table */
-  public static final HTableDescriptor metaTableDesc =
+  /** Table descriptor for <code>.META.</code> catalog table */
+  public static final HTableDescriptor META_TABLEDESC =
     new HTableDescriptor(HConstants.META_TABLE_NAME,
         new HColumnDescriptor(HConstants.COLUMN_FAMILY, 1,
             HColumnDescriptor.CompressionType.NONE, false, false,
             Integer.MAX_VALUE, HConstants.FOREVER, null));
   
-  private boolean rootregion;
-  private boolean metaregion;
-  private Text name;
-  // TODO: Does this need to be a treemap?  Can it be a HashMap?
-  private final TreeMap<Text, HColumnDescriptor> families;
-  
-  /*
-   * Legal table names can only contain 'word characters':
-   * i.e. <code>[a-zA-Z_0-9-.]</code>.
-   * Lets be restrictive until a reason to be otherwise. One reason to limit
-   * characters in table name is to ensure table regions as entries in META
-   * regions can be found (See HADOOP-1581 'HBASE: Un-openable tablename bug').
-   */
-  private static final Pattern LEGAL_TABLE_NAME =
-    Pattern.compile("^[\\w-.]+$");
-
-  /** Used to construct the table descriptors for root and meta tables */
-  private HTableDescriptor(Text name, HColumnDescriptor family) {
-    rootregion = name.equals(HConstants.ROOT_TABLE_NAME);
+  private boolean rootregion = false;
+  private boolean metaregion = false;
+  private byte [] name = HConstants.EMPTY_BYTE_ARRAY;
+  private String nameAsString = "";
+  
+  // Key is hash of the family name.
+  private final Map<Integer, HColumnDescriptor> families =
+    new HashMap<Integer, HColumnDescriptor>();
+
+  /**
+   * Private constructor used internally creating table descriptors for 
+   * catalog tables: e.g. .META. and -ROOT-.
+   */
+  private HTableDescriptor(final byte [] name, HColumnDescriptor family) {
+    this.name = name.clone();
+    this.rootregion = Bytes.equals(name, HConstants.ROOT_TABLE_NAME);
     this.metaregion = true;
-    this.name = new Text(name);
-    this.families = new TreeMap<Text, HColumnDescriptor>();
-    families.put(family.getName(), family);
+    this.families.put(Bytes.mapKey(family.getName()), family);
   }
 
   /**
    * Constructs an empty object.
    * For deserializing an HTableDescriptor instance only.
-   * @see #HTableDescriptor(String)
+   * @see #HTableDescriptor(byte[])
    */
   public HTableDescriptor() {
-    this.name = new Text();
-    this.families = new TreeMap<Text, HColumnDescriptor>();
+    super();
   }
 
   /**
    * Constructor.
    * @param name Table name.
    * @throws IllegalArgumentException if passed a table name
-   * that is made of other than 'word' characters: i.e.
-   * <code>[a-zA-Z_0-9]
+   * that is made of other than 'word' characters or underscores: i.e.
+   * <code>[a-zA-Z_0-9]</code>.
+   * @see <a href="HADOOP-1581">HADOOP-1581 HBASE: Un-openable tablename bug</a>
    */
-  public HTableDescriptor(String name) {
-    this();
-    Matcher m = LEGAL_TABLE_NAME.matcher(name);
-    if (m == null || !m.matches()) {
-      throw new IllegalArgumentException(
-          "Table names can only contain 'word characters': i.e. [a-zA-Z_0-9");
-    }
-    this.name.set(name);
-    this.rootregion = false;
-    this.metaregion = false;
+  public HTableDescriptor(final String name) {
+    this(Bytes.toBytes(name));
+  }
+
+  /**
+   * Constructor.
+   * @param name Table name.
+   * @throws IllegalArgumentException if passed a table name
+   * that is made of other than 'word' characters or underscores: i.e.
+   * <code>[a-zA-Z_0-9]</code>.
+   * @see <a href="HADOOP-1581">HADOOP-1581 HBASE: Un-openable tablename bug</a>
+   */
+  public HTableDescriptor(final byte [] name) {
+    this.name = isLegalTableName(name);
+    this.nameAsString = Bytes.toString(this.name);
   }
   
+  /**
+   * Check passed buffer is legal user-space table name.
+   * @param b Table name.
+   * @return Returns passed <code>b</code> param
+   * @throws IllegalArgumentException if passed a null or empty table name, or
+   * a name made of other than 'word' characters or underscores: i.e.
+   * <code>[a-zA-Z_0-9]</code>.
+   */
+  public static byte [] isLegalTableName(final byte [] b) {
+    if (b == null || b.length <= 0) {
+      throw new IllegalArgumentException("Name is null or empty");
+    }
+    for (int i = 0; i < b.length; i++) {
+      if (Character.isLetterOrDigit(b[i]) || b[i] == '_') {
+        continue;
+      }
+      throw new IllegalArgumentException("Illegal character <" + b[i] + ">. " +
+        "User-space table names can only contain 'word characters':" +
+        "i.e. [a-zA-Z_0-9]: " + Bytes.toString(b));
+    }
+    return b;
+  }
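
A short sketch of how the validation behaves (illustration only; the table names are made up):

import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.util.Bytes;

public class TableNameSketch {
  public static void main(String[] args) {
    // Accepted: letters, digits and underscores only.
    HTableDescriptor ok = new HTableDescriptor(Bytes.toBytes("web_pages_2008"));
    System.out.println(ok.getNameAsString()); // web_pages_2008

    // Rejected: any other character, here a space, is refused.
    try {
      new HTableDescriptor("web pages");
    } catch (IllegalArgumentException e) {
      System.out.println("rejected: " + e.getMessage());
    }
  }
}
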
+
   /** @return true if this is the root region */
   public boolean isRootRegion() {
     return rootregion;
@@ -123,48 +145,47 @@
   }
 
   /** @return name of table */
-  public Text getName() {
+  public byte [] getName() {
     return name;
   }
 
+  /** @return name of table */
+  public String getNameAsString() {
+    return this.nameAsString;
+  }
+
   /**
    * Adds a column family.
    * @param family HColumnDescriptor of family to add.
    */
-  public void addFamily(HColumnDescriptor family) {
-    if (family.getName() == null || family.getName().getLength() <= 0) {
+  public void addFamily(final HColumnDescriptor family) {
+    if (family.getName() == null || family.getName().length <= 0) {
       throw new NullPointerException("Family name cannot be null or empty");
     }
-    families.put(family.getName(), family);
+    this.families.put(Bytes.mapKey(family.getName()), family);
   }
 
   /**
    * Checks to see if this table contains the given column family
-   * 
-   * @param family - family name
+   * @param c Family name or column name.
    * @return true if the table contains the specified family name
    */
-  public boolean hasFamily(Text family) {
-    return families.containsKey(family);
+  public boolean hasFamily(final byte [] c) {
+    int index = HStoreKey.getFamilyDelimiterIndex(c);
+    // If index is -1, then presume we were passed a column family name minus
+    // the colon delimiter.
+    return families.containsKey(Bytes.mapKey(c, index == -1? c.length: index));
   }
 
-  /** 
-   * All the column families in this table.
-   * 
-   *  TODO: What is this used for? Seems Dangerous to let people play with our
-   *  private members.
-   *  
-   *  @return map of family members
+  /**
+   * @return Name of this table and then a map of all of the column family
+   * descriptors.
+   * @see #getNameAsString()
    */
-  public TreeMap<Text, HColumnDescriptor> families() {
-    return families;
-  }
-
-  /** {@inheritDoc} */
-  @Override
   public String toString() {
-    return "name: " + this.name.toString() + ", families: " + this.families;
-      }
+    return "name: " + Bytes.toString(this.name) + ", families: " +
+      this.families.values();
+  }
   
   /** {@inheritDoc} */
   @Override
@@ -176,9 +197,9 @@
   @Override
   public int hashCode() {
     // TODO: Cache.
-    int result = this.name.hashCode();
+    int result = Bytes.hashCode(this.name);
     if (this.families != null && this.families.size() > 0) {
-      for (Map.Entry<Text,HColumnDescriptor> e: this.families.entrySet()) {
+      for (HColumnDescriptor e: this.families.values()) {
         result ^= e.hashCode();
       }
     }
@@ -191,7 +212,7 @@
   public void write(DataOutput out) throws IOException {
     out.writeBoolean(rootregion);
     out.writeBoolean(metaregion);
-    name.write(out);
+    Bytes.writeByteArray(out, name);
     out.writeInt(families.size());
     for(Iterator<HColumnDescriptor> it = families.values().iterator();
         it.hasNext(); ) {
@@ -203,13 +224,14 @@
   public void readFields(DataInput in) throws IOException {
     this.rootregion = in.readBoolean();
     this.metaregion = in.readBoolean();
-    this.name.readFields(in);
+    this.name = Bytes.readByteArray(in);
+    this.nameAsString = Bytes.toString(this.name);
     int numCols = in.readInt();
-    families.clear();
-    for(int i = 0; i < numCols; i++) {
+    this.families.clear();
+    for (int i = 0; i < numCols; i++) {
       HColumnDescriptor c = new HColumnDescriptor();
       c.readFields(in);
-      families.put(c.getName(), c);
+      this.families.put(Bytes.mapKey(c.getName()), c);
     }
   }
 
@@ -218,22 +240,21 @@
   /** {@inheritDoc} */
   public int compareTo(Object o) {
     HTableDescriptor other = (HTableDescriptor) o;
-    int result = name.compareTo(other.name);
-    
-    if(result == 0) {
+    int result = Bytes.compareTo(this.name, other.name);
+    if (result == 0) {
       result = families.size() - other.families.size();
     }
     
-    if(result == 0 && families.size() != other.families.size()) {
+    if (result == 0 && families.size() != other.families.size()) {
       result = Integer.valueOf(families.size()).compareTo(
           Integer.valueOf(other.families.size()));
     }
     
-    if(result == 0) {
-      for(Iterator<HColumnDescriptor> it = families.values().iterator(),
+    if (result == 0) {
+      for (Iterator<HColumnDescriptor> it = families.values().iterator(),
           it2 = other.families.values().iterator(); it.hasNext(); ) {
         result = it.next().compareTo(it2.next());
-        if(result != 0) {
+        if (result != 0) {
           break;
         }
       }
@@ -244,8 +265,26 @@
   /**
    * @return Unmodifiable collection of this table's column family descriptors.
    */
-  public SortedMap<Text, HColumnDescriptor> getFamilies() {
-    return Collections.unmodifiableSortedMap(this.families);
+  public Collection<HColumnDescriptor> getFamilies() {
+    return Collections.unmodifiableCollection(this.families.values());
+  }
+
+  /**
+   * @param column
+   * @return Column descriptor for the passed family name or for the family
+   * of the passed column; <code>null</code> if no such family.
+   */
+  public HColumnDescriptor getFamily(final byte [] column) {
+    return this.families.get(HStoreKey.getFamilyMapKey(column));
+  }
+
+  /**
+   * @param column
+   * @return The HColumnDescriptor removed for the passed family name or for
+   * the family of the passed column; <code>null</code> if no such family.
+   */
+  public HColumnDescriptor removeFamily(final byte [] column) {
+    return this.families.remove(HStoreKey.getFamilyMapKey(column));
   }
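
The family lookups key off the hash of the family prefix, so a bare family name and a full family:qualifier column address the same entry. A minimal sketch of the call shapes, run against an empty descriptor since constructing an HColumnDescriptor is outside these hunks:

import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.util.Bytes;

public class FamilyLookupSketch {
  public static void main(String[] args) {
    HTableDescriptor htd = new HTableDescriptor("mytable");

    // Either a bare family name or a full family:qualifier column may be
    // passed; the colon delimiter index decides how much of the name is hashed.
    System.out.println(htd.hasFamily(Bytes.toBytes("info")));        // false: nothing added yet
    System.out.println(htd.hasFamily(Bytes.toBytes("info:server"))); // false, same lookup key
    System.out.println(htd.getFamily(Bytes.toBytes("info:server"))); // null
    System.out.println(htd.removeFamily(Bytes.toBytes("info")));     // null
    System.out.println(htd.getFamilies().isEmpty());                 // true
  }
}
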
 
   /**
@@ -253,7 +292,7 @@
    * @param tableName name of table
    * @return path for table
    */
-  public static Path getTableDir(Path rootdir, Text tableName) {
-    return new Path(rootdir, tableName.toString());
+  public static Path getTableDir(Path rootdir, final byte [] tableName) {
+    return new Path(rootdir, Bytes.toString(tableName));
   }
-}
+}
\ No newline at end of file

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/LocalHBaseCluster.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/LocalHBaseCluster.java?rev=656868&r1=656867&r2=656868&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/LocalHBaseCluster.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/LocalHBaseCluster.java Thu May 15 15:10:47 2008
@@ -33,6 +33,7 @@
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
+import org.apache.hadoop.hbase.util.Bytes;
 
 /**
  * This class creates a single process HBase cluster. One thread is created for
@@ -332,7 +333,9 @@
     LocalHBaseCluster cluster = new LocalHBaseCluster(conf);
     cluster.startup();
     HBaseAdmin admin = new HBaseAdmin(conf);
-    admin.createTable(new HTableDescriptor(cluster.getClass().getName()));
+    HTableDescriptor htd =
+      new HTableDescriptor(Bytes.toBytes(cluster.getClass().getName()));
+    admin.createTable(htd);
     cluster.shutdown();
   }
 }

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/NotServingRegionException.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/NotServingRegionException.java?rev=656868&r1=656867&r2=656868&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/NotServingRegionException.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/NotServingRegionException.java Thu May 15 15:10:47 2008
@@ -21,6 +21,8 @@
 
 import java.io.IOException;
 
+import org.apache.hadoop.hbase.util.Bytes;
+
 /**
  * Thrown by a region server if it is sent a request for a region it is not
  * serving.
@@ -40,5 +42,12 @@
   public NotServingRegionException(String s) {
     super(s);
   }
-
-}
+  
+  /**
+   * Constructor
+   * @param s message
+   */
+  public NotServingRegionException(final byte [] s) {
+    super(Bytes.toString(s));
+  }
+}
\ No newline at end of file
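
The new constructor simply renders the region name for the exception message. A hypothetical call site (the region name value below is made up):

import org.apache.hadoop.hbase.NotServingRegionException;
import org.apache.hadoop.hbase.util.Bytes;

public class NotServingSketch {
  public static void main(String[] args) {
    byte [] regionName = Bytes.toBytes("mytable,,1210889447000"); // made-up region name
    try {
      throw new NotServingRegionException(regionName);
    } catch (NotServingRegionException e) {
      System.out.println(e.getMessage()); // mytable,,1210889447000
    }
  }
}
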


