hbase-commits mailing list archives

From st...@apache.org
Subject svn commit: r662531 - in /hadoop/hbase/trunk/src: java/org/apache/hadoop/hbase/ java/org/apache/hadoop/hbase/master/ java/org/apache/hadoop/hbase/util/ test/org/apache/hadoop/hbase/ test/org/apache/hadoop/hbase/util/
Date Mon, 02 Jun 2008 19:36:55 GMT
Author: stack
Date: Mon Jun  2 12:36:55 2008
New Revision: 662531

URL: http://svn.apache.org/viewvc?rev=662531&view=rev
Log:
HBASE-660 [Migration] addColumn/deleteColumn functionality in MetaUtils

Added:
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/util/TestMetaUtils.java
Modified:
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HStoreKey.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ChangeTableState.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ColumnOperation.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/DeleteColumn.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/TableDelete.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/TableOperation.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/FSUtils.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/MetaUtils.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/AbstractMergeTestBase.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/HBaseClusterTestCase.java
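
The heart of this change is a pair of offline schema-editing methods on MetaUtils,
addColumn and deleteColumn, mirroring the master's online AddColumn/DeleteColumn
TableOperations. Below is a minimal sketch of how a migration script might drive
them; the table and family names are hypothetical, and the cluster is assumed to be
shut down (TestMetaUtils, added at the end of this commit, exercises the same calls
against a mini cluster):

    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.hbase.util.Bytes;
    import org.apache.hadoop.hbase.util.MetaUtils;

    public class OfflineSchemaEdit {
      public static void main(String[] args) throws Exception {
        // MetaUtils edits -ROOT- and .META. directly, so HBase must not be running.
        MetaUtils utils = new MetaUtils(new HBaseConfiguration());
        utils.initialize();
        try {
          // Add the new family to the table's descriptor in every .META. row...
          utils.addColumn(Bytes.toBytes("mytable"),
            new HColumnDescriptor(Bytes.toBytes("newfamily:")));
          // ...then drop the old family along with its on-disk directories.
          utils.deleteColumn(Bytes.toBytes("mytable"), Bytes.toBytes("oldfamily:"));
        } finally {
          // shutdown() closes the catalog regions and persists the edits.
          utils.shutdown();
        }
      }
    }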

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HStoreKey.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HStoreKey.java?rev=662531&r1=662530&r2=662531&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HStoreKey.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HStoreKey.java Mon Jun  2 12:36:55 2008
@@ -299,7 +299,8 @@
 
   /**
    * @param column
-   * @return New byte array that holds <code>column</code> family prefix.
+   * @return New byte array that holds <code>column</code> family prefix only
+   * (Does not include the colon DELIMITER).
    * @throws ColumnNameParseException 
    * @see #parseColumn(byte[])
    */
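
The clarified contract above matters to the new filesystem helper added below.
Assuming this javadoc sits on HStoreKey.getFamily, which is the call
MetaUtils.deleteColumn uses further down, a hedged illustration:

    // The returned family bytes carry no trailing ':' delimiter, so they can
    // be used directly as a column family directory name.
    byte [] family = HStoreKey.getFamily(Bytes.toBytes("oldcolumn:"));
    // family now holds the bytes of "oldcolumn" and is what gets handed to
    // FSUtils.deleteColumnFamily(fs, tabledir, encodedRegionName, family).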

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ChangeTableState.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ChangeTableState.java?rev=662531&r1=662530&r2=662531&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ChangeTableState.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ChangeTableState.java Mon Jun  2 12:36:55 2008
@@ -25,6 +25,8 @@
 import java.util.Map;
 import java.util.TreeMap;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.ipc.HRegionInterface;
 import org.apache.hadoop.hbase.io.BatchUpdate;
@@ -33,6 +35,7 @@
 
 /** Instantiated to enable or disable a table */
 class ChangeTableState extends TableOperation {
+  private final Log LOG = LogFactory.getLog(this.getClass());
   private boolean online;
 
   protected final Map<String, HashSet<HRegionInfo>> servedRegions =

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ColumnOperation.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ColumnOperation.java?rev=662531&r1=662530&r2=662531&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ColumnOperation.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ColumnOperation.java Mon Jun  2 12:36:55 2008
@@ -20,6 +20,9 @@
 package org.apache.hadoop.hbase.master;
 
 import java.io.IOException;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.ipc.HRegionInterface;
 import org.apache.hadoop.hbase.io.BatchUpdate;
@@ -27,6 +30,7 @@
 import org.apache.hadoop.hbase.util.Writables;
 
 abstract class ColumnOperation extends TableOperation {
+  private final Log LOG = LogFactory.getLog(this.getClass());
   
   protected ColumnOperation(final HMaster master, final byte [] tableName) 
   throws IOException {
@@ -53,4 +57,4 @@
       LOG.debug("updated columns in row: " + i.getRegionName());
     }
   }
-}
+}
\ No newline at end of file

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/DeleteColumn.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/DeleteColumn.java?rev=662531&r1=662530&r2=662531&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/DeleteColumn.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/DeleteColumn.java Mon Jun  2 12:36:55 2008
@@ -22,8 +22,10 @@
 import java.io.IOException;
 
 import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.regionserver.HStoreFile;
+import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.ipc.HRegionInterface;
 
 /** Instantiated to remove a column family from a table */
@@ -45,11 +47,8 @@
       i.getTableDesc().removeFamily(columnName);
       updateRegionInfo(server, m.getRegionName(), i);
       // Delete the directories used by the column
-      int encodedName = i.getEncodedName();
-      this.master.fs.delete(
-        HStoreFile.getMapDir(tabledir, encodedName, columnName), true);
-      this.master.fs.delete(
-        HStoreFile.getInfoDir(tabledir, encodedName, columnName), true);
+      FSUtils.deleteColumnFamily(this.master.fs, tabledir, i.getEncodedName(),
+        this.columnName);
     }
   }
 }
\ No newline at end of file

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/TableDelete.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/TableDelete.java?rev=662531&r1=662530&r2=662531&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/TableDelete.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/TableDelete.java Mon Jun  2 12:36:55 2008
@@ -21,6 +21,8 @@
 
 import java.io.IOException;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.RemoteExceptionHandler;
@@ -33,6 +35,7 @@
  * Instantiated to delete a table. Table must be offline.
  */
 class TableDelete extends TableOperation {
+  private final Log LOG = LogFactory.getLog(this.getClass());
 
   TableDelete(final HMaster master, final byte [] tableName) throws IOException {
     super(master, tableName);

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/TableOperation.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/TableOperation.java?rev=662531&r1=662530&r2=662531&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/TableOperation.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/TableOperation.java Mon Jun  2 12:36:55 2008
@@ -25,59 +25,42 @@
 import java.util.List;
 import java.util.Set;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.ipc.HRegionInterface;
 import org.apache.hadoop.hbase.HServerInfo;
 import org.apache.hadoop.hbase.MasterNotRunningException;
 import org.apache.hadoop.hbase.RemoteExceptionHandler;
-import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.TableNotFoundException;
-import org.apache.hadoop.hbase.util.Writables;
 import org.apache.hadoop.hbase.io.RowResult;
-import org.apache.hadoop.hbase.util.Sleeper;
+import org.apache.hadoop.hbase.ipc.HRegionInterface;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.Writables;
 
 /**
  * Abstract base class for operations that need to examine all HRegionInfo 
- * objects that make up a table. (For a table, operate on each of its rows
- * in .META.) To gain the 
+ * objects in a table. (For a table, operate on each of its rows
+ * in .META.).
  */
 abstract class TableOperation implements HConstants {
-  static final Long ZERO_L = Long.valueOf(0L);
-  
-  protected static final Log LOG = LogFactory.getLog(TableOperation.class);
-  
-  protected Set<MetaRegion> metaRegions;
-  protected byte [] tableName;
-  protected Set<HRegionInfo> unservedRegions;
+  private final Set<MetaRegion> metaRegions;
+  protected final byte [] tableName;
+  protected final Set<HRegionInfo> unservedRegions = new HashSet<HRegionInfo>();
   protected HMaster master;
-  protected final int numRetries;
-  protected final Sleeper sleeper;
-  
-  protected TableOperation(final HMaster master, final byte [] tableName) 
+
+  protected TableOperation(final HMaster master, final byte [] tableName)
   throws IOException {
-    this.sleeper = master.sleeper;
-    this.numRetries = master.numRetries;
-    
     this.master = master;
-    
     if (!this.master.isMasterRunning()) {
       throw new MasterNotRunningException();
     }
-
     this.tableName = tableName;
-    this.unservedRegions = new HashSet<HRegionInfo>();
 
     // We can not access any meta region if they have not already been
     // assigned and scanned.
-
     if (master.regionManager.metaScannerThread.waitForMetaRegionsOrClose()) {
       // We're shutting down. Forget it.
       throw new MasterNotRunningException(); 
     }
-
     this.metaRegions = master.regionManager.getMetaRegionsForTable(tableName);
   }
 
@@ -147,7 +130,7 @@
       postProcessMeta(m, server);
       unservedRegions.clear();
 
-      return true;
+      return Boolean.TRUE;
     }
   }
 
@@ -178,4 +161,4 @@
 
   protected abstract void postProcessMeta(MetaRegion m,
     HRegionInterface server) throws IOException;
-}
+}
\ No newline at end of file

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/FSUtils.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/FSUtils.java?rev=662531&r1=662530&r2=662531&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/FSUtils.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/FSUtils.java Mon Jun  2 12:36:55 2008
@@ -36,6 +36,7 @@
 import org.apache.hadoop.hbase.FileSystemVersionException;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.RemoteExceptionHandler;
+import org.apache.hadoop.hbase.regionserver.HStoreFile;
 
 /**
  * Utility methods for interacting with the underlying file system.
@@ -177,4 +178,19 @@
   public static String getPath(Path p) {
     return p.toUri().getPath();
   }
+
+  /**
+   * Delete the directories used by the column family under the passed region.
+   * @param fs Filesystem to use.
+   * @param tabledir The directory under hbase.rootdir for this table.
+   * @param encodedRegionName The region name encoded.
+   * @param family Family to delete.
+   * @throws IOException
+   */
+  public static void deleteColumnFamily(final FileSystem fs,
+    final Path tabledir, final int encodedRegionName, final byte [] family)
+  throws IOException {
+    fs.delete(HStoreFile.getMapDir(tabledir, encodedRegionName, family), true);
+    fs.delete(HStoreFile.getInfoDir(tabledir, encodedRegionName, family), true);
+  }
 }
\ No newline at end of file

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/MetaUtils.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/MetaUtils.java?rev=662531&r1=662530&r2=662531&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/MetaUtils.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/MetaUtils.java Mon Jun  2 12:36:55 2008
@@ -22,28 +22,36 @@
 
 import java.io.FileNotFoundException;
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.Collections;
+import java.util.List;
 import java.util.Map;
 import java.util.SortedMap;
 import java.util.TreeMap;
 
+import org.apache.commons.httpclient.methods.GetMethod;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HStoreKey;
+import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.io.BatchUpdate;
 import org.apache.hadoop.hbase.io.Cell;
+import org.apache.hadoop.hbase.ipc.HRegionInterface;
 import org.apache.hadoop.hbase.regionserver.HLog;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.InternalScanner;
 
 /**
- * Contains utility methods for manipulating HBase meta tables
+ * Contains utility methods for manipulating HBase meta tables.
+ * Be sure to call {@link #shutdown()} when done with this class so it closes
+ * resources opened during meta processing (ROOT, META, etc.).
  */
 public class MetaUtils {
   private static final Log LOG = LogFactory.getLog(MetaUtils.class);
@@ -150,7 +158,11 @@
     return meta;
   }
   
-  /** Closes root region if open. Also closes and deletes the HLog. */
+  /**
+   * Closes catalog regions if open. Also closes and deletes the HLog. You
+   * must call this method if you want to persist changes made during a
+   * MetaUtils edit session.
+   */
   public void shutdown() {
     if (this.rootRegion != null) {
       try {
@@ -209,7 +221,6 @@
     }
     
     // Open root region so we can scan it
-
     if (this.rootRegion == null) {
       openRootRegion();
     }
@@ -261,7 +272,7 @@
     HRegion metaRegion = openMetaRegion(metaRegionInfo);
     scanMetaRegion(metaRegion, listener);
   }
-  
+
   /**
    * Scan the passed in metaregion <code>m</code> invoking the passed
    * <code>listener</code> per row found.
@@ -269,8 +280,7 @@
    * @param listener
    * @throws IOException
    */
-  public void scanMetaRegion(final HRegion m,
-      final ScannerListener listener)
+  public void scanMetaRegion(final HRegion m, final ScannerListener listener)
   throws IOException {
     InternalScanner metaScanner = m.getScanner(HConstants.COL_REGIONINFO_ARRAY,
       HConstants.EMPTY_START_ROW, HConstants.LATEST_TIMESTAMP, null);
@@ -295,13 +305,13 @@
       metaScanner.close();
     }
   }
-  
+
   private void openRootRegion() throws IOException {
     this.rootRegion = HRegion.openHRegion(HRegionInfo.ROOT_REGIONINFO,
         this.rootdir, this.log, this.conf);
     this.rootRegion.compactStores();
   }
-  
+
   private HRegion openMetaRegion(HRegionInfo metaInfo) throws IOException {
     HRegion meta =
       HRegion.openHRegion(metaInfo, this.rootdir, this.log, this.conf);
@@ -339,4 +349,117 @@
     b.delete(HConstants.COL_STARTCODE);
     t.commit(b);
   }
+  
+  /**
+   * Offline version of the online TableOperation,
+   * org.apache.hadoop.hbase.master.AddColumn.
+   * @param tableName
+   * @param hcd Add this column to <code>tableName</code>
+   * @throws IOException 
+   */
+  public void addColumn(final byte [] tableName,
+      final HColumnDescriptor hcd)
+  throws IOException {
+    List<HRegionInfo> metas = getMETARowsInROOT();
+    for (HRegionInfo hri: metas) {
+      final HRegion m = getMetaRegion(hri);
+      scanMetaRegion(m, new ScannerListener() {
+        private boolean inTable = true;
+        
+        @SuppressWarnings("synthetic-access")
+        public boolean processRow(HRegionInfo info) throws IOException {
+          LOG.debug("Testing " + Bytes.toString(tableName) + " against " +
+            Bytes.toString(info.getTableDesc().getName()));
+          if (Bytes.equals(info.getTableDesc().getName(), tableName)) {
+            this.inTable = false;
+            info.getTableDesc().addFamily(hcd);
+            updateMETARegionInfo(m, info);
+            return true;
+          }
+          // If we get here, this row does not belong to the table.  While we
+          // have not yet reached the table's rows, inTable is still true and
+          // we keep scanning; once past them it is false and the scan stops.
+          return this.inTable;
+        }});
+    }
+  }
+  
+  /**
+   * Offline version of the online TableOperation,
+   * org.apache.hadoop.hbase.master.DeleteColumn.
+   * @param tableName
+   * @param columnFamily Name of the column family to remove.
+   * @throws IOException
+   */
+  public void deleteColumn(final byte [] tableName,
+      final byte [] columnFamily) throws IOException {
+    List<HRegionInfo> metas = getMETARowsInROOT();
+    final Path tabledir = new Path(rootdir, Bytes.toString(tableName));
+    for (HRegionInfo hri: metas) {
+      final HRegion m = getMetaRegion(hri);
+      scanMetaRegion(m, new ScannerListener() {
+        private boolean inTable = true;
+        
+        @SuppressWarnings("synthetic-access")
+        public boolean processRow(HRegionInfo info) throws IOException {
+          if (Bytes.equals(info.getTableDesc().getName(), tableName)) {
+            this.inTable = false;
+            info.getTableDesc().removeFamily(columnFamily);
+            updateMETARegionInfo(m, info);
+            FSUtils.deleteColumnFamily(fs, tabledir, info.getEncodedName(),
+              HStoreKey.getFamily(columnFamily));
+            return false;
+          }
+          // If we get here, this row does not belong to the table.  While we
+          // have not yet reached the table's rows, inTable is still true and
+          // we keep scanning; once past them it is false and the scan stops.
+          return this.inTable;
+        }});
+    }
+  }
+  
+  private void updateMETARegionInfo(HRegion r, final HRegionInfo hri) 
+  throws IOException {
+    if (LOG.isDebugEnabled()) {
+      HRegionInfo h = Writables.getHRegionInfoOrNull(
+        r.get(hri.getRegionName(), HConstants.COL_REGIONINFO).getValue());
+      LOG.debug("Old " + Bytes.toString(HConstants.COL_REGIONINFO) +
+        " for " + hri.toString() + " in " + r.toString() + " is: " +
+        h.toString());
+    }
+    BatchUpdate b = new BatchUpdate(hri.getRegionName());
+    b.put(HConstants.COL_REGIONINFO, Writables.getBytes(hri));
+    r.batchUpdate(b);
+    if (LOG.isDebugEnabled()) {
+      HRegionInfo h = Writables.getHRegionInfoOrNull(
+          r.get(hri.getRegionName(), HConstants.COL_REGIONINFO).getValue());
+        LOG.debug("New " + Bytes.toString(HConstants.COL_REGIONINFO) +
+          " for " + hri.toString() + " in " + r.toString() + " is: " +
+          h.toString());
+    }
+  }
+
+  /**
+   * @return List of <code>.META.</code> {@link HRegionInfo} found in the
+   * <code>-ROOT-</code> table.
+   * @throws IOException
+   * @see #getMetaRegion(HRegionInfo)
+   */
+  public List<HRegionInfo> getMETARowsInROOT() throws IOException {
+    if (!initialized) {
+      throw new IllegalStateException("Must call initialize method first.");
+    }
+    final List<HRegionInfo> result = new ArrayList<HRegionInfo>();
+    scanRootRegion(new ScannerListener() {
+      @SuppressWarnings("unused")
+      public boolean processRow(HRegionInfo info) throws IOException {
+        if (Bytes.equals(info.getTableDesc().getName(),
+            HConstants.META_TABLE_NAME)) {
+          result.add(info);
+          return false;
+        }
+        return true;
+      }});
+    return result;
+  }
 }
\ No newline at end of file

Modified: hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/AbstractMergeTestBase.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/AbstractMergeTestBase.java?rev=662531&r1=662530&r2=662531&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/AbstractMergeTestBase.java (original)
+++ hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/AbstractMergeTestBase.java Mon Jun  2 12:36:55 2008
@@ -68,9 +68,9 @@
   }
 
   @Override
-  protected void HBaseClusterSetup() throws Exception {
+  protected void hBaseClusterSetup() throws Exception {
     if (startMiniHBase) {
-      super.HBaseClusterSetup();
+      super.hBaseClusterSetup();
     }
   }
 

Modified: hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/HBaseClusterTestCase.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/HBaseClusterTestCase.java?rev=662531&r1=662530&r2=662531&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/HBaseClusterTestCase.java (original)
+++ hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/HBaseClusterTestCase.java Mon Jun  2 12:36:55 2008
@@ -86,7 +86,7 @@
    * Actually start the MiniHBase instance.
    */
   @SuppressWarnings("unused")
-  protected void HBaseClusterSetup() throws Exception {
+  protected void hBaseClusterSetup() throws Exception {
     // start the mini cluster
     this.cluster = new MiniHBaseCluster(conf, regionServers);
     // opening the META table ensures that cluster is running
@@ -125,7 +125,7 @@
       preHBaseClusterSetup();    
     
       // start the instance
-      HBaseClusterSetup();
+      hBaseClusterSetup();
       
       // run post-cluster setup
       postHBaseClusterSetup();

Added: hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/util/TestMetaUtils.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/util/TestMetaUtils.java?rev=662531&view=auto
==============================================================================
--- hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/util/TestMetaUtils.java (added)
+++ hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/util/TestMetaUtils.java Mon Jun  2 12:36:55 2008
@@ -0,0 +1,58 @@
+/**
+ * Copyright 2008 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.util;
+
+import org.apache.hadoop.hbase.HBaseClusterTestCase;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.MiniHBaseCluster;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.HTable;
+
+
+public class TestMetaUtils extends HBaseClusterTestCase {
+  public void testColumnEdits() throws Exception {
+    HBaseAdmin admin = new HBaseAdmin(this.conf);
+    final String oldColumn = "oldcolumn:";
+    // Add five tables
+    for (int i = 0; i < 5; i++) {
+      HTableDescriptor htd = new HTableDescriptor(getName() + i);
+      htd.addFamily(new HColumnDescriptor(oldColumn));
+      admin.createTable(htd);
+    }
+    this.cluster.shutdown();
+    this.cluster = null;
+    MetaUtils utils = new MetaUtils(this.conf);
+    utils.initialize();
+    // Add a new column to the third table, getName() + '2', and remove the old.
+    final byte [] editTable = Bytes.toBytes(getName() + 2);
+    final byte [] newColumn = Bytes.toBytes("newcolumn:");
+    utils.addColumn(editTable, new HColumnDescriptor(newColumn));
+    utils.deleteColumn(editTable, Bytes.toBytes(oldColumn));
+    utils.shutdown();
+    // Now assert columns were added and deleted.
+    this.cluster = new MiniHBaseCluster(this.conf, 1);
+    HTable t = new HTable(conf, editTable);
+    HTableDescriptor htd = t.getMetadata();
+    HColumnDescriptor hcd = htd.getFamily(newColumn);
+    assertTrue(hcd != null);
+    assertNull(htd.getFamily(Bytes.toBytes(oldColumn)));
+  }
+}
\ No newline at end of file


