hbase-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From bryanduxb...@apache.org
Subject svn commit: r627918 [3/3] - in /hadoop/hbase/trunk: ./ src/java/org/apache/hadoop/hbase/ src/java/org/apache/hadoop/hbase/client/ src/java/org/apache/hadoop/hbase/generated/master/ src/java/org/apache/hadoop/hbase/hql/ src/java/org/apache/hadoop/hbase/...
Date Fri, 15 Feb 2008 00:29:08 GMT
Added: hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/client/TestHTable.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/client/TestHTable.java?rev=627918&view=auto
==============================================================================
--- hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/client/TestHTable.java (added)
+++ hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/client/TestHTable.java Thu Feb 14
16:29:04 2008
@@ -0,0 +1,177 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.client;
+
+import java.io.IOException;
+import java.util.Map;
+import java.util.TreeMap;
+
+import org.apache.hadoop.io.Text;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.HBaseClusterTestCase;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.TableNotFoundException;
+import org.apache.hadoop.hbase.HScannerInterface;
+import org.apache.hadoop.hbase.HStoreKey;
+
+/**
+ * Tests the HTable client API: opening a missing table, creating tables,
+ * writing and scanning data, and copying rows between two tables.
+ */
+public class TestHTable extends HBaseClusterTestCase implements HConstants {
+  private static final Log LOG = LogFactory.getLog(TestHTable.class);
+  // Shared column family descriptor used by both test tables.
+  private static final HColumnDescriptor column =
+    new HColumnDescriptor(COLUMN_FAMILY.toString());
+
+  private static final Text nosuchTable = new Text("nosuchTable");
+  private static final Text tableAname = new Text("tableA");
+  private static final Text tableBname = new Text("tableB");
+  
+  private static final Text row = new Text("row");
+ 
+  /**
+   * Exercises table creation, a single-row update, a scanner-driven copy
+   * from table A to table B, and re-opening connections to a table.
+   * @throws IOException on unexpected cluster failure
+   */
+  public void testHTable() throws IOException {
+    byte[] value = "value".getBytes(UTF8_ENCODING);
+    
+    // Opening a table that was never created must throw
+    // TableNotFoundException; any other IOException fails the test.
+    try {
+      new HTable(conf, nosuchTable);
+      
+    } catch (TableNotFoundException e) {
+      // expected
+
+    } catch (IOException e) {
+      e.printStackTrace();
+      fail();
+    }
+    
+    HTableDescriptor tableAdesc = new HTableDescriptor(tableAname.toString());
+    tableAdesc.addFamily(column);
+    
+    HTableDescriptor tableBdesc = new HTableDescriptor(tableBname.toString());
+    tableBdesc.addFamily(column);
+
+    // create a couple of tables
+    
+    HBaseAdmin admin = new HBaseAdmin(conf);
+    admin.createTable(tableAdesc);
+    admin.createTable(tableBdesc);
+    
+    // put some data into table A
+    
+    HTable a = new HTable(conf, tableAname);
+    
+    // Assert the metadata is good: the descriptor read back from the
+    // cluster must equal the one we created the table with.
+    HTableDescriptor meta = a.getMetadata();
+    assertTrue(meta.equals(tableAdesc));
+    
+    long lockid = a.startUpdate(row);
+    a.put(lockid, COLUMN_FAMILY, value);
+    a.commit(lockid);
+    
+    // open a new connection to A and a connection to b
+    
+    HTable newA = new HTable(conf, tableAname);
+    HTable b = new HTable(conf, tableBname);
+
+    // copy data from A to B, row by row, via a scanner over A
+    
+    HScannerInterface s =
+      newA.obtainScanner(COLUMN_FAMILY_ARRAY, EMPTY_START_ROW);
+    
+    try {
+      HStoreKey key = new HStoreKey();
+      TreeMap<Text, byte[]> results = new TreeMap<Text, byte[]>();
+      while(s.next(key, results)) {
+        lockid = b.startUpdate(key.getRow());
+        for(Map.Entry<Text, byte[]> e: results.entrySet()) {
+          b.put(lockid, e.getKey(), e.getValue());
+        }
+        b.commit(lockid);
+        // NOTE(review): abort after commit on the same lockid looks
+        // suspicious -- commit should already release the lock. Confirm
+        // this is an intentional no-op rather than a leftover.
+        b.abort(lockid);
+      }
+    } finally {
+      // Always release the scanner's server-side resources.
+      s.close();
+    }
+    
+    // Opening a new connection to A will cause the tables to be reloaded
+
+    try {
+      HTable anotherA = new HTable(conf, tableAname);
+      anotherA.get(row, COLUMN_FAMILY);
+    } catch (Exception e) {
+      e.printStackTrace();
+      fail();
+    }
+    
+    // We can still access A through newA because it has the table information
+    // cached. And if it needs to recalibrate, that will cause the information
+    // to be reloaded.
+    
+  }
+  
+  /**
+    * Opening a table when no user tables exist at all must still raise
+    * TableNotFoundException. For HADOOP-2579.
+    */
+  public void testTableNotFoundExceptionWithoutAnyTables() {
+    try {
+      new HTable(conf, new Text("notATable"));
+      fail("Should have thrown a TableNotFoundException");
+    } catch (TableNotFoundException e) {
+      // expected
+    } catch (IOException e) {
+      e.printStackTrace();
+      fail("Should have thrown a TableNotFoundException instead of a " +
+        e.getClass());
+    }
+  }
+  
+  /**
+    * Opening a missing table when some other table DOES exist must still
+    * raise TableNotFoundException. For HADOOP-2579.
+    */
+  public void testTableNotFoundExceptionWithATable() {
+    try {
+      HColumnDescriptor column =
+        new HColumnDescriptor(COLUMN_FAMILY.toString());
+      HBaseAdmin admin = new HBaseAdmin(conf);
+      HTableDescriptor testTableADesc =
+        new HTableDescriptor("table");
+      testTableADesc.addFamily(column);
+      admin.createTable(testTableADesc);
+
+      // This should throw a TableNotFoundException, it has not been created
+      new HTable(conf, new Text("notATable"));
+      
+      fail("Should have thrown a TableNotFoundException");
+    } catch (TableNotFoundException e) {
+      // expected
+    } catch (IOException e) {
+      e.printStackTrace();
+      fail("Should have thrown a TableNotFoundException instead of a " +
+        e.getClass());
+    }
+  }
+  
+}

Added: hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/client/TestListTables.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/client/TestListTables.java?rev=627918&view=auto
==============================================================================
--- hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/client/TestListTables.java (added)
+++ hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/client/TestListTables.java Thu Feb
14 16:29:04 2008
@@ -0,0 +1,72 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.client;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashSet;
+import org.apache.hadoop.hbase.HBaseClusterTestCase;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HConstants;
+
+/**
+ * Tests the listTables client API: creates three tables in setUp() and
+ * verifies HBaseAdmin.listTables() reports exactly those tables.
+ */
+public class TestListTables extends HBaseClusterTestCase {
+  // Admin handle, (re)created for each test method in setUp().
+  HBaseAdmin admin = null;
+  
+  // NOTE(review): static mutable descriptors -- addFamily() in setUp()
+  // runs once per test method, so the same family may be added to these
+  // descriptors repeatedly; confirm repeated addFamily calls are harmless.
+  private static final HTableDescriptor[] tables = {
+      new HTableDescriptor("table1"),
+      new HTableDescriptor("table2"),
+      new HTableDescriptor("table3")
+  };
+  
+  /** {@inheritDoc} */
+  @Override
+  public void setUp() throws Exception {
+    super.setUp();
+    
+    admin = new HBaseAdmin(conf);
+
+    HColumnDescriptor family =
+      new HColumnDescriptor(HConstants.COLUMN_FAMILY_STR);
+    
+    // Give every descriptor the default column family and create its table.
+    for (int i = 0; i < tables.length; i++) {
+      tables[i].addFamily(family);
+      admin.createTable(tables[i]);
+    }
+  }
+
+  /**
+   * Verifies listTables() returns exactly the tables created in setUp():
+   * same count, and each created descriptor is present in the result.
+   * @throws IOException on unexpected cluster failure
+   */
+  public void testListTables() throws IOException {
+    HashSet<HTableDescriptor> result =
+      new HashSet<HTableDescriptor>(Arrays.asList(admin.listTables()));
+    
+    int size = result.size();
+    assertEquals(tables.length, size);
+    for (int i = 0; i < tables.length && i < size; i++) {
+      assertTrue(result.contains(tables[i]));
+    }
+  }
+}

Added: hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/client/TestMultipleUpdates.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/client/TestMultipleUpdates.java?rev=627918&view=auto
==============================================================================
--- hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/client/TestMultipleUpdates.java (added)
+++ hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/client/TestMultipleUpdates.java Thu
Feb 14 16:29:04 2008
@@ -0,0 +1,102 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.client;
+
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.hbase.HBaseClusterTestCase;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+
+/**
+ * Tests that the client protects against multiple updates: a second
+ * startUpdate while one is already open, and put/delete/abort/commit
+ * calls made with a lock id that was never issued.
+ */
+public class TestMultipleUpdates extends HBaseClusterTestCase {
+  private static final String CONTENTS_STR = "contents:";
+  private static final Text CONTENTS = new Text(CONTENTS_STR);
+  private static final byte[] value = { 1, 2, 3, 4 };
+
+  private HTableDescriptor desc = null;
+  private HTable table = null;
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public void setUp() throws Exception {
+    super.setUp();
+    // Create a single "test" table with one column family and open it.
+    this.desc = new HTableDescriptor("test");
+    desc.addFamily(new HColumnDescriptor(CONTENTS_STR));
+    HBaseAdmin admin = new HBaseAdmin(conf);
+    admin.createTable(desc);
+    table = new HTable(conf, desc.getName());
+  }
+
+  /** the test */
+  public void testMultipleUpdates() {
+    try {
+      long lockid = table.startUpdate(new Text("row1"));
+      
+      // A second startUpdate while the first is still open must throw
+      // IllegalStateException. If it returns instead, the Exception
+      // thrown below escapes to the outer catch and fails the test.
+      try {
+        long lockid2 = table.startUpdate(new Text("row2"));
+        throw new Exception("second startUpdate returned lock id " + lockid2);
+        
+      } catch (IllegalStateException i) {
+        // expected
+      }
+      
+      // A lock id that was never handed out by startUpdate.
+      long invalidid = 42;
+      
+      // NOTE(review): unlike the startUpdate check above, the four checks
+      // below pass silently if NO exception is thrown -- consider a fail()
+      // after each call so a missing rejection is detected.
+      try {
+        table.put(invalidid, CONTENTS, value);
+        
+      } catch (IllegalArgumentException i) {
+        // expected
+      }
+      
+      try {
+        table.delete(invalidid, CONTENTS);
+        
+      } catch (IllegalArgumentException i) {
+        // expected
+      }
+      
+      try {
+        table.abort(invalidid);
+        
+      } catch (IllegalArgumentException i) {
+        // expected
+      }
+      
+      try {
+        table.commit(invalidid);
+        
+      } catch (IllegalArgumentException i) {
+        // expected
+      }
+      
+      // Release the one legitimate lock.
+      table.abort(lockid);
+      
+    } catch (Exception e) {
+      System.err.println("unexpected exception");
+      e.printStackTrace();
+      fail();
+    }
+  }
+}

Modified: hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/hql/TestHQL.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/hql/TestHQL.java?rev=627918&r1=627917&r2=627918&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/hql/TestHQL.java (original)
+++ hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/hql/TestHQL.java Thu Feb 14 16:29:04
2008
@@ -28,7 +28,7 @@
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.HBaseAdmin;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.HBaseClusterTestCase;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
@@ -173,4 +173,4 @@
       new String(this.baos.toByteArray(), HConstants.UTF8_ENCODING));
     this.baos.reset();
   }
-}
\ No newline at end of file
+}

Modified: hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/mapred/TestTableIndex.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/mapred/TestTableIndex.java?rev=627918&r1=627917&r2=627918&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/mapred/TestTableIndex.java (original)
+++ hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/mapred/TestTableIndex.java Thu Feb
14 16:29:04 2008
@@ -34,13 +34,13 @@
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseAdmin;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegion;
 import org.apache.hadoop.hbase.HScannerInterface;
 import org.apache.hadoop.hbase.HStoreKey;
-import org.apache.hadoop.hbase.HTable;
+import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.MiniHBaseCluster;
 import org.apache.hadoop.hbase.MultiRegionTable;
@@ -326,4 +326,4 @@
   public static void main(@SuppressWarnings("unused") String[] args) {
     TestRunner.run(new TestSuite(TestTableIndex.class));
   }
-}
\ No newline at end of file
+}

Modified: hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java?rev=627918&r1=627917&r2=627918&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java (original)
+++ hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java Thu
Feb 14 16:29:04 2008
@@ -28,12 +28,12 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.dfs.MiniDFSCluster;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseAdmin;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HScannerInterface;
 import org.apache.hadoop.hbase.HStoreKey;
-import org.apache.hadoop.hbase.HTable;
+import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.MiniHBaseCluster;
 import org.apache.hadoop.hbase.MultiRegionTable;
@@ -429,4 +429,4 @@
       scanner.close();
     }
   }
-}
\ No newline at end of file
+}

Modified: hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/master/OOMEHMaster.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/master/OOMEHMaster.java?rev=627918&r1=627917&r2=627918&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/master/OOMEHMaster.java (original)
+++ hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/master/OOMEHMaster.java Thu Feb 14
16:29:04 2008
@@ -61,4 +61,4 @@
   public static void main(String[] args) {
     doMain(args, OOMEHMaster.class);
   }
-}
\ No newline at end of file
+}

Modified: hadoop/hbase/trunk/src/webapps/master/master.jsp
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/webapps/master/master.jsp?rev=627918&r1=627917&r2=627918&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/webapps/master/master.jsp (original)
+++ hadoop/hbase/trunk/src/webapps/master/master.jsp Thu Feb 14 16:29:04 2008
@@ -5,7 +5,7 @@
   import="org.apache.hadoop.hbase.master.HMaster"
   import="org.apache.hadoop.hbase.HConstants"
   import="org.apache.hadoop.hbase.master.MetaRegion"
-  import="org.apache.hadoop.hbase.HBaseAdmin"
+  import="org.apache.hadoop.hbase.client.HBaseAdmin"
   import="org.apache.hadoop.hbase.HServerInfo"
   import="org.apache.hadoop.hbase.HServerAddress"
   import="org.apache.hadoop.hbase.HBaseConfiguration"



Mime
View raw message