hbase-commits mailing list archives

From jmhs...@apache.org
Subject svn commit: r1448506 [5/5] - in /hbase/trunk: ./ hbase-common/src/main/java/org/apache/hadoop/hbase/ hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ hbase-protocol/src/main/protobuf/ hbase-server/src/main/avro/ hbase-server/src...
Date Thu, 21 Feb 2013 03:38:08 GMT
Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java?rev=1448506&r1=1448505&r2=1448506&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java Thu Feb 21 03:38:05 2013
@@ -44,6 +44,7 @@ import org.apache.hadoop.hbase.master.HM
 import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
 import org.apache.hadoop.hbase.master.RegionPlan;
 import org.apache.hadoop.hbase.master.RegionState;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.protobuf.RequestConverter;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
@@ -62,7 +63,7 @@ import org.junit.experimental.categories
 public class TestMasterObserver {
   private static final Log LOG = LogFactory.getLog(TestMasterObserver.class);
 
-  public static CountDownLatch countDown = new CountDownLatch(1);
+  public static CountDownLatch tableCreationLatch = new CountDownLatch(1);
 
   public static class CPMasterObserver implements MasterObserver {
 
@@ -98,6 +99,14 @@ public class TestMasterObserver {
     private boolean postStartMasterCalled;
     private boolean startCalled;
     private boolean stopCalled;
+    private boolean preSnapshotCalled;
+    private boolean postSnapshotCalled;
+    private boolean preCloneSnapshotCalled;
+    private boolean postCloneSnapshotCalled;
+    private boolean preRestoreSnapshotCalled;
+    private boolean postRestoreSnapshotCalled;
+    private boolean preDeleteSnapshotCalled;
+    private boolean postDeleteSnapshotCalled;
     private boolean preCreateTableHandlerCalled;
     private boolean postCreateTableHandlerCalled;
     private boolean preDeleteTableHandlerCalled;
@@ -115,7 +124,6 @@ public class TestMasterObserver {
     private boolean preModifyTableHandlerCalled;
     private boolean postModifyTableHandlerCalled;
 
-
     public void enableBypass(boolean bypass) {
       this.bypass = bypass;
     }
@@ -147,6 +155,14 @@ public class TestMasterObserver {
       postBalanceCalled = false;
       preBalanceSwitchCalled = false;
       postBalanceSwitchCalled = false;
+      preSnapshotCalled = false;
+      postSnapshotCalled = false;
+      preCloneSnapshotCalled = false;
+      postCloneSnapshotCalled = false;
+      preRestoreSnapshotCalled = false;
+      postRestoreSnapshotCalled = false;
+      preDeleteSnapshotCalled = false;
+      postDeleteSnapshotCalled = false;
       preCreateTableHandlerCalled = false;
       postCreateTableHandlerCalled = false;
       preDeleteTableHandlerCalled = false;
@@ -509,6 +525,76 @@ public class TestMasterObserver {
     public boolean wasStopped() { return stopCalled; }
 
     @Override
+    public void preSnapshot(final ObserverContext<MasterCoprocessorEnvironment> ctx,
+        final SnapshotDescription snapshot, final HTableDescriptor hTableDescriptor)
+        throws IOException {
+      preSnapshotCalled = true;
+    }
+
+    @Override
+    public void postSnapshot(final ObserverContext<MasterCoprocessorEnvironment> ctx,
+        final SnapshotDescription snapshot, final HTableDescriptor hTableDescriptor)
+        throws IOException {
+      postSnapshotCalled = true;
+    }
+
+    public boolean wasSnapshotCalled() {
+      return preSnapshotCalled && postSnapshotCalled;
+    }
+
+    @Override
+    public void preCloneSnapshot(final ObserverContext<MasterCoprocessorEnvironment> ctx,
+        final SnapshotDescription snapshot, final HTableDescriptor hTableDescriptor)
+        throws IOException {
+      preCloneSnapshotCalled = true;
+    }
+
+    @Override
+    public void postCloneSnapshot(final ObserverContext<MasterCoprocessorEnvironment> ctx,
+        final SnapshotDescription snapshot, final HTableDescriptor hTableDescriptor)
+        throws IOException {
+      postCloneSnapshotCalled = true;
+    }
+
+    public boolean wasCloneSnapshotCalled() {
+      return preCloneSnapshotCalled && postCloneSnapshotCalled;
+    }
+
+    @Override
+    public void preRestoreSnapshot(final ObserverContext<MasterCoprocessorEnvironment> ctx,
+        final SnapshotDescription snapshot, final HTableDescriptor hTableDescriptor)
+        throws IOException {
+      preRestoreSnapshotCalled = true;
+    }
+
+    @Override
+    public void postRestoreSnapshot(final ObserverContext<MasterCoprocessorEnvironment> ctx,
+        final SnapshotDescription snapshot, final HTableDescriptor hTableDescriptor)
+        throws IOException {
+      postRestoreSnapshotCalled = true;
+    }
+
+    public boolean wasRestoreSnapshotCalled() {
+      return preRestoreSnapshotCalled && postRestoreSnapshotCalled;
+    }
+
+    @Override
+    public void preDeleteSnapshot(final ObserverContext<MasterCoprocessorEnvironment> ctx,
+        final SnapshotDescription snapshot) throws IOException {
+      preDeleteSnapshotCalled = true;
+    }
+
+    @Override
+    public void postDeleteSnapshot(final ObserverContext<MasterCoprocessorEnvironment> ctx,
+        final SnapshotDescription snapshot) throws IOException {
+      postDeleteSnapshotCalled = true;
+    }
+
+    public boolean wasDeleteSnapshotCalled() {
+      return preDeleteSnapshotCalled && postDeleteSnapshotCalled;
+    }
+
+    @Override
     public void preCreateTableHandler(
         ObserverContext<MasterCoprocessorEnvironment> env,
         HTableDescriptor desc, HRegionInfo[] regions) throws IOException {
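
The eight new hooks above bracket each master-side snapshot operation, so a coprocessor can observe a snapshot, clone, restore, or delete, or veto it by throwing from the pre-hook. As a minimal, hypothetical sketch (SnapshotGuardObserver and the table name are made up for illustration, and it assumes BaseMasterObserver gained no-op defaults for the new hooks elsewhere in this change), a guard that refuses snapshots of one table could look like:

    import java.io.IOException;

    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.coprocessor.BaseMasterObserver;
    import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
    import org.apache.hadoop.hbase.coprocessor.ObserverContext;
    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;

    public class SnapshotGuardObserver extends BaseMasterObserver {
      // Hypothetical table name, for illustration only.
      private static final String PROTECTED_TABLE = "protected_table";

      @Override
      public void preSnapshot(final ObserverContext<MasterCoprocessorEnvironment> ctx,
          final SnapshotDescription snapshot, final HTableDescriptor hTableDescriptor)
          throws IOException {
        // Throwing from the pre-hook aborts the snapshot before it starts.
        if (PROTECTED_TABLE.equals(hTableDescriptor.getNameAsString())) {
          throw new IOException("snapshot '" + snapshot.getName()
              + "' refused for protected table");
        }
      }
    }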
@@ -523,7 +609,7 @@ public class TestMasterObserver {
         ObserverContext<MasterCoprocessorEnvironment> ctx,
         HTableDescriptor desc, HRegionInfo[] regions) throws IOException {
       postCreateTableHandlerCalled = true;
-      countDown.countDown();
+      tableCreationLatch.countDown();
     }
 
     public boolean wasPreCreateTableHandlerCalled(){
@@ -711,7 +797,9 @@ public class TestMasterObserver {
   }
 
   private static HBaseTestingUtility UTIL = new HBaseTestingUtility();
+  private static byte[] TEST_SNAPSHOT = Bytes.toBytes("observed_snapshot");
   private static byte[] TEST_TABLE = Bytes.toBytes("observed_table");
+  private static byte[] TEST_CLONE = Bytes.toBytes("observed_clone");
   private static byte[] TEST_FAMILY = Bytes.toBytes("fam1");
   private static byte[] TEST_FAMILY2 = Bytes.toBytes("fam2");
   private static byte[] TEST_FAMILY3 = Bytes.toBytes("fam3");
@@ -721,6 +809,11 @@ public class TestMasterObserver {
     Configuration conf = UTIL.getConfiguration();
     conf.set(CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY,
         CPMasterObserver.class.getName());
+    conf.set("hbase.master.hfilecleaner.plugins",
+      "org.apache.hadoop.hbase.master.cleaner.HFileLinkCleaner," +
+      "org.apache.hadoop.hbase.master.snapshot.SnapshotHFileCleaner");
+    conf.set("hbase.master.logcleaner.plugins",
+      "org.apache.hadoop.hbase.master.snapshot.SnapshotLogCleaner");
     // We need more than one region server for this test
     UTIL.startMiniCluster(2);
   }
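
The two plugin lists above can equally be built from the cleaner classes themselves, as the TestHFileLinkCleaner hunk below does with HFileCleaner.MASTER_HFILE_CLEANER_PLUGINS. A sketch (the helper class is hypothetical; it assumes that constant resolves to "hbase.master.hfilecleaner.plugins"):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.master.cleaner.HFileLinkCleaner;
    import org.apache.hadoop.hbase.master.snapshot.SnapshotHFileCleaner;
    import org.apache.hadoop.hbase.master.snapshot.SnapshotLogCleaner;

    public final class SnapshotCleanerConfig {
      // Registers the snapshot-aware cleaners via class references,
      // avoiding hand-typed fully-qualified names.
      public static void apply(Configuration conf) {
        conf.set("hbase.master.hfilecleaner.plugins",
            HFileLinkCleaner.class.getName() + ","
                + SnapshotHFileCleaner.class.getName());
        conf.set("hbase.master.logcleaner.plugins",
            SnapshotLogCleaner.class.getName());
      }
    }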
@@ -765,16 +858,17 @@ public class TestMasterObserver {
     htd.addFamily(new HColumnDescriptor(TEST_FAMILY));
     HBaseAdmin admin = UTIL.getHBaseAdmin();
 
+    tableCreationLatch = new CountDownLatch(1);
     admin.createTable(htd);
     // preCreateTable can't bypass default action.
     assertTrue("Test table should be created", cp.wasCreateTableCalled());
-    countDown.await();
+    tableCreationLatch.await();
     assertTrue("Table pre create handler called.", cp
         .wasPreCreateTableHandlerCalled());
     assertTrue("Table create handler should be called.",
         cp.wasCreateTableHandlerCalled());
 
-    countDown = new CountDownLatch(1);
+    tableCreationLatch = new CountDownLatch(1);
     admin.disableTable(TEST_TABLE);
     assertTrue(admin.isTableDisabled(TEST_TABLE));
     // preDisableTable can't bypass default action.
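
The re-armed latch makes the asynchronous createTable call synchronous from the test's point of view: postCreateTableHandler counts it down. A hypothetical helper inside TestMasterObserver (not part of this commit) capturing the pattern:

    // Arm the shared latch, issue the create, then block until
    // postCreateTableHandler fires and counts the latch down.
    private static void createTableSync(HBaseAdmin admin, HTableDescriptor htd)
        throws Exception {
      tableCreationLatch = new CountDownLatch(1);
      admin.createTable(htd);
      tableCreationLatch.await();
    }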
@@ -831,7 +925,7 @@ public class TestMasterObserver {
 
     admin.createTable(htd);
     assertTrue("Test table should be created", cp.wasCreateTableCalled());
-    countDown.await();
+    tableCreationLatch.await();
     assertTrue("Table pre create handler called.", cp
         .wasPreCreateTableHandlerCalled());
     assertTrue("Table create handler should be called.",
@@ -908,6 +1002,62 @@ public class TestMasterObserver {
         cp.wasDeleteTableHandlerCalled());
   }
 
+  @Test
+  public void testSnapshotOperations() throws Exception {
+    MiniHBaseCluster cluster = UTIL.getHBaseCluster();
+    HMaster master = cluster.getMaster();
+    MasterCoprocessorHost host = master.getCoprocessorHost();
+    CPMasterObserver cp = (CPMasterObserver)host.findCoprocessor(
+        CPMasterObserver.class.getName());
+    cp.resetStates();
+
+    // create a table
+    HTableDescriptor htd = new HTableDescriptor(TEST_TABLE);
+    htd.addFamily(new HColumnDescriptor(TEST_FAMILY));
+    HBaseAdmin admin = UTIL.getHBaseAdmin();
+
+    tableCreationLatch = new CountDownLatch(1);
+    admin.createTable(htd);
+    tableCreationLatch.await();
+    tableCreationLatch = new CountDownLatch(1);
+
+    admin.disableTable(TEST_TABLE);
+    assertTrue(admin.isTableDisabled(TEST_TABLE));
+
+    try {
+      // Test snapshot operation
+      assertFalse("Coprocessor should not have been called yet",
+        cp.wasSnapshotCalled());
+      admin.snapshot(TEST_SNAPSHOT, TEST_TABLE);
+      assertTrue("Coprocessor should have been called on snapshot",
+        cp.wasSnapshotCalled());
+
+      // Test clone operation
+      admin.cloneSnapshot(TEST_SNAPSHOT, TEST_CLONE);
+      assertTrue("Coprocessor should have been called on snapshot clone",
+        cp.wasCloneSnapshotCalled());
+      assertFalse("Coprocessor restore should not have been called on snapshot clone",
+        cp.wasRestoreSnapshotCalled());
+      admin.disableTable(TEST_CLONE);
+      assertTrue(admin.isTableDisabled(TEST_CLONE));
+      admin.deleteTable(TEST_CLONE);
+
+      // Test restore operation
+      cp.resetStates();
+      admin.restoreSnapshot(TEST_SNAPSHOT);
+      assertTrue("Coprocessor should have been called on snapshot restore",
+        cp.wasRestoreSnapshotCalled());
+      assertFalse("Coprocessor clone should not have been called on snapshot restore",
+        cp.wasCloneSnapshotCalled());
+
+      admin.deleteSnapshot(TEST_SNAPSHOT);
+      assertTrue("Coprocessor should have been called on snapshot delete",
+        cp.wasDeleteSnapshotCalled());
+    } finally {
+      admin.deleteTable(TEST_TABLE);
+    }
+  }
+
   private void modifyTableSync(HBaseAdmin admin, byte[] tableName, HTableDescriptor htd)
       throws IOException {
     admin.modifyTable(tableName, htd);
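
testSnapshotOperations drives the full snapshot lifecycle through HBaseAdmin; stripped of the assertions, the client-side shape is roughly the following sketch (names are illustrative, and the table must already exist and be disabled):

    import java.io.IOException;

    import org.apache.hadoop.hbase.client.HBaseAdmin;
    import org.apache.hadoop.hbase.util.Bytes;

    final class SnapshotLifecycleSketch {
      static void run(HBaseAdmin admin) throws IOException {
        byte[] table = Bytes.toBytes("observed_table");      // disabled beforehand
        byte[] snapshot = Bytes.toBytes("observed_snapshot");
        admin.snapshot(snapshot, table);                     // pre/postSnapshot fire
        admin.cloneSnapshot(snapshot, Bytes.toBytes("observed_clone"));
        admin.restoreSnapshot(snapshot);                     // pre/postRestoreSnapshot
        admin.deleteSnapshot(snapshot);                      // pre/postDeleteSnapshot
      }
    }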

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java?rev=1448506&r1=1448505&r2=1448506&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java Thu Feb 21 03:38:05 2013
@@ -204,6 +204,11 @@ public class TestCatalogJanitor {
     }
 
     @Override
+    public MasterCoprocessorHost getCoprocessorHost() {
+      return null;
+    }
+
+    @Override
     public ServerManager getServerManager() {
       return null;
     }

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileLinkCleaner.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileLinkCleaner.java?rev=1448506&r1=1448505&r2=1448506&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileLinkCleaner.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileLinkCleaner.java Thu Feb 21 03:38:05 2013
@@ -56,9 +56,7 @@ public class TestHFileLinkCleaner {
   public void testHFileLinkCleaning() throws Exception {
     Configuration conf = TEST_UTIL.getConfiguration();
     conf.set(HConstants.HBASE_DIR, TEST_UTIL.getDataTestDir().toString());
-    conf.set(HFileCleaner.MASTER_HFILE_CLEANER_PLUGINS,
-             "org.apache.hadoop.hbase.master.cleaner.TimeToLiveHFileCleaner," +
-             "org.apache.hadoop.hbase.master.cleaner.HFileLinkCleaner");
+    conf.set(HFileCleaner.MASTER_HFILE_CLEANER_PLUGINS, HFileLinkCleaner.class.getName());
     Path rootDir = FSUtils.getRootDir(conf);
     FileSystem fs = FileSystem.get(conf);
 
@@ -100,14 +98,12 @@ public class TestHFileLinkCleaner {
     HFileCleaner cleaner = new HFileCleaner(1000, server, conf, fs, archiveDir);
 
     // Link backref cannot be removed
-    Thread.sleep(ttl * 2);
     cleaner.chore();
     assertTrue(fs.exists(linkBackRef));
     assertTrue(fs.exists(hfilePath));
 
     // Link backref can be removed
     fs.rename(new Path(rootDir, tableLinkName), new Path(archiveDir, tableLinkName));
-    Thread.sleep(ttl * 2);
     cleaner.chore();
     assertFalse("Link should be deleted", fs.exists(linkBackRef));
 

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java?rev=1448506&r1=1448505&r2=1448506&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java Thu Feb 21 03:38:05 2013
@@ -49,7 +49,6 @@ import org.apache.hadoop.hbase.io.hfile.
 import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoderImpl;
 import org.apache.hadoop.hbase.io.hfile.HFileScanner;
 import org.apache.hadoop.hbase.io.hfile.NoOpDataBlockEncoder;
-import org.apache.hadoop.hbase.regionserver.BloomType;
 import org.apache.hadoop.hbase.util.BloomFilterFactory;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ChecksumType;
@@ -90,8 +89,8 @@ public class TestStoreFile extends HBase
    * @throws Exception
    */
   public void testBasicHalfMapFile() throws Exception {
-    // Make up a directory hierarchy that has a regiondir and familyname.
-    Path outputDir = new Path(new Path(this.testDir, "regionname"),
+    // Make up a directory hierarchy that has a regiondir ("7e0102") and familyname.
+    Path outputDir = new Path(new Path(this.testDir, "7e0102"),
         "familyname");
     StoreFile.Writer writer = new StoreFile.WriterBuilder(conf, cacheConf,
         this.fs, 2 * 1024)
@@ -105,6 +104,10 @@ public class TestStoreFile extends HBase
   private void writeStoreFile(final StoreFile.Writer writer) throws IOException {
     writeStoreFile(writer, Bytes.toBytes(getName()), Bytes.toBytes(getName()));
   }
+
+  // pick a split point (roughly halfway)
+  byte[] SPLITKEY = new byte[] { (LAST_CHAR-FIRST_CHAR)/2, FIRST_CHAR};
+
   /*
    * Writes HStoreKey and ImmutableBytes data to passed writer and
    * then closes it.
@@ -133,12 +136,12 @@ public class TestStoreFile extends HBase
    */
   public void testReference()
   throws IOException {
-    Path storedir = new Path(new Path(this.testDir, "regionname"), "familyname");
-    Path dir = new Path(storedir, "1234567890");
+    // Make up a directory hierarchy that has a regiondir ("7e0102") and familyname.
+    Path storedir = new Path(new Path(this.testDir, "7e0102"), "familyname");
     // Make a store file and write data to it.
     StoreFile.Writer writer = new StoreFile.WriterBuilder(conf, cacheConf,
         this.fs, 8 * 1024)
-            .withOutputDir(dir)
+            .withOutputDir(storedir)
             .build();
     writeStoreFile(writer);
     StoreFile hsf = new StoreFile(this.fs, writer.getPath(), conf, cacheConf,
@@ -152,7 +155,7 @@ public class TestStoreFile extends HBase
     kv = KeyValue.createKeyValueFromKey(reader.getLastKey());
     byte [] finalRow = kv.getRow();
     // Make a reference
-    Path refPath = StoreFile.split(fs, dir, hsf, midRow, true);
+    Path refPath = StoreFile.split(fs, storedir, hsf, midRow, true);
     StoreFile refHsf = new StoreFile(this.fs, refPath, conf, cacheConf,
         BloomType.NONE, NoOpDataBlockEncoder.INSTANCE);
     // Now confirm that I can read from the reference and that it only gets
@@ -171,12 +174,17 @@ public class TestStoreFile extends HBase
 
   public void testHFileLink() throws IOException {
     final String columnFamily = "f";
+
+    // force temp data in hbase/target/test-data instead of /tmp/hbase-xxxx/ 
+    Configuration testConf = new Configuration(this.conf); 
+    FSUtils.setRootDir(testConf, this.testDir);  
+
     HRegionInfo hri = new HRegionInfo(Bytes.toBytes("table-link"));
-    Path storedir = new Path(new Path(FSUtils.getRootDir(conf),
-      new Path(hri.getTableNameAsString(), hri.getEncodedName())), columnFamily);
+    Path storedir = new Path(new Path(this.testDir,
+        new Path(hri.getTableNameAsString(), hri.getEncodedName())), columnFamily);
 
     // Make a store file and write data to it.
-    StoreFile.Writer writer = new StoreFile.WriterBuilder(conf, cacheConf,
+    StoreFile.Writer writer = new StoreFile.WriterBuilder(testConf, cacheConf,
          this.fs, 8 * 1024)
             .withOutputDir(storedir)
             .build();
@@ -184,13 +192,13 @@ public class TestStoreFile extends HBase
     writeStoreFile(writer);
     writer.close();
 
-    Path dstPath = new Path(FSUtils.getRootDir(conf), new Path("test-region", columnFamily));
-    HFileLink.create(conf, this.fs, dstPath, hri, storeFilePath.getName());
+    Path dstPath = new Path(this.testDir, new Path("test-region", columnFamily));
+    HFileLink.create(testConf, this.fs, dstPath, hri, storeFilePath.getName());
     Path linkFilePath = new Path(dstPath,
                   HFileLink.createHFileLinkName(hri, storeFilePath.getName()));
 
     // Try to open store file from link
-    StoreFile hsf = new StoreFile(this.fs, linkFilePath, conf, cacheConf,
+    StoreFile hsf = new StoreFile(this.fs, linkFilePath, testConf, cacheConf,
         BloomType.NONE, NoOpDataBlockEncoder.INSTANCE);
     assertTrue(hsf.isLink());
 
@@ -204,6 +212,108 @@ public class TestStoreFile extends HBase
     assertEquals((LAST_CHAR - FIRST_CHAR + 1) * (LAST_CHAR - FIRST_CHAR + 1), count);
   }
 
+  /**
+   * Validate that we can handle valid tables with '.', '_', and '-' chars.
+   */
+  public void testStoreFileNames() {
+    String[] legalHFileLink = { "MyTable_02=abc012-def345", "MyTable_02.300=abc012-def345",
+      "MyTable_02-400=abc012-def345", "MyTable_02-400.200=abc012-def345",
+      "MyTable_02=abc012-def345_SeqId_1_", "MyTable_02=abc012-def345_SeqId_20_" };
+    for (String name: legalHFileLink) {
+      assertTrue("should be a valid link: " + name, HFileLink.isHFileLink(name));
+      assertTrue("should be a valid StoreFile" + name, StoreFile.validateStoreFileName(name));
+      assertFalse("should not be a valid reference: " + name, StoreFile.isReference(name));
+
+      String refName = name + ".6789";
+      assertTrue("should be a valid link reference: " + refName, StoreFile.isReference(refName));
+      assertTrue("should be a valid StoreFile" + refName, StoreFile.validateStoreFileName(refName));
+    }
+
+    String[] illegalHFileLink = { ".MyTable_02=abc012-def345", "-MyTable_02.300=abc012-def345",
+      "MyTable_02-400=abc0_12-def345", "MyTable_02-400.200=abc012-def345...." };
+    for (String name: illegalHFileLink) {
+      assertFalse("should not be a valid link: " + name, HFileLink.isHFileLink(name));
+    }
+  }
+
+  /**
+   * This test creates an hfile and then the dir structures and files to verify that references
+   * to hfilelinks (created by snapshot clones) can be properly interpreted.
+   */
+  public void testReferenceToHFileLink() throws IOException {
+    final String columnFamily = "f";
+
+    Path rootDir = FSUtils.getRootDir(conf);
+
+    String tablename = "_original-evil-name"; // adding legal table name chars to verify regex handles it.
+    HRegionInfo hri = new HRegionInfo(Bytes.toBytes(tablename));
+    // store dir = <root>/<tablename>/<rgn>/<cf>
+    Path storedir = new Path(new Path(rootDir,
+      new Path(hri.getTableNameAsString(), hri.getEncodedName())), columnFamily);
+
+    // Make a store file and write data to it. <root>/<tablename>/<rgn>/<cf>/<file>
+    StoreFile.Writer writer = new StoreFile.WriterBuilder(conf, cacheConf,
+         this.fs, 8 * 1024)
+            .withOutputDir(storedir)
+            .build();
+    Path storeFilePath = writer.getPath();
+    writeStoreFile(writer);
+    writer.close();
+
+    // create link to store file. <root>/clone/region/<cf>/<hfile>-<region>-<table>
+    String target = "clone";
+    Path dstPath = new Path(rootDir, new Path(new Path(target, "7e0102"), columnFamily));
+    HFileLink.create(conf, this.fs, dstPath, hri, storeFilePath.getName());
+    Path linkFilePath = new Path(dstPath,
+                  HFileLink.createHFileLinkName(hri, storeFilePath.getName()));
+
+    // create splits of the link.
+    // <root>/clone/splitA/<cf>/<reftohfilelink>,
+    // <root>/clone/splitB/<cf>/<reftohfilelink>
+    Path splitDirA = new Path(new Path(rootDir,
+        new Path(target, "571A")), columnFamily);
+    Path splitDirB = new Path(new Path(rootDir,
+        new Path(target, "571B")), columnFamily);
+    StoreFile f = new StoreFile(fs, linkFilePath, conf, cacheConf, BloomType.NONE,
+        NoOpDataBlockEncoder.INSTANCE);
+    byte[] splitRow = SPLITKEY;
+    Path pathA = StoreFile.split(fs, splitDirA, f, splitRow, true); // top
+    Path pathB = StoreFile.split(fs, splitDirB, f, splitRow, false); // bottom
+
+    // OK test the thing
+    FSUtils.logFileSystemState(fs, rootDir, LOG);
+
+    // There is a case where a file with the hfilelink pattern is actually a daughter
+    // reference to an hfile link.  There is code in StoreFile that handles this case.
+    
+    // Try to open store file from link
+    StoreFile hsfA = new StoreFile(this.fs, pathA,  conf, cacheConf,
+        BloomType.NONE, NoOpDataBlockEncoder.INSTANCE);
+
+    // Now confirm that I can read from the ref to link
+    int count = 1;
+    HFileScanner s = hsfA.createReader().getScanner(false, false);
+    s.seekTo();
+    while (s.next()) {
+      count++;
+    }
+    assertTrue(count > 0); // read some rows here
+    
+    // Try to open store file from link
+    StoreFile hsfB = new StoreFile(this.fs, pathB,  conf, cacheConf,
+        BloomType.NONE, NoOpDataBlockEncoder.INSTANCE);
+
+    // Now confirm that I can read from the ref to link
+    HFileScanner sB = hsfB.createReader().getScanner(false, false);
+    sB.seekTo();
+    while (sB.next()) {
+      count++;
+    }
+
+    // read the rest of the rows
+    assertEquals((LAST_CHAR - FIRST_CHAR + 1) * (LAST_CHAR - FIRST_CHAR + 1), count);
+  }
+
   private void checkHalfHFile(final StoreFile f)
   throws IOException {
     byte [] midkey = f.createReader().midkey();
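
testStoreFileNames above exercises the hfilelink name grammar, "<table>=<region>-<hfile>", plus a trailing ".<suffix>" that marks a daughter reference to a link. A quick illustrative check (the class and values are made up; the static helpers are the ones the test calls):

    import org.apache.hadoop.hbase.io.HFileLink;
    import org.apache.hadoop.hbase.regionserver.StoreFile;

    public final class LinkNameCheck {
      public static void main(String[] args) {
        String link = "MyTable_02=abc012-def345";  // <table>=<region>-<hfile>
        String ref = link + ".6789";               // daughter reference to the link
        System.out.println(HFileLink.isHFileLink(link));           // expect true
        System.out.println(StoreFile.isReference(ref));            // expect true
        System.out.println(StoreFile.validateStoreFileName(ref));  // expect true
      }
    }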
@@ -672,8 +782,8 @@ public class TestStoreFile extends HBase
     long[] timestamps = new long[] {20,10,5,1};
     Scan scan = new Scan();
 
-    Path storedir = new Path(new Path(this.testDir, "regionname"),
-    "familyname");
+    // Make up a directory hierarchy that has a regiondir ("7e0102") and familyname.
+    Path storedir = new Path(new Path(this.testDir, "7e0102"), "familyname");
     Path dir = new Path(storedir, "1234567890");
     StoreFile.Writer writer = new StoreFile.WriterBuilder(conf, cacheConf,
         this.fs, 8 * 1024)
@@ -716,8 +826,8 @@ public class TestStoreFile extends HBase
   public void testCacheOnWriteEvictOnClose() throws Exception {
     Configuration conf = this.conf;
 
-    // Find a home for our files
-    Path baseDir = new Path(new Path(this.testDir, "regionname"),"twoCOWEOC");
+    // Find a home for our files (regiondir ("7e0102") and familyname).
+    Path baseDir = new Path(new Path(this.testDir, "7e0102"),"twoCOWEOC");
 
     // Grab the block cache and get the initial hit/miss counts
     BlockCache bc = new CacheConfig(conf).getBlockCache();
@@ -869,7 +979,8 @@ public class TestStoreFile extends HBase
    * file info.
    */
   public void testDataBlockEncodingMetaData() throws IOException {
-    Path dir = new Path(new Path(this.testDir, "regionname"), "familyname");
+    // Make up a directory hierarchy that has a regiondir ("7e0102") and familyname.
+    Path dir = new Path(new Path(this.testDir, "7e0102"), "familyname");
     Path path = new Path(dir, "1234567890");
 
     DataBlockEncoding dataBlockEncoderAlgo =

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java?rev=1448506&r1=1448505&r2=1448506&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java Thu Feb 21 03:38:05 2013
@@ -121,6 +121,11 @@ public class TestAccessController {
   public static void setupBeforeClass() throws Exception {
     // setup configuration
     conf = TEST_UTIL.getConfiguration();
+    conf.set("hbase.master.hfilecleaner.plugins",
+      "org.apache.hadoop.hbase.master.cleaner.HFileLinkCleaner," +
+      "org.apache.hadoop.hbase.master.snapshot.SnapshotHFileCleaner");
+    conf.set("hbase.master.logcleaner.plugins",
+      "org.apache.hadoop.hbase.master.snapshot.SnapshotLogCleaner");
     SecureTestUtil.enableSecurity(conf);
 
     TEST_UTIL.startMiniCluster();
@@ -1796,4 +1801,50 @@ public class TestAccessController {
     verifyDenied(action, USER_CREATE, USER_RW, USER_RO, USER_NONE, USER_OWNER);
   }
 
+  @Test
+  public void testSnapshot() throws Exception {
+    PrivilegedExceptionAction snapshotAction = new PrivilegedExceptionAction() {
+      public Object run() throws Exception {
+        ACCESS_CONTROLLER.preSnapshot(ObserverContext.createAndPrepare(CP_ENV, null),
+          null, null);
+        return null;
+      }
+    };
+
+    PrivilegedExceptionAction deleteAction = new PrivilegedExceptionAction() {
+      public Object run() throws Exception {
+        ACCESS_CONTROLLER.preDeleteSnapshot(ObserverContext.createAndPrepare(CP_ENV, null),
+          null);
+        return null;
+      }
+    };
+
+    PrivilegedExceptionAction restoreAction = new PrivilegedExceptionAction() {
+      public Object run() throws Exception {
+        ACCESS_CONTROLLER.preRestoreSnapshot(ObserverContext.createAndPrepare(CP_ENV, null),
+          null, null);
+        return null;
+      }
+    };
+
+    PrivilegedExceptionAction cloneAction = new PrivilegedExceptionAction() {
+      public Object run() throws Exception {
+        ACCESS_CONTROLLER.preCloneSnapshot(ObserverContext.createAndPrepare(CP_ENV, null),
+          null, null);
+        return null;
+      }
+    };
+
+    verifyAllowed(snapshotAction, SUPERUSER, USER_ADMIN);
+    verifyDenied(snapshotAction, USER_CREATE, USER_RW, USER_RO, USER_NONE, USER_OWNER);
+
+    verifyAllowed(cloneAction, SUPERUSER, USER_ADMIN);
+    verifyDenied(cloneAction, USER_CREATE, USER_RW, USER_RO, USER_NONE, USER_OWNER);
+
+    verifyAllowed(restoreAction, SUPERUSER, USER_ADMIN);
+    verifyDenied(restoreAction, USER_CREATE, USER_RW, USER_RO, USER_NONE, USER_OWNER);
+
+    verifyAllowed(deleteAction, SUPERUSER, USER_ADMIN);
+    verifyDenied(deleteAction, USER_CREATE, USER_RW, USER_RO, USER_NONE, USER_OWNER);
+  }
 }

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHFileArchiveUtil.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHFileArchiveUtil.java?rev=1448506&r1=1448505&r2=1448506&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHFileArchiveUtil.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHFileArchiveUtil.java Thu Feb 21 03:38:05 2013
@@ -50,9 +50,10 @@ public class TestHFileArchiveUtil {
   
   @Test
   public void testRegionArchiveDir() {
+    Configuration conf = null;
     Path tableDir = new Path("table");
     Path regionDir = new Path("region");
-    assertNotNull(HFileArchiveUtil.getRegionArchiveDir(null, tableDir, regionDir));
+    assertNotNull(HFileArchiveUtil.getRegionArchiveDir(conf, tableDir, regionDir));
   }
   
   @Test


