hbase-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From zhang...@apache.org
Subject [hbase] branch branch-2 updated: HBASE-23782 We still reference the hard coded meta descriptor in some places when listing table descriptors (#1115)
Date Mon, 03 Feb 2020 06:32:49 GMT
This is an automated email from the ASF dual-hosted git repository.

zhangduo pushed a commit to branch branch-2
in repository https://gitbox.apache.org/repos/asf/hbase.git


The following commit(s) were added to refs/heads/branch-2 by this push:
     new f94dbeb  HBASE-23782 We still reference the hard coded meta descriptor in some places when listing table descriptors (#1115)
f94dbeb is described below

commit f94dbebffa14632e9fd4ea037a9f6558b5bf3431
Author: Duo Zhang <zhangduo@apache.org>
AuthorDate: Mon Feb 3 14:15:57 2020 +0800

    HBASE-23782 We still reference the hard coded meta descriptor in some places when listing table descriptors (#1115)
    
    Signed-off-by: Viraj Jasani <vjasani@apache.org>
---
 .../hadoop/hbase/util/FSTableDescriptors.java      | 77 +++++++++-------------
 .../org/apache/hadoop/hbase/TestHBaseMetaEdit.java | 31 +++++++--
 .../hbase/regionserver/TestDefaultMemStore.java    |  2 +-
 3 files changed, 56 insertions(+), 54 deletions(-)

diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java
index 439c696..56fa883 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hbase.util;
 
+import edu.umd.cs.findbugs.annotations.Nullable;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.util.Comparator;
@@ -27,8 +28,6 @@ import java.util.concurrent.ConcurrentHashMap;
 import java.util.function.Function;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
-
-import edu.umd.cs.findbugs.annotations.Nullable;
 import org.apache.commons.lang3.NotImplementedException;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
@@ -37,23 +36,24 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
-import org.apache.hadoop.hbase.client.CoprocessorDescriptorBuilder;
-import org.apache.hadoop.hbase.coprocessor.MultiRowMutationEndpoint;
-import org.apache.yetus.audience.InterfaceAudience;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.hadoop.hbase.Coprocessor;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.TableDescriptors;
+import org.apache.hadoop.hbase.TableInfoMissingException;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
+import org.apache.hadoop.hbase.client.CoprocessorDescriptorBuilder;
 import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-import org.apache.hadoop.hbase.Coprocessor;
+import org.apache.hadoop.hbase.coprocessor.MultiRowMutationEndpoint;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
-import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.regionserver.BloomType;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
 import org.apache.hbase.thirdparty.com.google.common.primitives.Ints;
-import org.apache.hadoop.hbase.TableDescriptors;
-import org.apache.hadoop.hbase.TableInfoMissingException;
-import org.apache.hadoop.hbase.TableName;
 
 /**
  * Implementation of {@link TableDescriptors} that reads descriptors from the
@@ -100,11 +100,6 @@ public class FSTableDescriptors implements TableDescriptors {
   private final Map<TableName, TableDescriptor> cache = new ConcurrentHashMap<>();
 
   /**
-   * Table descriptor for <code>hbase:meta</code> catalog table
-   */
-  private final TableDescriptor metaTableDescriptor;
-
-  /**
    * Construct a FSTableDescriptors instance using the hbase root dir of the given
    * conf and the filesystem where that root dir lives.
    * This instance can do write operations (is not read only).
@@ -135,31 +130,29 @@ public class FSTableDescriptors implements TableDescriptors {
    *                     see HMaster#finishActiveMasterInitialization
    *                     TODO: This is a workaround. Should remove this ugly code...
    */
-  public FSTableDescriptors(final Configuration conf, final FileSystem fs,
-       final Path rootdir, final boolean fsreadonly, final boolean usecache,
-       Function<TableDescriptorBuilder, TableDescriptorBuilder> metaObserver) throws IOException {
+  public FSTableDescriptors(final Configuration conf, final FileSystem fs, final Path rootdir,
+    final boolean fsreadonly, final boolean usecache,
+    Function<TableDescriptorBuilder, TableDescriptorBuilder> metaObserver) throws IOException {
     this.fs = fs;
     this.rootdir = rootdir;
     this.fsreadonly = fsreadonly;
     this.usecache = usecache;
-    TableDescriptor td = null;
-    try {
-      td = getTableDescriptorFromFs(fs, rootdir, TableName.META_TABLE_NAME);
-    } catch (TableInfoMissingException e) {
-      td = metaObserver == null? createMetaTableDescriptor(conf):
-        metaObserver.apply(createMetaTableDescriptorBuilder(conf)).build();
-      if (!fsreadonly) {
+    if (!fsreadonly) {
+      // see if we already have meta descriptor on fs. Write one if not.
+      try {
+        getTableDescriptorFromFs(fs, rootdir, TableName.META_TABLE_NAME);
+      } catch (TableInfoMissingException e) {
+        TableDescriptorBuilder builder = createMetaTableDescriptorBuilder(conf);
+        if (metaObserver != null) {
+          metaObserver.apply(builder);
+        }
+        TableDescriptor td = builder.build();
         LOG.info("Creating new hbase:meta table default descriptor/schema {}", td);
         updateTableDescriptor(td);
       }
     }
-    this.metaTableDescriptor = td;
   }
 
-  /**
-   *
-   * Make this private as soon as we've undone test dependency.
-   */
   @VisibleForTesting
  public static TableDescriptorBuilder createMetaTableDescriptorBuilder(final Configuration conf)
     throws IOException {
@@ -200,12 +193,6 @@ public class FSTableDescriptors implements TableDescriptors {
         .build());
   }
 
-  @VisibleForTesting
-  public static TableDescriptor createMetaTableDescriptor(final Configuration conf)
-      throws IOException {
-    return createMetaTableDescriptorBuilder(conf).build();
-  }
-
   @Override
   public void setCacheOn() throws IOException {
     this.cache.clear();
@@ -266,16 +253,12 @@ public class FSTableDescriptors implements TableDescriptors {
    * Returns a map from table name to table descriptor for all tables.
    */
   @Override
-  public Map<String, TableDescriptor> getAll()
-  throws IOException {
+  public Map<String, TableDescriptor> getAll() throws IOException {
     Map<String, TableDescriptor> tds = new TreeMap<>();
-
     if (fsvisited && usecache) {
       for (Map.Entry<TableName, TableDescriptor> entry: this.cache.entrySet()) {
         tds.put(entry.getKey().getNameWithNamespaceInclAsString(), entry.getValue());
       }
-      // add hbase:meta to the response
-      tds.put(this.metaTableDescriptor.getTableName().getNameAsString(), metaTableDescriptor);
     } else {
       LOG.trace("Fetching table descriptors from the filesystem.");
       boolean allvisited = true;
@@ -558,15 +541,17 @@ public class FSTableDescriptors implements TableDescriptors {
    * @throws IOException Thrown if failed update.
    * @throws NotImplementedException if in read only mode
    */
-  @VisibleForTesting Path updateTableDescriptor(TableDescriptor td)
-  throws IOException {
+  @VisibleForTesting
+  Path updateTableDescriptor(TableDescriptor td) throws IOException {
     if (fsreadonly) {
      throw new NotImplementedException("Cannot update a table descriptor - in read only mode");
     }
     TableName tableName = td.getTableName();
     Path tableDir = getTableDir(tableName);
     Path p = writeTableDescriptor(fs, td, tableDir, getTableInfoPath(tableDir));
-    if (p == null) throw new IOException("Failed update");
+    if (p == null) {
+      throw new IOException("Failed update");
+    }
     LOG.info("Updated tableinfo=" + p);
     if (usecache) {
       this.cache.put(td.getTableName(), td);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHBaseMetaEdit.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHBaseMetaEdit.java
index bbdb327..8c9fe9e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHBaseMetaEdit.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHBaseMetaEdit.java
@@ -22,7 +22,7 @@ import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
 import java.io.IOException;
-
+import java.util.Collections;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
@@ -30,7 +30,7 @@ import org.apache.hadoop.hbase.client.RegionInfoBuilder;
 import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.regionserver.Region;
-import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.After;
@@ -44,7 +44,7 @@ import org.junit.rules.TestName;
 /**
  * Test being able to edit hbase:meta.
  */
-@Category({MiscTests.class, LargeTests.class})
+@Category({ MiscTests.class, MediumTests.class })
 public class TestHBaseMetaEdit {
   @ClassRule
   public static final HBaseClassTestRule CLASS_RULE =
@@ -63,6 +63,23 @@ public class TestHBaseMetaEdit {
     UTIL.shutdownMiniCluster();
   }
 
+  // make sure that with every possible way, we get the same meta table descriptor.
+  private TableDescriptor getMetaDescriptor() throws TableNotFoundException, IOException {
+    Admin admin = UTIL.getAdmin();
+    TableDescriptor get = admin.getDescriptor(TableName.META_TABLE_NAME);
+    TableDescriptor list = admin.listTableDescriptors(null, true).stream()
+      .filter(td -> td.isMetaTable()).findAny().get();
+    TableDescriptor listByName =
+      admin.listTableDescriptors(Collections.singletonList(TableName.META_TABLE_NAME)).get(0);
+    TableDescriptor listByNs =
+      admin.listTableDescriptorsByNamespace(NamespaceDescriptor.SYSTEM_NAMESPACE_NAME).stream()
+        .filter(td -> td.isMetaTable()).findAny().get();
+    assertEquals(get, list);
+    assertEquals(get, listByName);
+    assertEquals(get, listByNs);
+    return get;
+  }
+
   /**
    * Set versions, set HBASE-16213 indexed block encoding, and add a column family.
    * Delete the column family. Then try to delete a core hbase:meta family (should fail).
@@ -73,7 +90,7 @@ public class TestHBaseMetaEdit {
   public void testEditMeta() throws IOException {
     Admin admin = UTIL.getAdmin();
     admin.tableExists(TableName.META_TABLE_NAME);
-    TableDescriptor originalDescriptor = admin.getDescriptor(TableName.META_TABLE_NAME);
+    TableDescriptor originalDescriptor = getMetaDescriptor();
     ColumnFamilyDescriptor cfd = originalDescriptor.getColumnFamily(HConstants.CATALOG_FAMILY);
     int oldVersions = cfd.getMaxVersions();
     // Add '1' to current versions count. Set encoding too.
@@ -85,11 +102,11 @@ public class TestHBaseMetaEdit {
     ColumnFamilyDescriptor newCfd =
       ColumnFamilyDescriptorBuilder.newBuilder(extraColumnFamilyName).build();
     admin.addColumnFamily(TableName.META_TABLE_NAME, newCfd);
-    TableDescriptor descriptor = admin.getDescriptor(TableName.META_TABLE_NAME);
+    TableDescriptor descriptor = getMetaDescriptor();
     // Assert new max versions is == old versions plus 1.
     assertEquals(oldVersions + 1,
         descriptor.getColumnFamily(HConstants.CATALOG_FAMILY).getMaxVersions());
-    descriptor = admin.getDescriptor(TableName.META_TABLE_NAME);
+    descriptor = getMetaDescriptor();
     // Assert new max versions is == old versions plus 1.
     assertEquals(oldVersions + 1,
         descriptor.getColumnFamily(HConstants.CATALOG_FAMILY).getMaxVersions());
@@ -107,7 +124,7 @@ public class TestHBaseMetaEdit {
     assertTrue(r.getStore(extraColumnFamilyName) != null);
     // Assert we can't drop critical hbase:meta column family but we can drop any other.
     admin.deleteColumnFamily(TableName.META_TABLE_NAME, newCfd.getName());
-    descriptor = admin.getDescriptor(TableName.META_TABLE_NAME);
+    descriptor = getMetaDescriptor();
     assertTrue(descriptor.getColumnFamily(newCfd.getName()) == null);
     try {
       admin.deleteColumnFamily(TableName.META_TABLE_NAME, HConstants.CATALOG_FAMILY);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java
index 77f796f..af3cad7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java
@@ -949,7 +949,7 @@ public class TestDefaultMemStore {
     edge.setCurrentTimeMillis(1234);
     WALFactory wFactory = new WALFactory(conf, "1234");
     HRegion meta = HRegion.createHRegion(RegionInfoBuilder.FIRST_META_REGIONINFO, testDir,
-        conf, FSTableDescriptors.createMetaTableDescriptor(conf),
+        conf, FSTableDescriptors.createMetaTableDescriptorBuilder(conf).build(),
         wFactory.getWAL(RegionInfoBuilder.FIRST_META_REGIONINFO));
     // parameterized tests add [#] suffix get rid of [ and ].
     TableDescriptor desc = TableDescriptorBuilder


Mime
View raw message