hbase-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From chia7...@apache.org
Subject [1/3] hbase git commit: HBASE-18008 Any HColumnDescriptor we give out should be immutable
Date Thu, 08 Jun 2017 15:27:57 GMT
Repository: hbase
Updated Branches:
  refs/heads/master f17fa223b -> 72cb7d97c


http://git-wip-us.apache.org/repos/asf/hbase/blob/72cb7d97/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptorBuilder.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptorBuilder.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptorBuilder.java
index 6c0fa65..7a90a71 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptorBuilder.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptorBuilder.java
@@ -20,11 +20,11 @@ package org.apache.hadoop.hbase.client;
 
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
-import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -32,13 +32,12 @@ import java.util.TreeMap;
 import java.util.TreeSet;
 import java.util.function.Function;
 import java.util.regex.Matcher;
+import java.util.stream.Stream;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Coprocessor;
-import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -49,12 +48,10 @@ import org.apache.hadoop.hbase.util.Bytes;
 
 @InterfaceAudience.Public
 public class TableDescriptorBuilder {
-
-  private static final Log LOG = LogFactory.getLog(TableDescriptorBuilder.class);
-
+  public static final Log LOG = LogFactory.getLog(TableDescriptorBuilder.class);
   @InterfaceAudience.Private
   public static final String SPLIT_POLICY = "SPLIT_POLICY";
-
+  private static final Bytes SPLIT_POLICY_KEY = new Bytes(Bytes.toBytes(SPLIT_POLICY));
   /**
    * Used by HBase Shell interface to access this metadata
    * attribute which denotes the maximum size of the store file after which a
@@ -101,7 +98,7 @@ public class TableDescriptorBuilder {
 
   @InterfaceAudience.Private
   public static final String FLUSH_POLICY = "FLUSH_POLICY";
-
+  private static final Bytes FLUSH_POLICY_KEY = new Bytes(Bytes.toBytes(FLUSH_POLICY));
   /**
    * Used by rest interface to access this metadata attribute
 * which denotes whether it is a catalog table, i.e. <code> hbase:meta </code>.
@@ -162,17 +159,6 @@ public class TableDescriptorBuilder {
    */
   private static final int DEFAULT_PRIORITY = HConstants.NORMAL_QOS;
 
-  /*
-     *  The below are ugly but better than creating them each time till we
-     *  replace booleans being saved as Strings with plain booleans.  Need a
-     *  migration script to do this.  TODO.
-   */
-  private static final Bytes FALSE
-          = new Bytes(Bytes.toBytes(Boolean.FALSE.toString()));
-
-  private static final Bytes TRUE
-          = new Bytes(Bytes.toBytes(Boolean.TRUE.toString()));
-
   /**
    * Constant that denotes whether the table is READONLY by default and is false
    */
@@ -228,7 +214,7 @@ public class TableDescriptorBuilder {
    */
   public static final TableDescriptor NAMESPACE_TABLEDESC
     = TableDescriptorBuilder.newBuilder(TableName.NAMESPACE_TABLE_NAME)
-                            .addFamily(new HColumnDescriptor(NAMESPACE_FAMILY_INFO)
+                            .addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(NAMESPACE_FAMILY_INFO_BYTES)
                               // Ten is arbitrary number.  Keep versions to help debugging.
                               .setMaxVersions(10)
                               .setInMemory(true)
@@ -236,8 +222,9 @@ public class TableDescriptorBuilder {
                               .setScope(HConstants.REPLICATION_SCOPE_LOCAL)
                               // Enable cache of data blocks in L1 if more than one caching tier deployed:
                               // e.g. if using CombinedBlockCache (BucketCache).
-                              .setCacheDataInL1(true))
-                            .doBuild();
+                              .setCacheDataInL1(true)
+                              .build())
+                            .build();
   private final ModifyableTableDescriptor desc;
 
   /**
@@ -248,10 +235,6 @@ public class TableDescriptorBuilder {
     if (desc instanceof ModifyableTableDescriptor) {
       return ((ModifyableTableDescriptor) desc).toByteArray();
     }
-    // TODO: remove this if the HTableDescriptor is removed
-    if (desc instanceof HTableDescriptor) {
-      return ((HTableDescriptor) desc).toByteArray();
-    }
     return new ModifyableTableDescriptor(desc).toByteArray();
   }
 
@@ -261,14 +244,18 @@ public class TableDescriptorBuilder {
    * @return This instance serialized with pb with pb magic prefix
    * @throws org.apache.hadoop.hbase.exceptions.DeserializationException
    */
-  public static TableDescriptorBuilder newBuilder(byte[] pbBytes) throws DeserializationException {
-    return new TableDescriptorBuilder(ModifyableTableDescriptor.parseFrom(pbBytes));
+  public static TableDescriptor parseFrom(byte[] pbBytes) throws DeserializationException {
+    return ModifyableTableDescriptor.parseFrom(pbBytes);
   }
 
   public static TableDescriptorBuilder newBuilder(final TableName name) {
     return new TableDescriptorBuilder(name);
   }
 
+  public static TableDescriptor copy(TableDescriptor desc) throws DeserializationException {
+    return new ModifyableTableDescriptor(desc);
+  }
+
   /**
    * Copy all configuration, values, families, and name from the input.
   * @param desc The descriptor to copy
@@ -301,23 +288,23 @@ public class TableDescriptorBuilder {
     return this;
   }
 
-  public TableDescriptorBuilder addFamily(final HColumnDescriptor family) {
-    desc.addFamily(family);
+  public TableDescriptorBuilder addColumnFamily(final ColumnFamilyDescriptor family) {
+    desc.addColumnFamily(family);
     return this;
   }
 
-  public TableDescriptorBuilder modifyFamily(final HColumnDescriptor family) {
-    desc.modifyFamily(family);
+  public TableDescriptorBuilder modifyColumnFamily(final ColumnFamilyDescriptor family) {
+    desc.modifyColumnFamily(family);
     return this;
   }
 
-  public TableDescriptorBuilder remove(Bytes key) {
-    desc.remove(key);
+  public TableDescriptorBuilder removeValue(Bytes key) {
+    desc.removeValue(key);
     return this;
   }
 
-  public TableDescriptorBuilder remove(byte[] key) {
-    desc.remove(key);
+  public TableDescriptorBuilder removeValue(byte[] key) {
+    desc.removeValue(key);
     return this;
   }
 
@@ -326,8 +313,8 @@ public class TableDescriptorBuilder {
     return this;
   }
 
-  public TableDescriptorBuilder removeFamily(final byte[] column) {
-    desc.removeFamily(column);
+  public TableDescriptorBuilder removeColumnFamily(final byte[] name) {
+    desc.removeColumnFamily(name);
     return this;
   }
 
@@ -418,13 +405,7 @@ public class TableDescriptorBuilder {
     return this;
   }
 
-  // TODO: replaced the HTableDescriptor by TableDescriptor
-  public HTableDescriptor build() {
-    return new HTableDescriptor(desc);
-  }
-
-  // TODO: remove this in HBase 3.0.0.
-  private TableDescriptor doBuild() {
+  public TableDescriptor build() {
     return new ModifyableTableDescriptor(desc);
   }
 
@@ -452,55 +433,49 @@ public class TableDescriptorBuilder {
     private final Map<String, String> configuration = new HashMap<>();
 
     /**
-     * Maps column family name to the respective HColumnDescriptors
+     * Maps column family name to the respective FamilyDescriptors
      */
-    private final Map<byte[], HColumnDescriptor> families
+    private final Map<byte[], ColumnFamilyDescriptor> families
             = new TreeMap<>(Bytes.BYTES_RAWCOMPARATOR);
 
     /**
      * Construct a table descriptor specifying a TableName object
      *
      * @param name Table name.
-     * @see
-     * <a href="https://issues.apache.org/jira/browse/HBASE-174">HADOOP-1581
-     * HBASE: (HBASE-174) Un-openable tablename bug</a>
+     * TODO: make this private after removing the HTableDescriptor
      */
-    private ModifyableTableDescriptor(final TableName name) {
+    @InterfaceAudience.Private
+    public ModifyableTableDescriptor(final TableName name) {
       this(name, Collections.EMPTY_LIST, Collections.EMPTY_MAP, Collections.EMPTY_MAP);
     }
 
+    private ModifyableTableDescriptor(final TableDescriptor desc) {
+      this(desc.getTableName(), Arrays.asList(desc.getColumnFamilies()), desc.getValues(), desc.getConfiguration());
+    }
+
     /**
      * Construct a table descriptor by cloning the descriptor passed as a
      * parameter.
      * <p>
      * Makes a deep copy of the supplied descriptor.
-     * TODO: make this private after removing the HTableDescriptor
+     * @param name The new name
      * @param desc The descriptor.
+     * TODO: make this private after removing the HTableDescriptor
      */
     @InterfaceAudience.Private
-    protected ModifyableTableDescriptor(final TableDescriptor desc) {
-      this(desc.getTableName(), desc.getFamilies(), desc.getValues(), desc.getConfiguration());
+    @Deprecated // only used by HTableDescriptor. remove this method if HTD is removed
+    public ModifyableTableDescriptor(final TableName name, final TableDescriptor desc) {
+      this(name, Arrays.asList(desc.getColumnFamilies()), desc.getValues(), desc.getConfiguration());
     }
 
-    // TODO: make this private after removing the HTableDescriptor
-    @InterfaceAudience.Private
-    public ModifyableTableDescriptor(final TableName name, final Collection<HColumnDescriptor> families,
+    private ModifyableTableDescriptor(final TableName name, final Collection<ColumnFamilyDescriptor> families,
             Map<Bytes, Bytes> values, Map<String, String> configuration) {
       this.name = name;
-      families.forEach(c -> this.families.put(c.getName(), new HColumnDescriptor(c)));
-      values.forEach(this.values::put);
-      configuration.forEach(this.configuration::put);
-      setMetaFlags(name);
-    }
-
-    /*
-     * Set meta flags on this table.
-     * IS_META_KEY is set if its a hbase:meta table
-     * Called by constructors.
-     * @param name
-     */
-    private void setMetaFlags(final TableName name) {
-      values.put(IS_META_KEY, name.equals(TableName.META_TABLE_NAME) ? TRUE : FALSE);
+      families.forEach(c -> this.families.put(c.getName(), ColumnFamilyDescriptorBuilder.copy(c)));
+      this.values.putAll(values);
+      this.configuration.putAll(configuration);
+      this.values.put(IS_META_KEY,
+        new Bytes(Bytes.toBytes(Boolean.toString(name.equals(TableName.META_TABLE_NAME)))));
     }
 
     /**
@@ -510,16 +485,7 @@ public class TableDescriptorBuilder {
      */
     @Override
     public boolean isMetaRegion() {
-      return isSomething(IS_META_KEY, false);
-    }
-
-    private boolean isSomething(final Bytes key,
-            final boolean valueIfNull) {
-      byte[] value = getValue(key);
-      if (value != null) {
-        return Boolean.valueOf(Bytes.toString(value));
-      }
-      return valueIfNull;
+      return getOrDefault(IS_META_KEY, Boolean::valueOf, false);
     }
 
     /**
@@ -532,39 +498,24 @@ public class TableDescriptorBuilder {
       return isMetaRegion();
     }
 
-    /**
-     * Getter for accessing the metadata associated with the key
-     *
-     * @param key The key.
-     * @return The value.
-     * @see #values
-     */
     @Override
-    public byte[] getValue(byte[] key) {
-      return getValue(new Bytes(key));
+    public Bytes getValue(Bytes key) {
+      return values.get(key);
     }
 
-    private byte[] getValue(final Bytes key) {
-      Bytes ibw = values.get(key);
-      if (ibw == null) {
-        return null;
-      }
-      return ibw.get();
+    @Override
+    public byte[] getValue(byte[] key) {
+      Bytes value = values.get(new Bytes(key));
+      return value == null ? null : value.get();
     }
 
-    /**
-     * Getter for accessing the metadata associated with the key
-     *
-     * @param key The key.
-     * @return The value.
-     * @see #values
-     */
-    public String getValue(String key) {
-      byte[] value = getValue(Bytes.toBytes(key));
+    private <T> T getOrDefault(Bytes key, Function<String, T> function, T defaultValue) {
+      Bytes value = values.get(key);
       if (value == null) {
-        return null;
+        return defaultValue;
+      } else {
+        return function.apply(Bytes.toString(value.get(), value.getOffset(), value.getLength()));
       }
-      return Bytes.toString(value);
     }
 
     /**
@@ -609,26 +560,13 @@ public class TableDescriptorBuilder {
      */
     public ModifyableTableDescriptor setValue(final Bytes key, final Bytes value) {
       if (value == null) {
-        remove(key);
+        values.remove(key);
       } else {
         values.put(key, value);
       }
       return this;
     }
 
-    /**
-     * Setter for storing metadata as a (key, value) pair in {@link #values} map
-     *
-     * @param key The key.
-     * @param value The value. If null, removes the setting.
-     * @return the modifyable TD
-     * @see #values
-     */
-    public ModifyableTableDescriptor setValue(String key, String value) {
-      return setValue(toBytesOrNull(key, Bytes::toBytes),
-              toBytesOrNull(value, Bytes::toBytes));
-    }
-
     private static <T> Bytes toBytesOrNull(T t, Function<T, byte[]> f) {
       if (t == null) {
         return null;
@@ -642,19 +580,10 @@ public class TableDescriptorBuilder {
      *
      * @param key Key whose key and value we're to remove from TableDescriptor
      * parameters.
+     * @return the modifyable TD
      */
-    public void remove(final String key) {
-      remove(new Bytes(Bytes.toBytes(key)));
-    }
-
-    /**
-     * Remove metadata represented by the key from the {@link #values} map
-     *
-     * @param key Key whose key and value we're to remove from TableDescriptor
-     * parameters.
-     */
-    public void remove(Bytes key) {
-      values.remove(key);
+    public ModifyableTableDescriptor removeValue(Bytes key) {
+      return setValue(key, (Bytes) null);
     }
 
     /**
@@ -662,9 +591,10 @@ public class TableDescriptorBuilder {
      *
      * @param key Key whose key and value we're to remove from TableDescriptor
      * parameters.
+     * @return the modifyable TD
      */
-    public void remove(final byte[] key) {
-      remove(new Bytes(key));
+    public ModifyableTableDescriptor removeValue(final byte[] key) {
+      return removeValue(new Bytes(key));
     }
 
     /**
@@ -676,7 +606,7 @@ public class TableDescriptorBuilder {
      */
     @Override
     public boolean isReadOnly() {
-      return isSomething(READONLY_KEY, DEFAULT_READONLY);
+      return getOrDefault(READONLY_KEY, Boolean::valueOf, DEFAULT_READONLY);
     }
 
     /**
@@ -690,7 +620,7 @@ public class TableDescriptorBuilder {
      * @return the modifyable TD
      */
     public ModifyableTableDescriptor setReadOnly(final boolean readOnly) {
-      return setValue(READONLY_KEY, readOnly ? TRUE : FALSE);
+      return setValue(READONLY_KEY, Boolean.toString(readOnly));
     }
 
     /**
@@ -701,7 +631,7 @@ public class TableDescriptorBuilder {
      */
     @Override
     public boolean isCompactionEnabled() {
-      return isSomething(COMPACTION_ENABLED_KEY, DEFAULT_COMPACTION_ENABLED);
+      return getOrDefault(COMPACTION_ENABLED_KEY, Boolean::valueOf, DEFAULT_COMPACTION_ENABLED);
     }
 
     /**
@@ -711,7 +641,7 @@ public class TableDescriptorBuilder {
      * @return the modifyable TD
      */
     public ModifyableTableDescriptor setCompactionEnabled(final boolean isEnable) {
-      return setValue(COMPACTION_ENABLED_KEY, isEnable ? TRUE : FALSE);
+      return setValue(COMPACTION_ENABLED_KEY, Boolean.toString(isEnable));
     }
 
     /**
@@ -722,7 +652,7 @@ public class TableDescriptorBuilder {
      */
     @Override
     public boolean isNormalizationEnabled() {
-      return isSomething(NORMALIZATION_ENABLED_KEY, DEFAULT_NORMALIZATION_ENABLED);
+      return getOrDefault(NORMALIZATION_ENABLED_KEY, Boolean::valueOf, DEFAULT_NORMALIZATION_ENABLED);
     }
 
     /**
@@ -732,7 +662,7 @@ public class TableDescriptorBuilder {
      * @return the modifyable TD
      */
     public ModifyableTableDescriptor setNormalizationEnabled(final boolean isEnable) {
-      return setValue(NORMALIZATION_ENABLED_KEY, isEnable ? TRUE : FALSE);
+      return setValue(NORMALIZATION_ENABLED_KEY, Boolean.toString(isEnable));
     }
 
     /**
@@ -753,18 +683,7 @@ public class TableDescriptorBuilder {
      */
     @Override
     public Durability getDurability() {
-      byte[] durabilityValue = getValue(DURABILITY_KEY);
-      if (durabilityValue == null) {
-        return DEFAULT_DURABLITY;
-      } else {
-        try {
-          return Durability.valueOf(Bytes.toString(durabilityValue));
-        } catch (IllegalArgumentException ex) {
-          LOG.warn("Received " + ex + " because Durability value for TableDescriptor"
-                  + " is not known. Durability:" + Bytes.toString(durabilityValue));
-          return DEFAULT_DURABLITY;
-        }
-      }
+      return getOrDefault(DURABILITY_KEY, Durability::valueOf, DEFAULT_DURABLITY);
     }
 
     /**
@@ -786,7 +705,7 @@ public class TableDescriptorBuilder {
      * @return the modifyable TD
      */
     public ModifyableTableDescriptor setRegionSplitPolicyClassName(String clazz) {
-      return setValue(SPLIT_POLICY, clazz);
+      return setValue(SPLIT_POLICY_KEY, clazz);
     }
 
     /**
@@ -799,7 +718,7 @@ public class TableDescriptorBuilder {
      */
     @Override
     public String getRegionSplitPolicyClassName() {
-      return getValue(SPLIT_POLICY);
+      return getOrDefault(SPLIT_POLICY_KEY, Function.identity(), null);
     }
 
     /**
@@ -813,11 +732,7 @@ public class TableDescriptorBuilder {
      */
     @Override
     public long getMaxFileSize() {
-      byte[] value = getValue(MAX_FILESIZE_KEY);
-      if (value != null) {
-        return Long.parseLong(Bytes.toString(value));
-      }
-      return -1;
+      return getOrDefault(MAX_FILESIZE_KEY, Long::valueOf, (long) -1);
     }
 
     /**
@@ -850,11 +765,7 @@ public class TableDescriptorBuilder {
      */
     @Override
     public long getMemStoreFlushSize() {
-      byte[] value = getValue(MEMSTORE_FLUSHSIZE_KEY);
-      if (value != null) {
-        return Long.parseLong(Bytes.toString(value));
-      }
-      return -1;
+      return getOrDefault(MEMSTORE_FLUSHSIZE_KEY, Long::valueOf, (long) -1);
     }
 
     /**
@@ -879,7 +790,7 @@ public class TableDescriptorBuilder {
      * @return the modifyable TD
      */
     public ModifyableTableDescriptor setFlushPolicyClassName(String clazz) {
-      return setValue(FLUSH_POLICY, clazz);
+      return setValue(FLUSH_POLICY_KEY, clazz);
     }
 
     /**
@@ -892,46 +803,45 @@ public class TableDescriptorBuilder {
      */
     @Override
     public String getFlushPolicyClassName() {
-      return getValue(FLUSH_POLICY);
+      return getOrDefault(FLUSH_POLICY_KEY, Function.identity(), null);
     }
 
     /**
      * Adds a column family. For the updating purpose please use
-     * {@link #modifyFamily(HColumnDescriptor)} instead.
+     * {@link #modifyColumnFamily(ColumnFamilyDescriptor)} instead.
      *
-     * @param family HColumnDescriptor of family to add.
+     * @param family to add.
      * @return the modifyable TD
      */
-    public ModifyableTableDescriptor addFamily(final HColumnDescriptor family) {
+    public ModifyableTableDescriptor addColumnFamily(final ColumnFamilyDescriptor family) {
       if (family.getName() == null || family.getName().length <= 0) {
         throw new IllegalArgumentException("Family name cannot be null or empty");
       }
-      if (hasFamily(family.getName())) {
+      if (hasColumnFamily(family.getName())) {
         throw new IllegalArgumentException("Family '"
                 + family.getNameAsString() + "' already exists so cannot be added");
       }
-      return setFamily(family);
+      return putColumnFamily(family);
     }
 
     /**
      * Modifies the existing column family.
      *
-     * @param family HColumnDescriptor of family to update
+     * @param family to update
      * @return this (for chained invocation)
      */
-    public ModifyableTableDescriptor modifyFamily(final HColumnDescriptor family) {
+    public ModifyableTableDescriptor modifyColumnFamily(final ColumnFamilyDescriptor family) {
       if (family.getName() == null || family.getName().length <= 0) {
         throw new IllegalArgumentException("Family name cannot be null or empty");
       }
-      if (!hasFamily(family.getName())) {
+      if (!hasColumnFamily(family.getName())) {
         throw new IllegalArgumentException("Column family '" + family.getNameAsString()
                 + "' does not exist");
       }
-      return setFamily(family);
+      return putColumnFamily(family);
     }
 
-    // TODO: make this private after removing the UnmodifyableTableDescriptor
-    protected ModifyableTableDescriptor setFamily(HColumnDescriptor family) {
+    private ModifyableTableDescriptor putColumnFamily(ColumnFamilyDescriptor family) {
       families.put(family.getName(), family);
       return this;
     }
@@ -943,7 +853,7 @@ public class TableDescriptorBuilder {
      * @return true if the table contains the specified family name
      */
     @Override
-    public boolean hasFamily(final byte[] familyName) {
+    public boolean hasColumnFamily(final byte[] familyName) {
       return families.containsKey(familyName);
     }
 
@@ -1085,6 +995,7 @@ public class TableDescriptorBuilder {
      * parameter. Checks if the obj passed is an instance of ModifyableTableDescriptor,
      * if yes then the contents of the descriptors are compared.
      *
+     * @param obj The object to compare
   * @return true if the contents of the two descriptors exactly match
      *
      * @see java.lang.Object#equals(java.lang.Object)
@@ -1104,13 +1015,13 @@ public class TableDescriptorBuilder {
     }
 
     /**
-     * @see java.lang.Object#hashCode()
+     * @return hash code
      */
     @Override
     public int hashCode() {
       int result = this.name.hashCode();
       if (this.families.size() > 0) {
-        for (HColumnDescriptor e : this.families.values()) {
+        for (ColumnFamilyDescriptor e : this.families.values()) {
           result ^= e.hashCode();
         }
       }
@@ -1131,52 +1042,12 @@ public class TableDescriptorBuilder {
      */
     @Override
     public int compareTo(final ModifyableTableDescriptor other) {
-      int result = this.name.compareTo(other.name);
-      if (result == 0) {
-        result = families.size() - other.families.size();
-      }
-      if (result == 0 && families.size() != other.families.size()) {
-        result = Integer.valueOf(families.size()).compareTo(other.families.size());
-      }
-      if (result == 0) {
-        for (Iterator<HColumnDescriptor> it = families.values().iterator(),
-                it2 = other.families.values().iterator(); it.hasNext();) {
-          result = it.next().compareTo(it2.next());
-          if (result != 0) {
-            break;
-          }
-        }
-      }
-      if (result == 0) {
-        // punt on comparison for ordering, just calculate difference
-        result = this.values.hashCode() - other.values.hashCode();
-        if (result < 0) {
-          result = -1;
-        } else if (result > 0) {
-          result = 1;
-        }
-      }
-      if (result == 0) {
-        result = this.configuration.hashCode() - other.configuration.hashCode();
-        if (result < 0) {
-          result = -1;
-        } else if (result > 0) {
-          result = 1;
-        }
-      }
-      return result;
+      return TableDescriptor.COMPARATOR.compare(this, other);
     }
 
-    /**
-     * Returns an unmodifiable collection of all the {@link HColumnDescriptor}
-     * of all the column families of the table.
-     *
-     * @return Immutable collection of {@link HColumnDescriptor} of all the
-     * column families.
-     */
     @Override
-    public Collection<HColumnDescriptor> getFamilies() {
-      return Collections.unmodifiableCollection(this.families.values());
+    public ColumnFamilyDescriptor[] getColumnFamilies() {
+      return families.values().toArray(new ColumnFamilyDescriptor[families.size()]);
     }
 
     /**
@@ -1185,8 +1056,7 @@ public class TableDescriptorBuilder {
      */
     @Override
     public boolean hasSerialReplicationScope() {
-      return getFamilies()
-              .stream()
+      return Stream.of(getColumnFamilies())
               .anyMatch(column -> column.getScope() == HConstants.REPLICATION_SCOPE_SERIAL);
     }
 
@@ -1195,15 +1065,7 @@ public class TableDescriptorBuilder {
      */
     @Override
     public int getRegionReplication() {
-      return getIntValue(REGION_REPLICATION_KEY, DEFAULT_REGION_REPLICATION);
-    }
-
-    private int getIntValue(Bytes key, int defaultVal) {
-      byte[] val = getValue(key);
-      if (val == null || val.length == 0) {
-        return defaultVal;
-      }
-      return Integer.parseInt(Bytes.toString(val));
+      return getOrDefault(REGION_REPLICATION_KEY, Integer::valueOf, DEFAULT_REGION_REPLICATION);
     }
 
     /**
@@ -1213,8 +1075,7 @@ public class TableDescriptorBuilder {
      * @return the modifyable TD
      */
     public ModifyableTableDescriptor setRegionReplication(int regionReplication) {
-      return setValue(REGION_REPLICATION_KEY,
-              new Bytes(Bytes.toBytes(Integer.toString(regionReplication))));
+      return setValue(REGION_REPLICATION_KEY, Integer.toString(regionReplication));
     }
 
     /**
@@ -1222,7 +1083,7 @@ public class TableDescriptorBuilder {
      */
     @Override
     public boolean hasRegionMemstoreReplication() {
-      return isSomething(REGION_MEMSTORE_REPLICATION_KEY, DEFAULT_REGION_MEMSTORE_REPLICATION);
+      return getOrDefault(REGION_MEMSTORE_REPLICATION_KEY, Boolean::valueOf, DEFAULT_REGION_MEMSTORE_REPLICATION);
     }
 
     /**
@@ -1236,7 +1097,7 @@ public class TableDescriptorBuilder {
      * @return the modifyable TD
      */
     public ModifyableTableDescriptor setRegionMemstoreReplication(boolean memstoreReplication) {
-      setValue(REGION_MEMSTORE_REPLICATION_KEY, memstoreReplication ? TRUE : FALSE);
+      setValue(REGION_MEMSTORE_REPLICATION_KEY, Boolean.toString(memstoreReplication));
       // If the memstore replication is setup, we do not have to wait for observing a flush event
       // from primary before starting to serve reads, because gaps from replication is not applicable
       return setConfiguration(RegionReplicaUtil.REGION_REPLICA_WAIT_FOR_PRIMARY_FLUSH_CONF_KEY,
@@ -1249,48 +1110,24 @@ public class TableDescriptorBuilder {
 
     @Override
     public int getPriority() {
-      return getIntValue(PRIORITY_KEY, DEFAULT_PRIORITY);
+      return getOrDefault(PRIORITY_KEY, Integer::valueOf, DEFAULT_PRIORITY);
     }
 
     /**
      * Returns all the column family names of the current table. The map of
-     * TableDescriptor contains mapping of family name to HColumnDescriptors.
+     * TableDescriptor contains mapping of family name to ColumnFamilyDescriptor.
      * This returns all the keys of the family map which represents the column
      * family names of the table.
      *
      * @return Immutable sorted set of the keys of the families.
      */
     @Override
-    public Set<byte[]> getFamiliesKeys() {
+    public Set<byte[]> getColumnFamilyNames() {
       return Collections.unmodifiableSet(this.families.keySet());
     }
 
     /**
-     * Returns the count of the column families of the table.
-     *
-     * @return Count of column families of the table
-     */
-    @Override
-    public int getColumnFamilyCount() {
-      return families.size();
-    }
-
-    /**
-     * Returns an array all the {@link HColumnDescriptor} of the column families
-     * of the table.
-     *
-     * @return Array of all the HColumnDescriptors of the current table
-     *
-     * @see #getFamilies()
-     */
-    @Override
-    public HColumnDescriptor[] getColumnFamilies() {
-      Collection<HColumnDescriptor> hColumnDescriptors = getFamilies();
-      return hColumnDescriptors.toArray(new HColumnDescriptor[hColumnDescriptors.size()]);
-    }
-
-    /**
-     * Returns the HColumnDescriptor for a specific column family with name as
+     * Returns the ColumnFamilyDescriptor for a specific column family with name as
      * specified by the parameter column.
      *
      * @param column Column family name
@@ -1298,19 +1135,19 @@ public class TableDescriptorBuilder {
      * passed in column.
      */
     @Override
-    public HColumnDescriptor getFamily(final byte[] column) {
+    public ColumnFamilyDescriptor getColumnFamily(final byte[] column) {
       return this.families.get(column);
     }
 
     /**
-     * Removes the HColumnDescriptor with name specified by the parameter column
+     * Removes the ColumnFamilyDescriptor with name specified by the parameter column
      * from the table descriptor
      *
      * @param column Name of the column family to be removed.
      * @return Column descriptor for the passed family name or the family on
      * passed in column.
      */
-    public HColumnDescriptor removeFamily(final byte[] column) {
+    public ColumnFamilyDescriptor removeColumnFamily(final byte[] column) {
       return this.families.remove(column);
     }
 
@@ -1523,7 +1360,7 @@ public class TableDescriptorBuilder {
       }
       // if we found a match, remove it
       if (match != null) {
-        remove(match);
+        ModifyableTableDescriptor.this.removeValue(match);
       }
     }
 
@@ -1535,27 +1372,22 @@ public class TableDescriptorBuilder {
     // used by admin.rb:alter(table_name,*args) to update owner.
     @Deprecated
     public ModifyableTableDescriptor setOwnerString(String ownerString) {
-      if (ownerString != null) {
-        setValue(OWNER_KEY, ownerString);
-      } else {
-        remove(OWNER_KEY);
-      }
-      return this;
+      return setValue(OWNER_KEY, ownerString);
     }
 
     @Override
     @Deprecated
     public String getOwnerString() {
-      if (getValue(OWNER_KEY) != null) {
-        return Bytes.toString(getValue(OWNER_KEY));
-      }
       // Note that every table should have an owner (i.e. should have OWNER_KEY set).
       // hbase:meta should return system user as owner, not null (see
       // MasterFileSystem.java:bootstrap()).
-      return null;
+      return getOrDefault(OWNER_KEY, Function.identity(), null);
     }
 
-    public byte[] toByteArray() {
+    /**
+     * @return the bytes in pb format
+     */
+    private byte[] toByteArray() {
       return ProtobufUtil.prependPBMagic(ProtobufUtil.convertToTableSchema(this).toByteArray());
     }
 
@@ -1567,7 +1399,7 @@ public class TableDescriptorBuilder {
      * @throws DeserializationException
      * @see #toByteArray()
      */
-    public static TableDescriptor parseFrom(final byte[] bytes)
+    private static TableDescriptor parseFrom(final byte[] bytes)
             throws DeserializationException {
       if (!ProtobufUtil.isPBMagicPrefix(bytes)) {
         throw new DeserializationException("Expected PB encoded ModifyableTableDescriptor");
@@ -1609,7 +1441,7 @@ public class TableDescriptorBuilder {
      */
     public ModifyableTableDescriptor setConfiguration(String key, String value) {
       if (value == null) {
-        removeConfiguration(key);
+        configuration.remove(key);
       } else {
         configuration.put(key, value);
       }
@@ -1620,9 +1452,15 @@ public class TableDescriptorBuilder {
      * Remove a config setting represented by the key from the
      * {@link #configuration} map
      * @param key Config key.
+     * @return the modifyable TD
      */
-    public void removeConfiguration(final String key) {
-      configuration.remove(key);
+    public ModifyableTableDescriptor removeConfiguration(final String key) {
+      return setConfiguration(key, null);
+    }
+
+    @Override
+    public int getColumnFamilyCount() {
+      return families.size();
     }
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/72cb7d97/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
index 5c4dd55..b196911 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
@@ -63,6 +63,8 @@ import org.apache.hadoop.hbase.TagUtil;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Append;
 import org.apache.hadoop.hbase.client.ClientUtil;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
 import org.apache.hadoop.hbase.client.CompactionState;
 import org.apache.hadoop.hbase.client.Consistency;
 import org.apache.hadoop.hbase.client.Cursor;
@@ -2924,7 +2926,7 @@ public final class ProtobufUtil {
    * @param hcd the HColummnDescriptor
    * @return Convert this instance to a the pb column family type
    */
-  public static ColumnFamilySchema convertToColumnFamilySchema(HColumnDescriptor hcd) {
+  public static ColumnFamilySchema convertToColumnFamilySchema(ColumnFamilyDescriptor hcd) {
     ColumnFamilySchema.Builder builder = ColumnFamilySchema.newBuilder();
     builder.setName(UnsafeByteOperations.unsafeWrap(hcd.getName()));
     for (Map.Entry<Bytes, Bytes> e : hcd.getValues().entrySet()) {
@@ -2947,6 +2949,7 @@ public final class ProtobufUtil {
    * @param cfs the ColumnFamilySchema
    * @return An {@link HColumnDescriptor} made from the passed in <code>cfs</code>
    */
+  @Deprecated
   public static HColumnDescriptor convertToHColumnDesc(final ColumnFamilySchema cfs) {
     // Use the empty constructor so we preserve the initial values set on construction for things
     // like maxVersion.  Otherwise, we pick up wrong values on deserialization which makes for
@@ -2962,6 +2965,22 @@ public final class ProtobufUtil {
   }
 
   /**
+   * Converts a ColumnFamilySchema to ColumnFamilyDescriptor
+   * @param cfs the ColumnFamilySchema
+   * @return A {@link ColumnFamilyDescriptor} made from the passed in <code>cfs</code>
+   */
+  public static ColumnFamilyDescriptor convertToColumnDesc(final ColumnFamilySchema cfs) {
+    // Seed the builder with only the family name so we preserve the default values set on
+    // construction for things like maxVersion. Otherwise, we pick up wrong values on
+    // deserialization which makes for unrelated-looking test failures that are hard to trace
+    // back to here.
+    ColumnFamilyDescriptorBuilder builder
+      = ColumnFamilyDescriptorBuilder.newBuilder(cfs.getName().toByteArray());
+    cfs.getAttributesList().forEach(a -> builder.setValue(a.getFirst().toByteArray(), a.getSecond().toByteArray()));
+    cfs.getConfigurationList().forEach(a -> builder.setConfiguration(a.getName(), a.getValue()));
+    return builder.build();
+  }
+
+  /**
    * Converts an HTableDescriptor to TableSchema
    * @param htd the HTableDescriptor
    * @return Convert the current {@link HTableDescriptor} into a pb TableSchema instance.
@@ -2975,7 +2994,7 @@ public final class ProtobufUtil {
       aBuilder.setSecond(UnsafeByteOperations.unsafeWrap(e.getValue().get()));
       builder.addAttributes(aBuilder.build());
     }
-    for (HColumnDescriptor hcd : htd.getColumnFamilies()) {
+    for (ColumnFamilyDescriptor hcd : htd.getColumnFamilies()) {
       builder.addColumnFamilies(convertToColumnFamilySchema(hcd));
     }
     for (Map.Entry<String, String> e : htd.getConfiguration().entrySet()) {
@@ -3024,8 +3043,8 @@ public final class ProtobufUtil {
       = TableDescriptorBuilder.newBuilder(ProtobufUtil.toTableName(ts.getTableName()));
     ts.getColumnFamiliesList()
       .stream()
-      .map(ProtobufUtil::convertToHColumnDesc)
-      .forEach(builder::addFamily);
+      .map(ProtobufUtil::convertToColumnDesc)
+      .forEach(builder::addColumnFamily);
     ts.getAttributesList()
       .forEach(a -> builder.setValue(a.getFirst().toByteArray(), a.getSecond().toByteArray()));
     ts.getConfigurationList()

http://git-wip-us.apache.org/repos/asf/hbase/blob/72cb7d97/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java
index 134c319..67f7d0a 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java
@@ -25,7 +25,6 @@ import java.util.regex.Pattern;
 
 import org.apache.hadoop.hbase.CellScannable;
 import org.apache.hadoop.hbase.DoNotRetryIOException;
-import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
@@ -35,6 +34,7 @@ import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Action;
 import org.apache.hadoop.hbase.client.Append;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.Get;
@@ -1031,7 +1031,7 @@ public final class RequestConverter {
    */
   public static AddColumnRequest buildAddColumnRequest(
       final TableName tableName,
-      final HColumnDescriptor column,
+      final ColumnFamilyDescriptor column,
       final long nonceGroup,
       final long nonce) {
     AddColumnRequest.Builder builder = AddColumnRequest.newBuilder();
@@ -1071,7 +1071,7 @@ public final class RequestConverter {
    */
   public static ModifyColumnRequest buildModifyColumnRequest(
       final TableName tableName,
-      final HColumnDescriptor column,
+      final ColumnFamilyDescriptor column,
       final long nonceGroup,
       final long nonce) {
     ModifyColumnRequest.Builder builder = ModifyColumnRequest.newBuilder();

http://git-wip-us.apache.org/repos/asf/hbase/blob/72cb7d97/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptor.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptor.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptor.java
index cabf557..cfbfccb 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptor.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptor.java
@@ -37,6 +37,7 @@ import org.junit.experimental.categories.Category;
 
 /** Tests the HColumnDescriptor with appropriate arguments */
 @Category({MiscTests.class, SmallTests.class})
+@Deprecated
 public class TestHColumnDescriptor {
   @Test
   public void testPb() throws DeserializationException {
@@ -71,6 +72,7 @@ public class TestHColumnDescriptor {
     assertTrue(hcd.equals(deserializedHcd));
     assertEquals(v, hcd.getBlocksize());
     assertEquals(v, hcd.getTimeToLive());
+    assertEquals(v, hcd.getScope());
     assertEquals(hcd.getValue("a"), deserializedHcd.getValue("a"));
     assertEquals(hcd.getMaxVersions(), deserializedHcd.getMaxVersions());
     assertEquals(hcd.getMinVersions(), deserializedHcd.getMinVersions());
@@ -92,7 +94,7 @@ public class TestHColumnDescriptor {
     try {
       new HColumnDescriptor("".getBytes());
     } catch (IllegalArgumentException e) {
-      assertEquals("Family name can not be empty", e.getLocalizedMessage());
+      assertEquals("Column Family name can not be empty", e.getLocalizedMessage());
     }
   }
 
@@ -115,9 +117,11 @@ public class TestHColumnDescriptor {
     boolean isMob = true;
     long threshold = 1000;
     String policy = "weekly";
-    String isMobString = PrettyPrinter.format(Bytes.toStringBinary(Bytes.toBytes(isMob)),
+    // We unify the format of all values saved in the descriptor.
+    // Each value is stored as bytes of string.
+    String isMobString = PrettyPrinter.format(String.valueOf(isMob),
             HColumnDescriptor.getUnit(HColumnDescriptor.IS_MOB));
-    String thresholdString = PrettyPrinter.format(Bytes.toStringBinary(Bytes.toBytes(threshold)),
+    String thresholdString = PrettyPrinter.format(String.valueOf(threshold),
             HColumnDescriptor.getUnit(HColumnDescriptor.MOB_THRESHOLD));
     String policyString = PrettyPrinter.format(Bytes.toStringBinary(Bytes.toBytes(policy)),
         HColumnDescriptor.getUnit(HColumnDescriptor.MOB_COMPACT_PARTITION_POLICY));

http://git-wip-us.apache.org/repos/asf/hbase/blob/72cb7d97/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHTableDescriptor.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHTableDescriptor.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHTableDescriptor.java
index bcff565..9bbdf50 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHTableDescriptor.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHTableDescriptor.java
@@ -43,6 +43,7 @@ import org.junit.rules.TestName;
  * Test setting values in the descriptor
  */
 @Category({MiscTests.class, SmallTests.class})
+@Deprecated
 public class TestHTableDescriptor {
   private static final Log LOG = LogFactory.getLog(TestHTableDescriptor.class);
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/72cb7d97/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestColumnFamilyDescriptorBuilder.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestColumnFamilyDescriptorBuilder.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestColumnFamilyDescriptorBuilder.java
new file mode 100644
index 0000000..5fe329d
--- /dev/null
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestColumnFamilyDescriptorBuilder.java
@@ -0,0 +1,185 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.client;
+
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.KeepDeletedCells;
+import org.apache.hadoop.hbase.exceptions.DeserializationException;
+import org.apache.hadoop.hbase.exceptions.HBaseException;
+import org.apache.hadoop.hbase.io.compress.Compression;
+import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
+import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
+import org.apache.hadoop.hbase.regionserver.BloomType;
+import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.util.BuilderStyleTest;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.PrettyPrinter;
+import org.junit.Assert;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category({MiscTests.class, SmallTests.class})
+public class TestColumnFamilyDescriptorBuilder {
+  @Test
+  public void testBuilder() throws DeserializationException {
+    ColumnFamilyDescriptorBuilder builder
+      = ColumnFamilyDescriptorBuilder.newBuilder(HConstants.CATALOG_FAMILY)
+            .setInMemory(true)
+            .setScope(HConstants.REPLICATION_SCOPE_LOCAL)
+            .setBloomFilterType(BloomType.NONE)
+            .setCacheDataInL1(true);
+    final int v = 123;
+    builder.setBlocksize(v);
+    builder.setTimeToLive(v);
+    builder.setBlockCacheEnabled(!HColumnDescriptor.DEFAULT_BLOCKCACHE);
+    builder.setValue(Bytes.toBytes("a"), Bytes.toBytes("b"));
+    builder.setMaxVersions(v);
+    assertEquals(v, builder.build().getMaxVersions());
+    builder.setMinVersions(v);
+    assertEquals(v, builder.build().getMinVersions());
+    builder.setKeepDeletedCells(KeepDeletedCells.TRUE);
+    builder.setInMemory(!HColumnDescriptor.DEFAULT_IN_MEMORY);
+    boolean inmemory = builder.build().isInMemory();
+    builder.setScope(v);
+    builder.setDataBlockEncoding(DataBlockEncoding.FAST_DIFF);
+    builder.setBloomFilterType(BloomType.ROW);
+    builder.setCompressionType(Algorithm.SNAPPY);
+    builder.setMobEnabled(true);
+    builder.setMobThreshold(1000L);
+    builder.setDFSReplication((short) v);
+
+    ColumnFamilyDescriptor hcd = builder.build();
+    byte [] bytes = ColumnFamilyDescriptorBuilder.toByteArray(hcd);
+    ColumnFamilyDescriptor deserializedHcd = ColumnFamilyDescriptorBuilder.parseFrom(bytes);
+    assertTrue(hcd.equals(deserializedHcd));
+    assertEquals(v, hcd.getBlocksize());
+    assertEquals(v, hcd.getTimeToLive());
+    assertTrue(Bytes.equals(hcd.getValue(Bytes.toBytes("a")), deserializedHcd.getValue(Bytes.toBytes("a"))));
+    assertEquals(hcd.getMaxVersions(), deserializedHcd.getMaxVersions());
+    assertEquals(hcd.getMinVersions(), deserializedHcd.getMinVersions());
+    assertEquals(hcd.getKeepDeletedCells(), deserializedHcd.getKeepDeletedCells());
+    assertEquals(inmemory, deserializedHcd.isInMemory());
+    assertEquals(hcd.getScope(), deserializedHcd.getScope());
+    assertTrue(deserializedHcd.getCompressionType().equals(Compression.Algorithm.SNAPPY));
+    assertTrue(deserializedHcd.getDataBlockEncoding().equals(DataBlockEncoding.FAST_DIFF));
+    assertTrue(deserializedHcd.getBloomFilterType().equals(BloomType.ROW));
+    assertEquals(hcd.isMobEnabled(), deserializedHcd.isMobEnabled());
+    assertEquals(hcd.getMobThreshold(), deserializedHcd.getMobThreshold());
+    assertEquals(v, deserializedHcd.getDFSReplication());
+  }
+
+  @Test
+  /** Tests ColumnFamilyDescriptorBuilder with an empty family name */
+  public void testHColumnDescriptorShouldThrowIAEWhenFamiliyNameEmpty()
+      throws Exception {
+    try {
+      ColumnFamilyDescriptorBuilder.newBuilder("".getBytes()).build();
+    } catch (IllegalArgumentException e) {
+      assertEquals("Column Family name can not be empty", e.getLocalizedMessage());
+    }
+  }
+
+  /**
+   * Test that we add and remove strings from configuration properly.
+   */
+  @Test
+  public void testAddGetRemoveConfiguration() {
+    ColumnFamilyDescriptorBuilder builder
+      = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("foo"));
+    String key = "Some";
+    String value = "value";
+    builder.setConfiguration(key, value);
+    assertEquals(value, builder.build().getConfigurationValue(key));
+    builder.removeConfiguration(key);
+    assertEquals(null, builder.build().getConfigurationValue(key));
+  }
+
+  @Test
+  public void testMobValuesInHColumnDescriptorShouldReadable() {
+    boolean isMob = true;
+    long threshold = 1000;
+    String policy = "weekly";
+    // We unify the format of all values saved in the descriptor.
+    // Each value is stored as bytes of string.
+    String isMobString = PrettyPrinter.format(String.valueOf(isMob),
+            HColumnDescriptor.getUnit(HColumnDescriptor.IS_MOB));
+    String thresholdString = PrettyPrinter.format(String.valueOf(threshold),
+            HColumnDescriptor.getUnit(HColumnDescriptor.MOB_THRESHOLD));
+    String policyString = PrettyPrinter.format(Bytes.toStringBinary(Bytes.toBytes(policy)),
+        HColumnDescriptor.getUnit(HColumnDescriptor.MOB_COMPACT_PARTITION_POLICY));
+    assertEquals(String.valueOf(isMob), isMobString);
+    assertEquals(String.valueOf(threshold), thresholdString);
+    assertEquals(String.valueOf(policy), policyString);
+  }
+
+  @Test
+  public void testClassMethodsAreBuilderStyle() {
+    /* ColumnFamilyDescriptorBuilder should have a builder style setup where setXXX/addXXX methods
+     * can be chained together:
+     * . For example:
+     * ColumnFamilyDescriptor hcd
+     *   = ColumnFamilyDescriptorBuilder.newBuilder(name)
+     *     .setFoo(foo)
+     *     .setBar(bar)
+     *     .setBuz(buz)
+     *     .build()
+     *
+     * This test ensures that all methods starting with "set" return the declaring object
+     */
+
+    BuilderStyleTest.assertClassesAreBuilderStyle(ColumnFamilyDescriptorBuilder.class);
+  }
+
+  @Test
+  public void testSetTimeToLive() throws HBaseException {
+    String ttl;
+    ColumnFamilyDescriptorBuilder builder
+      = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("foo"));
+
+    ttl = "50000";
+    builder.setTimeToLive(ttl);
+    Assert.assertEquals(50000, builder.build().getTimeToLive());
+
+    ttl = "50000 seconds";
+    builder.setTimeToLive(ttl);
+    Assert.assertEquals(50000, builder.build().getTimeToLive());
+
+    ttl = "";
+    builder.setTimeToLive(ttl);
+    Assert.assertEquals(0, builder.build().getTimeToLive());
+
+    ttl = "FOREVER";
+    builder.setTimeToLive(ttl);
+    Assert.assertEquals(HConstants.FOREVER, builder.build().getTimeToLive());
+
+    ttl = "1 HOUR 10 minutes 1 second";
+    builder.setTimeToLive(ttl);
+    Assert.assertEquals(4201, builder.build().getTimeToLive());
+
+    ttl = "500 Days 23 HOURS";
+    builder.setTimeToLive(ttl);
+    Assert.assertEquals(43282800, builder.build().getTimeToLive());
+
+    ttl = "43282800 SECONDS (500 Days 23 hours)";
+    builder.setTimeToLive(ttl);
+    Assert.assertEquals(43282800, builder.build().getTimeToLive());
+  }
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/72cb7d97/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestImmutableHColumnDescriptor.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestImmutableHColumnDescriptor.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestImmutableHColumnDescriptor.java
new file mode 100644
index 0000000..12f1bec
--- /dev/null
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestImmutableHColumnDescriptor.java
@@ -0,0 +1,97 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.client;
+
+import java.util.Arrays;
+import java.util.List;
+import java.util.function.Consumer;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.KeepDeletedCells;
+import org.apache.hadoop.hbase.MemoryCompactionPolicy;
+import org.apache.hadoop.hbase.io.compress.Compression;
+import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
+import org.apache.hadoop.hbase.regionserver.BloomType;
+import org.apache.hadoop.hbase.testclassification.ClientTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.util.BuilderStyleTest;
+import org.apache.hadoop.hbase.util.Bytes;
+import static org.junit.Assert.fail;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.rules.TestName;
+
+@Category({ClientTests.class, SmallTests.class})
+public class TestImmutableHColumnDescriptor {
+  @Rule
+  public TestName name = new TestName();
+  private static final List<Consumer<ImmutableHColumnDescriptor>> TEST_FUNCTION = Arrays.asList(
+    hcd -> hcd.setValue("a", "a"),
+    hcd -> hcd.setValue(Bytes.toBytes("a"), Bytes.toBytes("a")),
+    hcd -> hcd.setConfiguration("aaa", "ccc"),
+    hcd -> hcd.remove(Bytes.toBytes("aaa")),
+    hcd -> hcd.removeConfiguration("xxx"),
+    hcd -> hcd.setBlockCacheEnabled(false),
+    hcd -> hcd.setBlocksize(10),
+    hcd -> hcd.setBloomFilterType(BloomType.NONE),
+    hcd -> hcd.setCacheBloomsOnWrite(false),
+    hcd -> hcd.setCacheDataInL1(true),
+    hcd -> hcd.setCacheDataOnWrite(true),
+    hcd -> hcd.setCacheIndexesOnWrite(true),
+    hcd -> hcd.setCompactionCompressionType(Compression.Algorithm.LZO),
+    hcd -> hcd.setCompressTags(true),
+    hcd -> hcd.setCompressionType(Compression.Algorithm.LZO),
+    hcd -> hcd.setDFSReplication((short) 10),
+    hcd -> hcd.setDataBlockEncoding(DataBlockEncoding.NONE),
+    hcd -> hcd.setEncryptionKey(Bytes.toBytes("xxx")),
+    hcd -> hcd.setEncryptionType("xxx"),
+    hcd -> hcd.setEvictBlocksOnClose(true),
+    hcd -> hcd.setInMemory(true),
+    hcd -> hcd.setInMemoryCompaction(MemoryCompactionPolicy.NONE),
+    hcd -> hcd.setKeepDeletedCells(KeepDeletedCells.FALSE),
+    hcd -> hcd.setMaxVersions(1000),
+    hcd -> hcd.setMinVersions(10),
+    hcd -> hcd.setMobCompactPartitionPolicy(MobCompactPartitionPolicy.DAILY),
+    hcd -> hcd.setMobEnabled(true),
+    hcd -> hcd.setMobThreshold(10),
+    hcd -> hcd.setPrefetchBlocksOnOpen(true),
+    hcd -> hcd.setScope(0),
+    hcd -> hcd.setStoragePolicy("aaa"),
+    hcd -> hcd.setTimeToLive(100),
+    hcd -> hcd.setVersions(1, 10)
+  );
+
+  @Test
+  public void testImmutable() {
+    ImmutableHColumnDescriptor hcd = new ImmutableHColumnDescriptor(
+      new HColumnDescriptor(Bytes.toBytes(name.getMethodName())));
+    for (int i = 0; i != TEST_FUNCTION.size(); ++i) {
+      try {
+        TEST_FUNCTION.get(i).accept(hcd);
+        fail("ImmutableHTableDescriptor can't be modified!!! The index of method is " + i);
+      } catch (UnsupportedOperationException e) {
+      }
+    }
+  }
+
+  @Test
+  public void testClassMethodsAreBuilderStyle() {
+    BuilderStyleTest.assertClassesAreBuilderStyle(ImmutableHColumnDescriptor.class);
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hbase/blob/72cb7d97/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestImmutableHTableDescriptor.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestImmutableHTableDescriptor.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestImmutableHTableDescriptor.java
index 91ef72a..6c20bc8 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestImmutableHTableDescriptor.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestImmutableHTableDescriptor.java
@@ -72,11 +72,11 @@ public class TestImmutableHTableDescriptor {
 
   @Test
   public void testImmutable() {
-    ImmutableHTableDescriptor htd = new ImmutableHTableDescriptor(
-      new HTableDescriptor(TableName.valueOf(name.getMethodName())));
+    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name.getMethodName()));
+    ImmutableHTableDescriptor immutableHtd = new ImmutableHTableDescriptor(htd);
     TEST_FUNCTION.forEach(f -> {
       try {
-        f.accept(htd);
+        f.accept(immutableHtd);
         fail("ImmutableHTableDescriptor can't be modified!!!");
       } catch (UnsupportedOperationException e) {
       }
@@ -84,6 +84,27 @@ public class TestImmutableHTableDescriptor {
   }
 
   @Test
+  public void testImmutableHColumnDescriptor() {
+    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name.getMethodName()));
+    htd.addFamily(new HColumnDescriptor(Bytes.toBytes("family")));
+    ImmutableHTableDescriptor immutableHtd = new ImmutableHTableDescriptor(htd);
+    for (HColumnDescriptor hcd : immutableHtd.getColumnFamilies()) {
+      assertReadOnly(hcd);
+    }
+    for (HColumnDescriptor hcd : immutableHtd.getFamilies()) {
+      assertReadOnly(hcd);
+    }
+  }
+
+  private void assertReadOnly(HColumnDescriptor hcd) {
+    try {
+      hcd.setBlocksize(10);
+      fail("ImmutableHColumnDescriptor can't be modified!!!");
+    } catch (UnsupportedOperationException e) {
+    }
+  }
+
+  @Test
   public void testClassMethodsAreBuilderStyle() {
   /* ImmutableHTableDescriptor should have a builder style setup where setXXX/addXXX methods
    * can be chainable together:

http://git-wip-us.apache.org/repos/asf/hbase/blob/72cb7d97/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestTableDescriptorBuilder.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestTableDescriptorBuilder.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestTableDescriptorBuilder.java
index c4ecacf..bc1c19e 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestTableDescriptorBuilder.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestTableDescriptorBuilder.java
@@ -28,7 +28,6 @@ import java.util.regex.Pattern;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
@@ -109,7 +108,7 @@ public class TestTableDescriptorBuilder {
           .build();
 
     byte [] bytes = TableDescriptorBuilder.toByteArray(htd);
-    TableDescriptor deserializedHtd = TableDescriptorBuilder.newBuilder(bytes).build();
+    TableDescriptor deserializedHtd = TableDescriptorBuilder.parseFrom(bytes);
     assertEquals(htd, deserializedHtd);
     assertEquals(v, deserializedHtd.getMaxFileSize());
     assertTrue(deserializedHtd.isReadOnly());
@@ -195,7 +194,7 @@ public class TestTableDescriptorBuilder {
             .build();
     assertTrue(Bytes.equals(value, desc.getValue(key)));
     desc = TableDescriptorBuilder.newBuilder(desc)
-            .remove(key)
+            .removeValue(key)
             .build();
     assertTrue(desc.getValue(key) == null);
   }
@@ -299,24 +298,26 @@ public class TestTableDescriptorBuilder {
   @Test
   public void testModifyFamily() {
     byte[] familyName = Bytes.toBytes("cf");
-    HColumnDescriptor hcd = new HColumnDescriptor(familyName);
-    hcd.setBlocksize(1000);
-    hcd.setDFSReplication((short) 3);
+    ColumnFamilyDescriptor hcd = ColumnFamilyDescriptorBuilder.newBuilder(familyName)
+            .setBlocksize(1000)
+            .setDFSReplication((short) 3)
+            .build();
     TableDescriptor htd
       = TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName()))
-              .addFamily(hcd)
+              .addColumnFamily(hcd)
               .build();
 
-    assertEquals(1000, htd.getFamily(familyName).getBlocksize());
-    assertEquals(3, htd.getFamily(familyName).getDFSReplication());
-    hcd = new HColumnDescriptor(familyName);
-    hcd.setBlocksize(2000);
-    hcd.setDFSReplication((short) 1);
+    assertEquals(1000, htd.getColumnFamily(familyName).getBlocksize());
+    assertEquals(3, htd.getColumnFamily(familyName).getDFSReplication());
+    hcd = ColumnFamilyDescriptorBuilder.newBuilder(familyName)
+            .setBlocksize(2000)
+            .setDFSReplication((short) 1)
+            .build();
     htd = TableDescriptorBuilder.newBuilder(htd)
-              .modifyFamily(hcd)
+              .modifyColumnFamily(hcd)
               .build();
-    assertEquals(2000, htd.getFamily(familyName).getBlocksize());
-    assertEquals(1, htd.getFamily(familyName).getDFSReplication());
+    assertEquals(2000, htd.getColumnFamily(familyName).getBlocksize());
+    assertEquals(1, htd.getColumnFamily(familyName).getDFSReplication());
   }
 
   @Test(expected=IllegalArgumentException.class)
@@ -325,23 +326,25 @@ public class TestTableDescriptorBuilder {
     HColumnDescriptor hcd = new HColumnDescriptor(familyName);
     TableDescriptor htd = TableDescriptorBuilder
             .newBuilder(TableName.valueOf(name.getMethodName()))
-            .modifyFamily(hcd)
+            .modifyColumnFamily(hcd)
             .build();
   }
 
   @Test(expected=IllegalArgumentException.class)
   public void testAddDuplicateFamilies() {
     byte[] familyName = Bytes.toBytes("cf");
-    HColumnDescriptor hcd = new HColumnDescriptor(familyName);
-    hcd.setBlocksize(1000);
+    ColumnFamilyDescriptor hcd = ColumnFamilyDescriptorBuilder.newBuilder(familyName)
+            .setBlocksize(1000)
+            .build();
     TableDescriptor htd = TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName()))
-            .addFamily(hcd)
+            .addColumnFamily(hcd)
+            .build();
+    assertEquals(1000, htd.getColumnFamily(familyName).getBlocksize());
+    hcd = ColumnFamilyDescriptorBuilder.newBuilder(familyName)
+            .setBlocksize(2000)
             .build();
-    assertEquals(1000, htd.getFamily(familyName).getBlocksize());
-    hcd = new HColumnDescriptor(familyName);
-    hcd.setBlocksize(2000);
     // add duplicate column
-    TableDescriptorBuilder.newBuilder(htd).addFamily(hcd).build();
+    TableDescriptorBuilder.newBuilder(htd).addColumnFamily(hcd).build();
   }
 
   @Test
@@ -358,18 +361,18 @@ public class TestTableDescriptorBuilder {
     hcdWithScope.setScope(HConstants.REPLICATION_SCOPE_SERIAL);
     HColumnDescriptor hcdWithoutScope = new HColumnDescriptor(Bytes.toBytes("cf1"));
     TableDescriptor htd = TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName()))
-            .addFamily(hcdWithoutScope)
+            .addColumnFamily(hcdWithoutScope)
             .build();
     assertFalse(htd.hasSerialReplicationScope());
 
     htd = TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName()))
-            .addFamily(hcdWithScope)
+            .addColumnFamily(hcdWithScope)
             .build();
     assertTrue(htd.hasSerialReplicationScope());
 
     htd = TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName()))
-            .addFamily(hcdWithScope)
-            .addFamily(hcdWithoutScope)
+            .addColumnFamily(hcdWithScope)
+            .addColumnFamily(hcdWithoutScope)
             .build();
     assertTrue(htd.hasSerialReplicationScope());
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/72cb7d97/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
index 791445b..d514003 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
@@ -21,14 +21,14 @@ import static org.apache.hadoop.hbase.HConstants.BUCKET_CACHE_IOENGINE_KEY;
 import static org.apache.hadoop.hbase.HConstants.BUCKET_CACHE_SIZE_KEY;
 
 import java.io.IOException;
-import java.lang.management.ManagementFactory;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
 import org.apache.hadoop.hbase.io.hfile.BlockType.BlockCategory;
 import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache;
 import org.apache.hadoop.hbase.io.util.MemorySizeUtil;
@@ -37,6 +37,7 @@ import org.apache.hadoop.util.StringUtils;
 
 import com.google.common.annotations.VisibleForTesting;
 
+
 /**
  * Stores all of the cache objects and configuration for a single HFile.
  */
@@ -232,7 +233,7 @@ public class CacheConfig {
    * @param conf hbase configuration
    * @param family column family configuration
    */
-  public CacheConfig(Configuration conf, HColumnDescriptor family) {
+  public CacheConfig(Configuration conf, ColumnFamilyDescriptor family) {
     this(CacheConfig.instantiateBlockCache(conf),
         conf.getBoolean(CACHE_DATA_ON_READ_KEY, DEFAULT_CACHE_DATA_ON_READ)
            && family.isBlockCacheEnabled(),
@@ -250,8 +251,8 @@ public class CacheConfig {
         conf.getBoolean(CACHE_DATA_BLOCKS_COMPRESSED_KEY, DEFAULT_CACHE_DATA_COMPRESSED),
         conf.getBoolean(PREFETCH_BLOCKS_ON_OPEN_KEY,
             DEFAULT_PREFETCH_ON_OPEN) || family.isPrefetchBlocksOnOpen(),
-        conf.getBoolean(HColumnDescriptor.CACHE_DATA_IN_L1,
-            HColumnDescriptor.DEFAULT_CACHE_DATA_IN_L1) || family.isCacheDataInL1(),
+        conf.getBoolean(ColumnFamilyDescriptorBuilder.CACHE_DATA_IN_L1,
+            ColumnFamilyDescriptorBuilder.DEFAULT_CACHE_DATA_IN_L1) || family.isCacheDataInL1(),
         conf.getBoolean(DROP_BEHIND_CACHE_COMPACTION_KEY, DROP_BEHIND_CACHE_COMPACTION_DEFAULT)
      );
     LOG.info("Created cacheConfig for " + family.getNameAsString() + ": " + this);
@@ -260,8 +261,8 @@ public class CacheConfig {
   /**
    * Create a cache configuration using the specified configuration object and
    * defaults for family level settings. Only use if no column family context. Prefer
-   * {@link CacheConfig#CacheConfig(Configuration, HColumnDescriptor)}
-   * @see #CacheConfig(Configuration, HColumnDescriptor)
+   * {@link CacheConfig#CacheConfig(Configuration, ColumnFamilyDescriptor)}
+   * @see #CacheConfig(Configuration, ColumnFamilyDescriptor)
    * @param conf hbase configuration
    */
   public CacheConfig(Configuration conf) {
@@ -275,8 +276,8 @@ public class CacheConfig {
         conf.getBoolean(EVICT_BLOCKS_ON_CLOSE_KEY, DEFAULT_EVICT_ON_CLOSE),
         conf.getBoolean(CACHE_DATA_BLOCKS_COMPRESSED_KEY, DEFAULT_CACHE_DATA_COMPRESSED),
         conf.getBoolean(PREFETCH_BLOCKS_ON_OPEN_KEY, DEFAULT_PREFETCH_ON_OPEN),
-        conf.getBoolean(HColumnDescriptor.CACHE_DATA_IN_L1,
-          HColumnDescriptor.DEFAULT_CACHE_DATA_IN_L1),
+        conf.getBoolean(ColumnFamilyDescriptorBuilder.CACHE_DATA_IN_L1,
+          ColumnFamilyDescriptorBuilder.DEFAULT_CACHE_DATA_IN_L1),
         conf.getBoolean(DROP_BEHIND_CACHE_COMPACTION_KEY, DROP_BEHIND_CACHE_COMPACTION_DEFAULT)
      );
     LOG.info("Created cacheConfig: " + this);

http://git-wip-us.apache.org/repos/asf/hbase/blob/72cb7d97/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
index c43a4d1..c6397f3 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
@@ -43,6 +43,7 @@ import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.UnknownRegionException;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
 import org.apache.hadoop.hbase.client.MasterSwitchType;
 import org.apache.hadoop.hbase.client.TableState;
 import org.apache.hadoop.hbase.client.VersionInfoUtil;

http://git-wip-us.apache.org/repos/asf/hbase/blob/72cb7d97/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.java
index 2e2aa5d..c423f17 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.java
@@ -19,6 +19,7 @@
 package org.apache.hadoop.hbase.master.assignment;
 
 import com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
 
 import java.io.IOException;
 import java.io.InputStream;
@@ -521,7 +522,7 @@ public class SplitTableRegionProcedure
     // Split each store file.
     final TableDescriptor htd = env.getMasterServices().getTableDescriptors().get(getTableName());
     for (String family: regionFs.getFamilies()) {
-      final HColumnDescriptor hcd = htd.getFamily(family.getBytes());
+      final ColumnFamilyDescriptor hcd = htd.getColumnFamily(family.getBytes());
       final Collection<StoreFileInfo> storeFiles = regionFs.getStoreFiles(family);
       if (storeFiles != null && storeFiles.size() > 0) {
         final CacheConfig cacheConf = new CacheConfig(conf, hcd);

http://git-wip-us.apache.org/repos/asf/hbase/blob/72cb7d97/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java
index 70fe5c5..dcd3144 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java
@@ -130,7 +130,7 @@ public class FSTableDescriptors implements TableDescriptors {
   public static HTableDescriptor createMetaTableDescriptor(final Configuration conf)
       throws IOException {
     return new HTableDescriptor(TableDescriptorBuilder.newBuilder(TableName.META_TABLE_NAME)
-          .addFamily(new HColumnDescriptor(HConstants.CATALOG_FAMILY)
+          .addColumnFamily(new HColumnDescriptor(HConstants.CATALOG_FAMILY)
             .setMaxVersions(conf.getInt(HConstants.HBASE_META_VERSIONS,
                     HConstants.DEFAULT_HBASE_META_VERSIONS))
             .setInMemory(true)
@@ -142,7 +142,7 @@ public class FSTableDescriptors implements TableDescriptors {
             // Enable cache of data blocks in L1 if more than one caching tier deployed:
             // e.g. if using CombinedBlockCache (BucketCache).
             .setCacheDataInL1(true))
-          .addFamily(new HColumnDescriptor(HConstants.REPLICATION_BARRIER_FAMILY)
+          .addColumnFamily(new HColumnDescriptor(HConstants.REPLICATION_BARRIER_FAMILY)
             .setMaxVersions(conf.getInt(HConstants.HBASE_META_VERSIONS,
                     HConstants.DEFAULT_HBASE_META_VERSIONS))
             .setInMemory(true)
@@ -154,7 +154,7 @@ public class FSTableDescriptors implements TableDescriptors {
             // Enable cache of data blocks in L1 if more than one caching tier deployed:
             // e.g. if using CombinedBlockCache (BucketCache).
             .setCacheDataInL1(true))
-          .addFamily(new HColumnDescriptor(HConstants.REPLICATION_POSITION_FAMILY)
+          .addColumnFamily(new HColumnDescriptor(HConstants.REPLICATION_POSITION_FAMILY)
             .setMaxVersions(conf.getInt(HConstants.HBASE_META_VERSIONS,
                     HConstants.DEFAULT_HBASE_META_VERSIONS))
             .setInMemory(true)
@@ -166,7 +166,7 @@ public class FSTableDescriptors implements TableDescriptors {
             // Enable cache of data blocks in L1 if more than one caching tier deployed:
             // e.g. if using CombinedBlockCache (BucketCache).
             .setCacheDataInL1(true))
-          .addFamily(new HColumnDescriptor(HConstants.REPLICATION_META_FAMILY)
+          .addColumnFamily(new HColumnDescriptor(HConstants.REPLICATION_META_FAMILY)
             .setMaxVersions(conf.getInt(HConstants.HBASE_META_VERSIONS,
                     HConstants.DEFAULT_HBASE_META_VERSIONS))
             .setInMemory(true)
@@ -178,7 +178,7 @@ public class FSTableDescriptors implements TableDescriptors {
             // Enable cache of data blocks in L1 if more than one caching tier deployed:
             // e.g. if using CombinedBlockCache (BucketCache).
             .setCacheDataInL1(true))
-          .addFamily(new HColumnDescriptor(HConstants.TABLE_FAMILY)
+          .addColumnFamily(new HColumnDescriptor(HConstants.TABLE_FAMILY)
             // Ten is arbitrary number.  Keep versions to help debugging.
             .setMaxVersions(10)
             .setInMemory(true)

http://git-wip-us.apache.org/repos/asf/hbase/blob/72cb7d97/hbase-server/src/test/java/org/apache/hadoop/hbase/TestAcidGuarantees.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestAcidGuarantees.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestAcidGuarantees.java
index 15250ac..cc7d0a3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestAcidGuarantees.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestAcidGuarantees.java
@@ -101,7 +101,7 @@ public class TestAcidGuarantees implements Tool {
     }
 
     if (useMob) {
-      HTableDescriptor htd = util.getAdmin().getTableDescriptor(TABLE_NAME);
+      HTableDescriptor htd = new HTableDescriptor(util.getAdmin().getTableDescriptor(TABLE_NAME));
       HColumnDescriptor hcd =  htd.getColumnFamilies()[0];
       // force mob enabled such that all data is mob data
       hcd.setMobEnabled(true);

http://git-wip-us.apache.org/repos/asf/hbase/blob/72cb7d97/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java
index f84d9c2..c5681b1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java
@@ -526,7 +526,7 @@ public class TestAdmin1 {
       expectedException = true;
     }
     assertFalse(expectedException);
-    HTableDescriptor modifiedHtd = this.admin.getTableDescriptor(tableName);
+    HTableDescriptor modifiedHtd = new HTableDescriptor(this.admin.getTableDescriptor(tableName));
     assertFalse(htd.equals(modifiedHtd));
     assertTrue(copy.equals(modifiedHtd));
     assertEquals(newFlushSize, modifiedHtd.getMemStoreFlushSize());

http://git-wip-us.apache.org/repos/asf/hbase/blob/72cb7d97/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi.java
index 3201fbe..c0ccd5e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi.java
@@ -725,7 +725,7 @@ public class TestAsyncTableAdminApi extends TestAsyncAdminBase {
       admin.modifyColumnFamily(tableName, cfDescriptor).join();
 
       TableDescriptor htd = admin.getTableDescriptor(tableName).get();
-      HColumnDescriptor hcfd = htd.getFamily(FAMILY_0);
+      ColumnFamilyDescriptor hcfd = htd.getColumnFamily(FAMILY_0);
       assertTrue(hcfd.getBlocksize() == newBlockSize);
     } finally {
       admin.deleteTable(tableName).join();

http://git-wip-us.apache.org/repos/asf/hbase/blob/72cb7d97/hbase-server/src/test/java/org/apache/hadoop/hbase/client/replication/TestReplicationAdminWithClusters.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/replication/TestReplicationAdminWithClusters.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/replication/TestReplicationAdminWithClusters.java
index f3797d1..3b5522b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/replication/TestReplicationAdminWithClusters.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/replication/TestReplicationAdminWithClusters.java
@@ -104,7 +104,7 @@ public class TestReplicationAdminWithClusters extends TestReplicationBase {
 
   @Test(timeout = 300000)
   public void testEnableReplicationWhenReplicationNotEnabled() throws Exception {
-    HTableDescriptor table = admin1.getTableDescriptor(tableName);
+    HTableDescriptor table = new HTableDescriptor(admin1.getTableDescriptor(tableName));
     for (HColumnDescriptor fam : table.getColumnFamilies()) {
       fam.setScope(HConstants.REPLICATION_SCOPE_LOCAL);
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/72cb7d97/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java
index 59e8fb3..569b170 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java
@@ -172,7 +172,7 @@ public class TestCatalogJanitor {
    */
   private TableDescriptor createTableDescriptorForCurrentMethod() {
     return TableDescriptorBuilder.newBuilder(TableName.valueOf(this.name.getMethodName())).
-        addFamily(new HColumnDescriptor(MockMasterServices.DEFAULT_COLUMN_FAMILY_NAME)).
+        addColumnFamily(new HColumnDescriptor(MockMasterServices.DEFAULT_COLUMN_FAMILY_NAME)).
         build();
   }
 


Mime
View raw message