hbase-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From ecl...@apache.org
Subject [16/38] HBASE-12197 Move rest to its own module
Date Fri, 10 Oct 2014 16:53:19 GMT
http://git-wip-us.apache.org/repos/asf/hbase/blob/052a6f07/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java
deleted file mode 100644
index 2ffdd4f..0000000
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java
+++ /dev/null
@@ -1,852 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.rest.model;
-
-import java.io.IOException;
-import java.io.Serializable;
-import java.io.StringReader;
-import java.io.StringWriter;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.NavigableSet;
-
-import javax.xml.bind.annotation.XmlAttribute;
-import javax.xml.bind.annotation.XmlElement;
-import javax.xml.bind.annotation.XmlRootElement;
-
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.filter.BinaryComparator;
-import org.apache.hadoop.hbase.filter.BinaryPrefixComparator;
-import org.apache.hadoop.hbase.filter.BitComparator;
-import org.apache.hadoop.hbase.filter.ByteArrayComparable;
-import org.apache.hadoop.hbase.filter.ColumnCountGetFilter;
-import org.apache.hadoop.hbase.filter.ColumnPaginationFilter;
-import org.apache.hadoop.hbase.filter.ColumnPrefixFilter;
-import org.apache.hadoop.hbase.filter.ColumnRangeFilter;
-import org.apache.hadoop.hbase.filter.CompareFilter;
-import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
-import org.apache.hadoop.hbase.filter.DependentColumnFilter;
-import org.apache.hadoop.hbase.filter.FamilyFilter;
-import org.apache.hadoop.hbase.filter.Filter;
-import org.apache.hadoop.hbase.filter.FilterList;
-import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
-import org.apache.hadoop.hbase.filter.InclusiveStopFilter;
-import org.apache.hadoop.hbase.filter.KeyOnlyFilter;
-import org.apache.hadoop.hbase.filter.MultipleColumnPrefixFilter;
-import org.apache.hadoop.hbase.filter.NullComparator;
-import org.apache.hadoop.hbase.filter.PageFilter;
-import org.apache.hadoop.hbase.filter.PrefixFilter;
-import org.apache.hadoop.hbase.filter.QualifierFilter;
-import org.apache.hadoop.hbase.filter.RandomRowFilter;
-import org.apache.hadoop.hbase.filter.RegexStringComparator;
-import org.apache.hadoop.hbase.filter.RowFilter;
-import org.apache.hadoop.hbase.filter.SingleColumnValueExcludeFilter;
-import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
-import org.apache.hadoop.hbase.filter.SkipFilter;
-import org.apache.hadoop.hbase.filter.SubstringComparator;
-import org.apache.hadoop.hbase.filter.TimestampsFilter;
-import org.apache.hadoop.hbase.filter.ValueFilter;
-import org.apache.hadoop.hbase.filter.WhileMatchFilter;
-import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
-import org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner;
-import org.apache.hadoop.hbase.security.visibility.Authorizations;
-import org.apache.hadoop.hbase.util.Base64;
-import org.apache.hadoop.hbase.util.ByteStringer;
-import org.apache.hadoop.hbase.util.Bytes;
-
-import com.google.protobuf.ByteString;
-import com.sun.jersey.api.json.JSONConfiguration;
-import com.sun.jersey.api.json.JSONJAXBContext;
-import com.sun.jersey.api.json.JSONMarshaller;
-import com.sun.jersey.api.json.JSONUnmarshaller;
-
-/**
- * A representation of Scanner parameters.
- * 
- * <pre>
- * &lt;complexType name="Scanner"&gt;
- *   &lt;sequence>
- *     &lt;element name="column" type="base64Binary" minOccurs="0" maxOccurs="unbounded"/&gt;
- *     &lt;element name="filter" type="string" minOccurs="0" maxOccurs="1"&gt;&lt;/element&gt;
- *   &lt;/sequence&gt;
- *   &lt;attribute name="startRow" type="base64Binary"&gt;&lt;/attribute&gt;
- *   &lt;attribute name="endRow" type="base64Binary"&gt;&lt;/attribute&gt;
- *   &lt;attribute name="batch" type="int"&gt;&lt;/attribute&gt;
- *   &lt;attribute name="caching" type="int"&gt;&lt;/attribute&gt;
- *   &lt;attribute name="startTime" type="int"&gt;&lt;/attribute&gt;
- *   &lt;attribute name="endTime" type="int"&gt;&lt;/attribute&gt;
- *   &lt;attribute name="maxVersions" type="int"&gt;&lt;/attribute&gt;
- * &lt;/complexType&gt;
- * </pre>
- */
-@XmlRootElement(name="Scanner")
-@InterfaceAudience.Private
-public class ScannerModel implements ProtobufMessageHandler, Serializable {
-
-  private static final long serialVersionUID = 1L;
-
-  private byte[] startRow = HConstants.EMPTY_START_ROW;
-  private byte[] endRow = HConstants.EMPTY_END_ROW;;
-  private List<byte[]> columns = new ArrayList<byte[]>();
-  private int batch = Integer.MAX_VALUE;
-  private long startTime = 0;
-  private long endTime = Long.MAX_VALUE;
-  private String filter = null;
-  private int maxVersions = Integer.MAX_VALUE;
-  private int caching = -1;
-  private List<String> labels = new ArrayList<String>();
-  private boolean cacheBlocks = true;
-  
-  @XmlRootElement
-  static class FilterModel {
-    
-    @XmlRootElement
-    static class ByteArrayComparableModel {
-      @XmlAttribute public String type;
-      @XmlAttribute public String value;
-      @XmlAttribute public String op;
-
-      static enum ComparatorType {
-        BinaryComparator,
-        BinaryPrefixComparator,
-        BitComparator,
-        NullComparator,
-        RegexStringComparator,
-        SubstringComparator    
-      }
-
-      public ByteArrayComparableModel() { }
-
-      public ByteArrayComparableModel(
-          ByteArrayComparable comparator) {
-        String typeName = comparator.getClass().getSimpleName();
-        ComparatorType type = ComparatorType.valueOf(typeName);
-        this.type = typeName;
-        switch (type) {
-          case BinaryComparator:
-          case BinaryPrefixComparator:
-            this.value = Base64.encodeBytes(comparator.getValue());
-            break;
-          case BitComparator:
-            this.value = Base64.encodeBytes(comparator.getValue());
-            this.op = ((BitComparator)comparator).getOperator().toString();
-            break;
-          case NullComparator:
-            break;
-          case RegexStringComparator:
-          case SubstringComparator:
-            this.value = Bytes.toString(comparator.getValue());
-            break;
-          default:
-            throw new RuntimeException("unhandled filter type: " + type);
-        }
-      }
-
-      public ByteArrayComparable build() {
-        ByteArrayComparable comparator;
-        switch (ComparatorType.valueOf(type)) {
-          case BinaryComparator:
-            comparator = new BinaryComparator(Base64.decode(value));
-            break;
-          case BinaryPrefixComparator:
-            comparator = new BinaryPrefixComparator(Base64.decode(value));
-            break;
-          case BitComparator:
-            comparator = new BitComparator(Base64.decode(value),
-                BitComparator.BitwiseOp.valueOf(op));
-            break;
-          case NullComparator:
-            comparator = new NullComparator();
-            break;
-          case RegexStringComparator:
-            comparator = new RegexStringComparator(value);
-            break;
-          case SubstringComparator:
-            comparator = new SubstringComparator(value);
-            break;
-          default:
-            throw new RuntimeException("unhandled comparator type: " + type);
-        }
-        return comparator;
-      }
-
-    }
-
-    // A grab bag of fields, would have been a union if this were C.
-    // These are null by default and will only be serialized if set (non null).
-    @XmlAttribute public String type;
-    @XmlAttribute public String op;
-    @XmlElement ByteArrayComparableModel comparator;
-    @XmlAttribute public String value;
-    @XmlElement public List<FilterModel> filters;
-    @XmlAttribute public Integer limit;
-    @XmlAttribute public Integer offset;
-    @XmlAttribute public String family;
-    @XmlAttribute public String qualifier;
-    @XmlAttribute public Boolean ifMissing;
-    @XmlAttribute public Boolean latestVersion;
-    @XmlAttribute public String minColumn;
-    @XmlAttribute public Boolean minColumnInclusive;
-    @XmlAttribute public String maxColumn;
-    @XmlAttribute public Boolean maxColumnInclusive;
-    @XmlAttribute public Boolean dropDependentColumn;
-    @XmlAttribute public Float chance;
-    @XmlElement public List<String> prefixes;
-    @XmlElement public List<Long> timestamps;
-
-    static enum FilterType {
-      ColumnCountGetFilter,
-      ColumnPaginationFilter,
-      ColumnPrefixFilter,
-      ColumnRangeFilter,
-      DependentColumnFilter,
-      FamilyFilter,
-      FilterList,
-      FirstKeyOnlyFilter,
-      InclusiveStopFilter,
-      KeyOnlyFilter,
-      MultipleColumnPrefixFilter,
-      PageFilter,
-      PrefixFilter,
-      QualifierFilter,
-      RandomRowFilter,
-      RowFilter,
-      SingleColumnValueExcludeFilter,
-      SingleColumnValueFilter,
-      SkipFilter,
-      TimestampsFilter,
-      ValueFilter,
-      WhileMatchFilter    
-    }
-
-    public FilterModel() { }
-    
-    public FilterModel(Filter filter) { 
-      String typeName = filter.getClass().getSimpleName();
-      FilterType type = FilterType.valueOf(typeName);
-      this.type = typeName;
-      switch (type) {
-        case ColumnCountGetFilter:
-          this.limit = ((ColumnCountGetFilter)filter).getLimit();
-          break;
-        case ColumnPaginationFilter:
-          this.limit = ((ColumnPaginationFilter)filter).getLimit();
-          this.offset = ((ColumnPaginationFilter)filter).getOffset();
-          break;
-        case ColumnPrefixFilter:
-          this.value = Base64.encodeBytes(((ColumnPrefixFilter)filter).getPrefix());
-          break;
-        case ColumnRangeFilter:
-          this.minColumn = Base64.encodeBytes(((ColumnRangeFilter)filter).getMinColumn());
-          this.minColumnInclusive = ((ColumnRangeFilter)filter).getMinColumnInclusive();
-          this.maxColumn = Base64.encodeBytes(((ColumnRangeFilter)filter).getMaxColumn());
-          this.maxColumnInclusive = ((ColumnRangeFilter)filter).getMaxColumnInclusive();
-          break;
-        case DependentColumnFilter: {
-          DependentColumnFilter dcf = (DependentColumnFilter)filter;
-          this.family = Base64.encodeBytes(dcf.getFamily());
-          byte[] qualifier = dcf.getQualifier();
-          if (qualifier != null) {
-            this.qualifier = Base64.encodeBytes(qualifier);
-          }
-          this.op = dcf.getOperator().toString();
-          this.comparator = new ByteArrayComparableModel(dcf.getComparator());
-          this.dropDependentColumn = dcf.dropDependentColumn();
-        } break;
-        case FilterList:
-          this.op = ((FilterList)filter).getOperator().toString();
-          this.filters = new ArrayList<FilterModel>();
-          for (Filter child: ((FilterList)filter).getFilters()) {
-            this.filters.add(new FilterModel(child));
-          }
-          break;
-        case FirstKeyOnlyFilter:
-        case KeyOnlyFilter:
-          break;
-        case InclusiveStopFilter:
-          this.value = 
-            Base64.encodeBytes(((InclusiveStopFilter)filter).getStopRowKey());
-          break;
-        case MultipleColumnPrefixFilter:
-          this.prefixes = new ArrayList<String>();
-          for (byte[] prefix: ((MultipleColumnPrefixFilter)filter).getPrefix()) {
-            this.prefixes.add(Base64.encodeBytes(prefix));
-          }
-          break;
-        case PageFilter:
-          this.value = Long.toString(((PageFilter)filter).getPageSize());
-          break;
-        case PrefixFilter:
-          this.value = Base64.encodeBytes(((PrefixFilter)filter).getPrefix());
-          break;
-        case FamilyFilter:
-        case QualifierFilter:
-        case RowFilter:
-        case ValueFilter:
-          this.op = ((CompareFilter)filter).getOperator().toString();
-          this.comparator = 
-            new ByteArrayComparableModel(
-              ((CompareFilter)filter).getComparator());
-          break;
-        case RandomRowFilter:
-          this.chance = ((RandomRowFilter)filter).getChance();
-          break;
-        case SingleColumnValueExcludeFilter:
-        case SingleColumnValueFilter: {
-          SingleColumnValueFilter scvf = (SingleColumnValueFilter) filter;
-          this.family = Base64.encodeBytes(scvf.getFamily());
-          byte[] qualifier = scvf.getQualifier();
-          if (qualifier != null) {
-            this.qualifier = Base64.encodeBytes(qualifier);
-          }
-          this.op = scvf.getOperator().toString();
-          this.comparator = 
-            new ByteArrayComparableModel(scvf.getComparator());
-          if (scvf.getFilterIfMissing()) {
-            this.ifMissing = true;
-          }
-          if (scvf.getLatestVersionOnly()) {
-            this.latestVersion = true;
-          }
-        } break;
-        case SkipFilter:
-          this.filters = new ArrayList<FilterModel>();
-          this.filters.add(new FilterModel(((SkipFilter)filter).getFilter()));
-          break;
-        case TimestampsFilter:
-          this.timestamps = ((TimestampsFilter)filter).getTimestamps();
-          break;
-        case WhileMatchFilter:
-          this.filters = new ArrayList<FilterModel>();
-          this.filters.add(
-            new FilterModel(((WhileMatchFilter)filter).getFilter()));
-          break;
-        default:
-          throw new RuntimeException("unhandled filter type " + type);
-      }
-    }
-
-    public Filter build() {
-      Filter filter;
-      switch (FilterType.valueOf(type)) {
-      case ColumnCountGetFilter:
-        filter = new ColumnCountGetFilter(limit);
-        break;
-      case ColumnPaginationFilter:
-        filter = new ColumnPaginationFilter(limit, offset);
-        break;
-      case ColumnPrefixFilter:
-        filter = new ColumnPrefixFilter(Base64.decode(value));
-        break;
-      case ColumnRangeFilter:
-        filter = new ColumnRangeFilter(Base64.decode(minColumn),
-            minColumnInclusive, Base64.decode(maxColumn),
-            maxColumnInclusive);
-        break;
-      case DependentColumnFilter:
-        filter = new DependentColumnFilter(Base64.decode(family),
-            qualifier != null ? Base64.decode(qualifier) : null,
-            dropDependentColumn, CompareOp.valueOf(op), comparator.build());
-        break;
-      case FamilyFilter:
-        filter = new FamilyFilter(CompareOp.valueOf(op), comparator.build());
-        break;
-      case FilterList: {
-        List<Filter> list = new ArrayList<Filter>();
-        for (FilterModel model: filters) {
-          list.add(model.build());
-        }
-        filter = new FilterList(FilterList.Operator.valueOf(op), list);
-      } break;
-      case FirstKeyOnlyFilter:
-        filter = new FirstKeyOnlyFilter();
-        break;
-      case InclusiveStopFilter:
-        filter = new InclusiveStopFilter(Base64.decode(value));
-        break;
-      case KeyOnlyFilter:
-        filter = new KeyOnlyFilter();
-        break;
-      case MultipleColumnPrefixFilter: {
-        byte[][] values = new byte[prefixes.size()][];
-        for (int i = 0; i < prefixes.size(); i++) {
-          values[i] = Base64.decode(prefixes.get(i));
-        }
-        filter = new MultipleColumnPrefixFilter(values);
-      } break;
-      case PageFilter:
-        filter = new PageFilter(Long.valueOf(value));
-        break;
-      case PrefixFilter:
-        filter = new PrefixFilter(Base64.decode(value));
-        break;
-      case QualifierFilter:
-        filter = new QualifierFilter(CompareOp.valueOf(op), comparator.build());
-        break;
-      case RandomRowFilter:
-        filter = new RandomRowFilter(chance);
-        break;
-      case RowFilter:
-        filter = new RowFilter(CompareOp.valueOf(op), comparator.build());
-        break;
-      case SingleColumnValueFilter:
-        filter = new SingleColumnValueFilter(Base64.decode(family),
-          qualifier != null ? Base64.decode(qualifier) : null,
-          CompareOp.valueOf(op), comparator.build());
-        if (ifMissing != null) {
-          ((SingleColumnValueFilter)filter).setFilterIfMissing(ifMissing);
-        }
-        if (latestVersion != null) {
-          ((SingleColumnValueFilter)filter).setLatestVersionOnly(latestVersion);
-        }
-        break;
-      case SingleColumnValueExcludeFilter:
-        filter = new SingleColumnValueExcludeFilter(Base64.decode(family),
-          qualifier != null ? Base64.decode(qualifier) : null,
-          CompareOp.valueOf(op), comparator.build());
-        if (ifMissing != null) {
-          ((SingleColumnValueExcludeFilter)filter).setFilterIfMissing(ifMissing);
-        }
-        if (latestVersion != null) {
-          ((SingleColumnValueExcludeFilter)filter).setLatestVersionOnly(latestVersion);
-        }
-        break;
-      case SkipFilter:
-        filter = new SkipFilter(filters.get(0).build());
-        break;
-      case TimestampsFilter:
-        filter = new TimestampsFilter(timestamps);
-        break;
-      case ValueFilter:
-        filter = new ValueFilter(CompareOp.valueOf(op), comparator.build());
-        break;
-      case WhileMatchFilter:
-        filter = new WhileMatchFilter(filters.get(0).build());
-        break;
-      default:
-        throw new RuntimeException("unhandled filter type: " + type);
-      }
-      return filter;
-    }
-
-  }
-
-  /**
-   * @param s the JSON representation of the filter
-   * @return the filter
-   * @throws Exception
-   */
-  public static Filter buildFilter(String s) throws Exception {
-    JSONJAXBContext context =
-      new JSONJAXBContext(JSONConfiguration.natural().build(),
-        FilterModel.class);
-    JSONUnmarshaller unmarshaller = context.createJSONUnmarshaller();
-    FilterModel model = unmarshaller.unmarshalFromJSON(new StringReader(s),
-      FilterModel.class);
-    return model.build();
-  }
-
-  /**
-   * @param filter the filter
-   * @return the JSON representation of the filter
-   * @throws Exception 
-   */
-  public static String stringifyFilter(final Filter filter) throws Exception {
-    JSONJAXBContext context =
-      new JSONJAXBContext(JSONConfiguration.natural().build(),
-        FilterModel.class);
-    JSONMarshaller marshaller = context.createJSONMarshaller();
-    StringWriter writer = new StringWriter();
-    marshaller.marshallToJSON(new FilterModel(filter), writer);
-    return writer.toString();
-  }
-
-  private static final byte[] COLUMN_DIVIDER = Bytes.toBytes(":");
-
-  /**
-   * @param scan the scan specification
-   * @throws Exception 
-   */
-  public static ScannerModel fromScan(Scan scan) throws Exception {
-    ScannerModel model = new ScannerModel();
-    model.setStartRow(scan.getStartRow());
-    model.setEndRow(scan.getStopRow());
-    Map<byte [], NavigableSet<byte []>> families = scan.getFamilyMap();
-    if (families != null) {
-      for (Map.Entry<byte [], NavigableSet<byte []>> entry : families.entrySet()) {
-        if (entry.getValue() != null) {
-          for (byte[] qualifier: entry.getValue()) {
-            model.addColumn(Bytes.add(entry.getKey(), COLUMN_DIVIDER, qualifier));
-          }
-        } else {
-          model.addColumn(entry.getKey());
-        }
-      }
-    }
-    model.setStartTime(scan.getTimeRange().getMin());
-    model.setEndTime(scan.getTimeRange().getMax());
-    int caching = scan.getCaching();
-    if (caching > 0) {
-      model.setCaching(caching);
-    }
-    int batch = scan.getBatch();
-    if (batch > 0) {
-      model.setBatch(batch);
-    }
-    int maxVersions = scan.getMaxVersions();
-    if (maxVersions > 0) {
-      model.setMaxVersions(maxVersions);
-    }
-    Filter filter = scan.getFilter();
-    if (filter != null) {
-      model.setFilter(stringifyFilter(filter));
-    }
-    // Add the visbility labels if found in the attributes
-    Authorizations authorizations = scan.getAuthorizations();
-    if (authorizations != null) {
-      List<String> labels = authorizations.getLabels();
-      for (String label : labels) {
-        model.addLabel(label);
-      }
-    }
-    return model;
-  }
-
-  /**
-   * Default constructor
-   */
-  public ScannerModel() {}
-
-  /**
-   * Constructor
-   * @param startRow the start key of the row-range
-   * @param endRow the end key of the row-range
-   * @param columns the columns to scan
-   * @param batch the number of values to return in batch
-   * @param caching the number of rows that the scanner will fetch at once
-   * @param endTime the upper bound on timestamps of values of interest
-   * @param maxVersions the maximum number of versions to return
-   * @param filter a filter specification
-   * (values with timestamps later than this are excluded)
-   */
-  public ScannerModel(byte[] startRow, byte[] endRow, List<byte[]> columns,
-      int batch, int caching, long endTime, int maxVersions, String filter) {
-    super();
-    this.startRow = startRow;
-    this.endRow = endRow;
-    this.columns = columns;
-    this.batch = batch;
-    this.caching = caching;
-    this.endTime = endTime;
-    this.maxVersions = maxVersions;
-    this.filter = filter;
-  }
-
-  /**
-   * Constructor 
-   * @param startRow the start key of the row-range
-   * @param endRow the end key of the row-range
-   * @param columns the columns to scan
-   * @param batch the number of values to return in batch
-   * @param caching the number of rows that the scanner will fetch at once
-   * @param startTime the lower bound on timestamps of values of interest
-   * (values with timestamps earlier than this are excluded)
-   * @param endTime the upper bound on timestamps of values of interest
-   * (values with timestamps later than this are excluded)
-   * @param filter a filter specification
-   */
-  public ScannerModel(byte[] startRow, byte[] endRow, List<byte[]> columns,
-      int batch, int caching, long startTime, long endTime, String filter) {
-    super();
-    this.startRow = startRow;
-    this.endRow = endRow;
-    this.columns = columns;
-    this.batch = batch;
-    this.caching = caching;
-    this.startTime = startTime;
-    this.endTime = endTime;
-    this.filter = filter;
-  }
-
-  /**
-   * Add a column to the column set
-   * @param column the column name, as &lt;column&gt;(:&lt;qualifier&gt;)?
-   */
-  public void addColumn(byte[] column) {
-    columns.add(column);
-  }
-  
-  /**
-   * Add a visibility label to the scan
-   */
-  public void addLabel(String label) {
-    labels.add(label);
-  }
-  /**
-   * @return true if a start row was specified
-   */
-  public boolean hasStartRow() {
-    return !Bytes.equals(startRow, HConstants.EMPTY_START_ROW);
-  }
-
-  /**
-   * @return start row
-   */
-  @XmlAttribute
-  public byte[] getStartRow() {
-    return startRow;
-  }
-
-  /**
-   * @return true if an end row was specified
-   */
-  public boolean hasEndRow() {
-    return !Bytes.equals(endRow, HConstants.EMPTY_END_ROW);
-  }
-
-  /**
-   * @return end row
-   */
-  @XmlAttribute
-  public byte[] getEndRow() {
-    return endRow;
-  }
-
-  /**
-   * @return list of columns of interest in column:qualifier format, or empty for all
-   */
-  @XmlElement(name="column")
-  public List<byte[]> getColumns() {
-    return columns;
-  }
-  
-  @XmlElement(name="labels")
-  public List<String> getLabels() {
-    return labels;
-  }
-
-  /**
-   * @return the number of cells to return in batch
-   */
-  @XmlAttribute
-  public int getBatch() {
-    return batch;
-  }
-
-  /**
-   * @return the number of rows that the scanner to fetch at once
-   */
-  @XmlAttribute
-  public int getCaching() {
-    return caching;
-  }
-
-  /**
-   * @return true if HFile blocks should be cached on the servers for this scan, false otherwise
-   */
-  @XmlAttribute
-  public boolean getCacheBlocks() {
-    return cacheBlocks;
-  }
-
-  /**
-   * @return the lower bound on timestamps of items of interest
-   */
-  @XmlAttribute
-  public long getStartTime() {
-    return startTime;
-  }
-
-  /**
-   * @return the upper bound on timestamps of items of interest
-   */
-  @XmlAttribute
-  public long getEndTime() {
-    return endTime;
-  }
-
-  /**
-   * @return maximum number of versions to return
-   */
-  @XmlAttribute
-  public int getMaxVersions() {
-    return maxVersions;
-  }
-
-  /**
-   * @return the filter specification
-   */
-  @XmlElement
-  public String getFilter() {
-    return filter;
-  }
-
-  /**
-   * @param startRow start row
-   */
-  public void setStartRow(byte[] startRow) {
-    this.startRow = startRow;
-  }
-
-  /**
-   * @param endRow end row
-   */
-  public void setEndRow(byte[] endRow) {
-    this.endRow = endRow;
-  }
-
-  /**
-   * @param columns list of columns of interest in column:qualifier format, or empty for all
-   */
-  public void setColumns(List<byte[]> columns) {
-    this.columns = columns;
-  }
-
-  /**
-   * @param batch the number of cells to return in batch
-   */
-  public void setBatch(int batch) {
-    this.batch = batch;
-  }
-
-  /**
-   * @param caching the number of rows to fetch at once
-   */
-  public void setCaching(int caching) {
-    this.caching = caching;
-  }
-
-  /**
-   * @param value true if HFile blocks should be cached on the servers for this scan, false otherwise
-   */
-  public void setCacheBlocks(boolean value) {
-    this.cacheBlocks = value;
-  }
-
-  /**
-   * @param maxVersions maximum number of versions to return
-   */
-  public void setMaxVersions(int maxVersions) {
-    this.maxVersions = maxVersions;
-  }
-
-  /**
-   * @param startTime the lower bound on timestamps of values of interest
-   */
-  public void setStartTime(long startTime) {
-    this.startTime = startTime;
-  }
-
-  /**
-   * @param endTime the upper bound on timestamps of values of interest
-   */
-  public void setEndTime(long endTime) {
-    this.endTime = endTime;
-  }
-
-  /**
-   * @param filter the filter specification
-   */
-  public void setFilter(String filter) {
-    this.filter = filter;
-  }
-
-  @Override
-  public byte[] createProtobufOutput() {
-    Scanner.Builder builder = Scanner.newBuilder();
-    if (!Bytes.equals(startRow, HConstants.EMPTY_START_ROW)) {
-      builder.setStartRow(ByteStringer.wrap(startRow));
-    }
-    if (!Bytes.equals(endRow, HConstants.EMPTY_START_ROW)) {
-      builder.setEndRow(ByteStringer.wrap(endRow));
-    }
-    for (byte[] column: columns) {
-      builder.addColumns(ByteStringer.wrap(column));
-    }
-    if (startTime != 0) {
-      builder.setStartTime(startTime);
-    }
-    if (endTime != 0) {
-      builder.setEndTime(endTime);
-    }
-    builder.setBatch(getBatch());
-    if (caching > 0) {
-      builder.setCaching(caching);
-    }
-    builder.setMaxVersions(maxVersions);
-    if (filter != null) {
-      builder.setFilter(filter);
-    }
-    if (labels != null && labels.size() > 0) {
-      for (String label : labels)
-        builder.addLabels(label);
-    }
-    builder.setCacheBlocks(cacheBlocks);
-    return builder.build().toByteArray();
-  }
-
-  @Override
-  public ProtobufMessageHandler getObjectFromMessage(byte[] message)
-      throws IOException {
-    Scanner.Builder builder = Scanner.newBuilder();
-    builder.mergeFrom(message);
-    if (builder.hasStartRow()) {
-      startRow = builder.getStartRow().toByteArray();
-    }
-    if (builder.hasEndRow()) {
-      endRow = builder.getEndRow().toByteArray();
-    }
-    for (ByteString column: builder.getColumnsList()) {
-      addColumn(column.toByteArray());
-    }
-    if (builder.hasBatch()) {
-      batch = builder.getBatch();
-    }
-    if (builder.hasCaching()) {
-      caching = builder.getCaching();
-    }
-    if (builder.hasStartTime()) {
-      startTime = builder.getStartTime();
-    }
-    if (builder.hasEndTime()) {
-      endTime = builder.getEndTime();
-    }
-    if (builder.hasMaxVersions()) {
-      maxVersions = builder.getMaxVersions();
-    }
-    if (builder.hasFilter()) {
-      filter = builder.getFilter();
-    }
-    if (builder.getLabelsList() != null) {
-      List<String> labels = builder.getLabelsList();
-      for(String label :  labels) {
-        addLabel(label);
-      }
-    }
-    if (builder.hasCacheBlocks()) {
-      this.cacheBlocks = builder.getCacheBlocks();
-    }
-    return this;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/052a6f07/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterStatusModel.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterStatusModel.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterStatusModel.java
deleted file mode 100644
index 3b044e7..0000000
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterStatusModel.java
+++ /dev/null
@@ -1,790 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.rest.model;
-
-import java.io.IOException;
-import java.io.Serializable;
-import java.util.ArrayList;
-import java.util.List;
-
-import javax.xml.bind.annotation.XmlAttribute;
-import javax.xml.bind.annotation.XmlElement;
-import javax.xml.bind.annotation.XmlElementWrapper;
-import javax.xml.bind.annotation.XmlRootElement;
-
-import org.apache.hadoop.hbase.util.ByteStringer;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
-import org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus;
-import org.apache.hadoop.hbase.util.Bytes;
-
-/**
- * Representation of the status of a storage cluster:
- * <p>
- * <ul>
- * <li>regions: the total number of regions served by the cluster</li>
- * <li>requests: the total number of requests per second handled by the
- * cluster in the last reporting interval</li>
- * <li>averageLoad: the average load of the region servers in the cluster</li>
- * <li>liveNodes: detailed status of the live region servers</li>
- * <li>deadNodes: the names of region servers declared dead</li>
- * </ul>
- * 
- * <pre>
- * &lt;complexType name="StorageClusterStatus"&gt;
- *   &lt;sequence&gt;
- *     &lt;element name="liveNode" type="tns:Node"
- *       maxOccurs="unbounded" minOccurs="0"&gt;
- *     &lt;/element&gt;
- *     &lt;element name="deadNode" type="string" maxOccurs="unbounded"
- *       minOccurs="0"&gt;
- *     &lt;/element&gt;
- *   &lt;/sequence&gt;
- *   &lt;attribute name="regions" type="int"&gt;&lt;/attribute&gt;
- *   &lt;attribute name="requests" type="int"&gt;&lt;/attribute&gt;
- *   &lt;attribute name="averageLoad" type="float"&gt;&lt;/attribute&gt;
- * &lt;/complexType&gt;
- *
- * &lt;complexType name="Node"&gt;
- *   &lt;sequence&gt;
- *     &lt;element name="region" type="tns:Region" 
- *       maxOccurs="unbounded" minOccurs="0"&gt;&lt;/element&gt;
- *   &lt;/sequence&gt;
- *   &lt;attribute name="name" type="string"&gt;&lt;/attribute&gt;
- *   &lt;attribute name="startCode" type="int"&gt;&lt;/attribute&gt;
- *   &lt;attribute name="requests" type="int"&gt;&lt;/attribute&gt;
- *   &lt;attribute name="heapSizeMB" type="int"&gt;&lt;/attribute&gt;
- *   &lt;attribute name="maxHeapSizeMB" type="int"&gt;&lt;/attribute&gt;
- * &lt;/complexType&gt;
- *
- * &lt;complexType name="Region"&gt;
- *   &lt;attribute name="name" type="base64Binary"&gt;&lt;/attribute&gt;
- *   &lt;attribute name="stores" type="int"&gt;&lt;/attribute&gt;
- *   &lt;attribute name="storefiles" type="int"&gt;&lt;/attribute&gt;
- *   &lt;attribute name="storefileSizeMB" type="int"&gt;&lt;/attribute&gt;
- *   &lt;attribute name="memstoreSizeMB" type="int"&gt;&lt;/attribute&gt;
- *   &lt;attribute name="storefileIndexSizeMB" type="int"&gt;&lt;/attribute&gt;
- *   &lt;attribute name="readRequestsCount" type="int"&gt;&lt;/attribute&gt;
- *   &lt;attribute name="writeRequestsCount" type="int"&gt;&lt;/attribute&gt;
- *   &lt;attribute name="rootIndexSizeKB" type="int"&gt;&lt;/attribute&gt;
- *   &lt;attribute name="totalStaticIndexSizeKB" type="int"&gt;&lt;/attribute&gt;
- *   &lt;attribute name="totalStaticBloomSizeKB" type="int"&gt;&lt;/attribute&gt;
- *   &lt;attribute name="totalCompactingKVs" type="int"&gt;&lt;/attribute&gt;
- *   &lt;attribute name="currentCompactedKVs" type="int"&gt;&lt;/attribute&gt;
- * &lt;/complexType&gt;
- * </pre>
- */
-@XmlRootElement(name="ClusterStatus")
-@InterfaceAudience.Private
-public class StorageClusterStatusModel 
-    implements Serializable, ProtobufMessageHandler {
-  private static final long serialVersionUID = 1L;
-
-  /**
-   * Represents a region server.
-   */
-  public static class Node implements Serializable {
-    private static final long serialVersionUID = 1L;
-
-    /**
-     * Represents a region hosted on a region server.
-     */
-    public static class Region {
-      private byte[] name;
-      private int stores;
-      private int storefiles;
-      private int storefileSizeMB;
-      private int memstoreSizeMB;
-      private int storefileIndexSizeMB;
-      private long readRequestsCount;
-      private long writeRequestsCount;
-      private int rootIndexSizeKB;
-      private int totalStaticIndexSizeKB;
-      private int totalStaticBloomSizeKB;
-      private long totalCompactingKVs;
-      private long currentCompactedKVs;
-
-      /**
-       * Default constructor
-       */
-      public Region() {
-      }
-
-      /**
-       * Constructor
-       * @param name the region name
-       */
-      public Region(byte[] name) {
-        this.name = name;
-      }
-
-      /**
-       * Constructor
-       * @param name the region name
-       * @param stores the number of stores
-       * @param storefiles the number of store files
-       * @param storefileSizeMB total size of store files, in MB
-       * @param memstoreSizeMB total size of memstore, in MB
-       * @param storefileIndexSizeMB total size of store file indexes, in MB
-       */
-      public Region(byte[] name, int stores, int storefiles,
-          int storefileSizeMB, int memstoreSizeMB, int storefileIndexSizeMB,
-          long readRequestsCount, long writeRequestsCount, int rootIndexSizeKB,
-          int totalStaticIndexSizeKB, int totalStaticBloomSizeKB,
-          long totalCompactingKVs, long currentCompactedKVs) {
-        this.name = name;
-        this.stores = stores;
-        this.storefiles = storefiles;
-        this.storefileSizeMB = storefileSizeMB;
-        this.memstoreSizeMB = memstoreSizeMB;
-        this.storefileIndexSizeMB = storefileIndexSizeMB;
-        this.readRequestsCount = readRequestsCount;
-        this.writeRequestsCount = writeRequestsCount;
-        this.rootIndexSizeKB = rootIndexSizeKB;
-        this.totalStaticIndexSizeKB = totalStaticIndexSizeKB;
-        this.totalStaticBloomSizeKB = totalStaticBloomSizeKB;
-        this.totalCompactingKVs = totalCompactingKVs;
-        this.currentCompactedKVs = currentCompactedKVs;
-      }
-
-      /**
-       * @return the region name
-       */
-      @XmlAttribute
-      public byte[] getName() {
-        return name;
-      }
-
-      /**
-       * @return the number of stores
-       */
-      @XmlAttribute
-      public int getStores() {
-        return stores;
-      }
-
-      /**
-       * @return the number of store files 
-       */
-      @XmlAttribute
-      public int getStorefiles() {
-        return storefiles;
-      }
-
-      /**
-       * @return the total size of store files, in MB
-       */
-      @XmlAttribute
-      public int getStorefileSizeMB() {
-        return storefileSizeMB;
-      }
-
-      /**
-       * @return memstore size, in MB
-       */
-      @XmlAttribute
-      public int getMemstoreSizeMB() {
-        return memstoreSizeMB;
-      }
-
-      /**
-       * @return the total size of store file indexes, in MB
-       */
-      @XmlAttribute
-      public int getStorefileIndexSizeMB() {
-        return storefileIndexSizeMB;
-      }
-
-      /**
-       * @return the current total read requests made to region
-       */
-      @XmlAttribute
-      public long getReadRequestsCount() {
-        return readRequestsCount;
-      }
-
-      /**
-       * @return the current total write requests made to region
-       */
-      @XmlAttribute
-      public long getWriteRequestsCount() {
-        return writeRequestsCount;
-      }
-
-      /**
-       * @return The current total size of root-level indexes for the region, in KB.
-       */
-      @XmlAttribute
-      public int getRootIndexSizeKB() {
-        return rootIndexSizeKB;
-      }
-
-      /**
-       * @return The total size of static index, in KB
-       */
-      @XmlAttribute
-      public int getTotalStaticIndexSizeKB() {
-        return totalStaticIndexSizeKB;
-      }
-
-      /**
-       * @return The total size of static bloom, in KB
-       */
-      @XmlAttribute
-      public int getTotalStaticBloomSizeKB() {
-        return totalStaticBloomSizeKB;
-      }
-
-      /**
-       * @return The total number of compacting key-values 
-       */
-      @XmlAttribute
-      public long getTotalCompactingKVs() {
-        return totalCompactingKVs;
-      }
-
-      /**
-       * @return The number of current compacted key-values
-       */
-      @XmlAttribute
-      public long getCurrentCompactedKVs() {
-        return currentCompactedKVs;
-      }
-
-      /**
-       * @param readRequestsCount The current total read requests made to region
-       */
-      public void setReadRequestsCount(long readRequestsCount) {
-        this.readRequestsCount = readRequestsCount;
-      }
-
-      /**
-       * @param rootIndexSizeKB The current total size of root-level indexes
-       *                        for the region, in KB
-       */
-      public void setRootIndexSizeKB(int rootIndexSizeKB) {
-        this.rootIndexSizeKB = rootIndexSizeKB;
-      }
-
-      /**
-       * @param writeRequestsCount The current total write requests made to region
-       */
-      public void setWriteRequestsCount(long writeRequestsCount) {
-        this.writeRequestsCount = writeRequestsCount;
-      }
-
-      /**
-       * @param currentCompactedKVs The completed count of key values
-       *                            in currently running compaction
-       */
-      public void setCurrentCompactedKVs(long currentCompactedKVs) {
-        this.currentCompactedKVs = currentCompactedKVs;
-      }
-
-      /**
-       * @param totalCompactingKVs The total compacting key values
-       *                           in currently running compaction
-       */
-      public void setTotalCompactingKVs(long totalCompactingKVs) {
-        this.totalCompactingKVs = totalCompactingKVs;
-      }
-
-      /**
-       * @param totalStaticBloomSizeKB The total size of all Bloom filter blocks,
-       *                               not just loaded into the block cache, in KB.
-       */
-      public void setTotalStaticBloomSizeKB(int totalStaticBloomSizeKB) {
-        this.totalStaticBloomSizeKB = totalStaticBloomSizeKB;
-      }
-
-      /**
-       * @param totalStaticIndexSizeKB The total size of all index blocks,
-       *                               not just the root level, in KB.
-       */
-      public void setTotalStaticIndexSizeKB(int totalStaticIndexSizeKB) {
-        this.totalStaticIndexSizeKB = totalStaticIndexSizeKB;
-      }
-
-      /**
-       * @param name the region name
-       */
-      public void setName(byte[] name) {
-        this.name = name;
-      }
-
-      /**
-       * @param stores the number of stores
-       */
-      public void setStores(int stores) {
-        this.stores = stores;
-      }
-
-      /**
-       * @param storefiles the number of store files
-       */
-      public void setStorefiles(int storefiles) {
-        this.storefiles = storefiles;
-      }
-
-      /**
-       * @param storefileSizeMB total size of store files, in MB
-       */
-      public void setStorefileSizeMB(int storefileSizeMB) {
-        this.storefileSizeMB = storefileSizeMB;
-      }
-
-      /**
-       * @param memstoreSizeMB memstore size, in MB
-       */
-      public void setMemstoreSizeMB(int memstoreSizeMB) {
-        this.memstoreSizeMB = memstoreSizeMB;
-      }
-
-      /**
-       * @param storefileIndexSizeMB total size of store file indexes, in MB
-       */
-      public void setStorefileIndexSizeMB(int storefileIndexSizeMB) {
-        this.storefileIndexSizeMB = storefileIndexSizeMB;
-      }
-    }
-
-    private String name;
-    private long startCode;
-    private int requests;
-    private int heapSizeMB;
-    private int maxHeapSizeMB;
-    private List<Region> regions = new ArrayList<Region>();
-
-    /**
-     * Add a region name to the list
-     * @param name the region name
-     */
-    public void addRegion(byte[] name, int stores, int storefiles,
-        int storefileSizeMB, int memstoreSizeMB, int storefileIndexSizeMB,
-        long readRequestsCount, long writeRequestsCount, int rootIndexSizeKB,
-        int totalStaticIndexSizeKB, int totalStaticBloomSizeKB,
-        long totalCompactingKVs, long currentCompactedKVs) { 
-      regions.add(new Region(name, stores, storefiles, storefileSizeMB,
-        memstoreSizeMB, storefileIndexSizeMB, readRequestsCount,
-        writeRequestsCount, rootIndexSizeKB, totalStaticIndexSizeKB,
-        totalStaticBloomSizeKB, totalCompactingKVs, currentCompactedKVs));
-    }
-
-    /**
-     * @param index the index
-     * @return the region name
-     */
-    public Region getRegion(int index) {
-      return regions.get(index);
-    }
-
-    /**
-     * Default constructor
-     */
-    public Node() {}
-
-    /**
-     * Constructor
-     * @param name the region server name
-     * @param startCode the region server's start code
-     */
-    public Node(String name, long startCode) {
-      this.name = name;
-      this.startCode = startCode;
-    }
-
-    /**
-     * @return the region server's name
-     */
-    @XmlAttribute
-    public String getName() {
-      return name;
-    }
-
-    /**
-     * @return the region server's start code
-     */
-    @XmlAttribute
-    public long getStartCode() {
-      return startCode;
-    }
-
-    /**
-     * @return the current heap size, in MB
-     */
-    @XmlAttribute
-    public int getHeapSizeMB() {
-      return heapSizeMB;
-    }
-
-    /**
-     * @return the maximum heap size, in MB
-     */
-    @XmlAttribute
-    public int getMaxHeapSizeMB() {
-      return maxHeapSizeMB;
-    }
-
-    /**
-     * @return the list of regions served by the region server
-     */
-    @XmlElement(name="Region")
-    public List<Region> getRegions() {
-      return regions;
-    }
-
-    /**
-     * @return the number of requests per second processed by the region server
-     */
-    @XmlAttribute
-    public int getRequests() {
-      return requests;
-    }
-
-    /**
-     * @param name the region server's hostname
-     */
-    public void setName(String name) {
-      this.name = name;
-    }
-
-    /**
-     * @param startCode the region server's start code
-     */
-    public void setStartCode(long startCode) {
-      this.startCode = startCode;
-    }
-
-    /**
-     * @param heapSizeMB the current heap size, in MB
-     */
-    public void setHeapSizeMB(int heapSizeMB) {
-      this.heapSizeMB = heapSizeMB;
-    }
-
-    /**
-     * @param maxHeapSizeMB the maximum heap size, in MB
-     */
-    public void setMaxHeapSizeMB(int maxHeapSizeMB) {
-      this.maxHeapSizeMB = maxHeapSizeMB;
-    }
-
-    /**
-     * @param regions a list of regions served by the region server
-     */
-    public void setRegions(List<Region> regions) {
-      this.regions = regions;
-    }
-
-    /**
-     * @param requests the number of requests per second processed by the
-     * region server
-     */
-    public void setRequests(int requests) {
-      this.requests = requests;
-    }
-  }
-
-  private List<Node> liveNodes = new ArrayList<Node>();
-  private List<String> deadNodes = new ArrayList<String>();
-  private int regions;
-  private int requests;
-  private double averageLoad;
-
-  /**
-   * Add a live node to the cluster representation.
-   * @param name the region server name
-   * @param startCode the region server's start code
-   * @param heapSizeMB the current heap size, in MB
-   * @param maxHeapSizeMB the maximum heap size, in MB
-   */
-  public Node addLiveNode(String name, long startCode, int heapSizeMB, int maxHeapSizeMB) {
-    Node node = new Node(name, startCode);
-    node.setHeapSizeMB(heapSizeMB);
-    node.setMaxHeapSizeMB(maxHeapSizeMB);
-    liveNodes.add(node);
-    return node;
-  }
-
-  /**
-   * @param index the index
-   * @return the region server model
-   */
-  public Node getLiveNode(int index) {
-    return liveNodes.get(index);
-  }
-
-  /**
-   * Add a dead node to the cluster representation.
-   * @param node the dead region server's name
-   */
-  public void addDeadNode(String node) {
-    deadNodes.add(node);
-  }
-
-  /**
-   * @param index the index
-   * @return the dead region server's name
-   */
-  public String getDeadNode(int index) {
-    return deadNodes.get(index);
-  }
-
-  /**
-   * Default constructor
-   */
-  public StorageClusterStatusModel() {
-  }
-
-  /**
-   * @return the list of live nodes
-   */
-  @XmlElement(name = "Node")
-  @XmlElementWrapper(name = "LiveNodes")
-  public List<Node> getLiveNodes() {
-    return liveNodes;
-  }
-
-  /**
-   * @return the list of dead nodes
-   */
-  @XmlElement(name = "Node")
-  @XmlElementWrapper(name = "DeadNodes")
-  public List<String> getDeadNodes() {
-    return deadNodes;
-  }
-
-  /**
-   * @return the total number of regions served by the cluster
-   */
-  @XmlAttribute
-  public int getRegions() {
-    return regions;
-  }
-
-  /**
-   * @return the total number of requests per second handled by the cluster in
-   * the last reporting interval
-   */
-  @XmlAttribute
-  public int getRequests() {
-    return requests;
-  }
-
-  /**
-   * @return the average load of the region servers in the cluster
-   */
-  @XmlAttribute
-  public double getAverageLoad() {
-    return averageLoad;
-  }
-
-  /**
-   * @param nodes the list of live node models
-   */
-  public void setLiveNodes(List<Node> nodes) {
-    this.liveNodes = nodes;
-  }
-
-  /**
-   * @param nodes the list of dead node names
-   */
-  public void setDeadNodes(List<String> nodes) {
-    this.deadNodes = nodes;
-  }
-
-  /**
-   * @param regions the total number of regions served by the cluster
-   */
-  public void setRegions(int regions) {
-    this.regions = regions;
-  }
-
-  /**
-   * @param requests the total number of requests per second handled by the
-   * cluster
-   */
-  public void setRequests(int requests) {
-    this.requests = requests;
-  }
-
-  /**
-   * @param averageLoad the average load of region servers in the cluster
-   */
-  public void setAverageLoad(double averageLoad) {
-    this.averageLoad = averageLoad;
-  }
-
-  /*
-   * (non-Javadoc)
-   * @see java.lang.Object#toString()
-   */
-  @Override
-  public String toString() {
-    StringBuilder sb = new StringBuilder();
-    sb.append(String.format("%d live servers, %d dead servers, " + 
-      "%.4f average load%n%n", liveNodes.size(), deadNodes.size(),
-      averageLoad));
-    if (!liveNodes.isEmpty()) {
-      sb.append(liveNodes.size());
-      sb.append(" live servers\n");
-      for (Node node: liveNodes) {
-        sb.append("    ");
-        sb.append(node.name);
-        sb.append(' ');
-        sb.append(node.startCode);
-        sb.append("\n        requests=");
-        sb.append(node.requests);
-        sb.append(", regions=");
-        sb.append(node.regions.size());
-        sb.append("\n        heapSizeMB=");
-        sb.append(node.heapSizeMB);
-        sb.append("\n        maxHeapSizeMB=");
-        sb.append(node.maxHeapSizeMB);
-        sb.append("\n\n");
-        for (Node.Region region: node.regions) {
-          sb.append("        ");
-          sb.append(Bytes.toString(region.name));
-          sb.append("\n            stores=");
-          sb.append(region.stores);
-          sb.append("\n            storefiless=");
-          sb.append(region.storefiles);
-          sb.append("\n            storefileSizeMB=");
-          sb.append(region.storefileSizeMB);
-          sb.append("\n            memstoreSizeMB=");
-          sb.append(region.memstoreSizeMB);
-          sb.append("\n            storefileIndexSizeMB=");
-          sb.append(region.storefileIndexSizeMB);
-          sb.append("\n            readRequestsCount=");
-          sb.append(region.readRequestsCount);
-          sb.append("\n            writeRequestsCount=");
-          sb.append(region.writeRequestsCount);
-          sb.append("\n            rootIndexSizeKB=");
-          sb.append(region.rootIndexSizeKB);
-          sb.append("\n            totalStaticIndexSizeKB=");
-          sb.append(region.totalStaticIndexSizeKB);
-          sb.append("\n            totalStaticBloomSizeKB=");
-          sb.append(region.totalStaticBloomSizeKB);
-          sb.append("\n            totalCompactingKVs=");
-          sb.append(region.totalCompactingKVs);
-          sb.append("\n            currentCompactedKVs=");
-          sb.append(region.currentCompactedKVs);
-          sb.append('\n');
-        }
-        sb.append('\n');
-      }
-    }
-    if (!deadNodes.isEmpty()) {
-      sb.append('\n');
-      sb.append(deadNodes.size());
-      sb.append(" dead servers\n");
-      for (String node: deadNodes) {
-        sb.append("    ");
-        sb.append(node);
-        sb.append('\n');
-      }
-    }
-    return sb.toString();
-  }
-
-  @Override
-  public byte[] createProtobufOutput() {
-    StorageClusterStatus.Builder builder = StorageClusterStatus.newBuilder();
-    builder.setRegions(regions);
-    builder.setRequests(requests);
-    builder.setAverageLoad(averageLoad);
-    for (Node node: liveNodes) {
-      StorageClusterStatus.Node.Builder nodeBuilder = 
-        StorageClusterStatus.Node.newBuilder();
-      nodeBuilder.setName(node.name);
-      nodeBuilder.setStartCode(node.startCode);
-      nodeBuilder.setRequests(node.requests);
-      nodeBuilder.setHeapSizeMB(node.heapSizeMB);
-      nodeBuilder.setMaxHeapSizeMB(node.maxHeapSizeMB);
-      for (Node.Region region: node.regions) {
-        StorageClusterStatus.Region.Builder regionBuilder =
-          StorageClusterStatus.Region.newBuilder();
-        regionBuilder.setName(ByteStringer.wrap(region.name));
-        regionBuilder.setStores(region.stores);
-        regionBuilder.setStorefiles(region.storefiles);
-        regionBuilder.setStorefileSizeMB(region.storefileSizeMB);
-        regionBuilder.setMemstoreSizeMB(region.memstoreSizeMB);
-        regionBuilder.setStorefileIndexSizeMB(region.storefileIndexSizeMB);
-        regionBuilder.setReadRequestsCount(region.readRequestsCount);
-        regionBuilder.setWriteRequestsCount(region.writeRequestsCount);
-        regionBuilder.setRootIndexSizeKB(region.rootIndexSizeKB);
-        regionBuilder.setTotalStaticIndexSizeKB(region.totalStaticIndexSizeKB);
-        regionBuilder.setTotalStaticBloomSizeKB(region.totalStaticBloomSizeKB);
-        regionBuilder.setTotalCompactingKVs(region.totalCompactingKVs);
-        regionBuilder.setCurrentCompactedKVs(region.currentCompactedKVs);
-        nodeBuilder.addRegions(regionBuilder);
-      }
-      builder.addLiveNodes(nodeBuilder);
-    }
-    for (String node: deadNodes) {
-      builder.addDeadNodes(node);
-    }
-    return builder.build().toByteArray();
-  }
-
-  @Override
-  public ProtobufMessageHandler getObjectFromMessage(byte[] message)
-      throws IOException {
-    StorageClusterStatus.Builder builder = StorageClusterStatus.newBuilder();
-    builder.mergeFrom(message);
-    if (builder.hasRegions()) {
-      regions = builder.getRegions();
-    }
-    if (builder.hasRequests()) {
-      requests = builder.getRequests();
-    }
-    if (builder.hasAverageLoad()) {
-      averageLoad = builder.getAverageLoad();
-    }
-    for (StorageClusterStatus.Node node: builder.getLiveNodesList()) {
-      long startCode = node.hasStartCode() ? node.getStartCode() : -1;
-      StorageClusterStatusModel.Node nodeModel = 
-        addLiveNode(node.getName(), startCode, node.getHeapSizeMB(),
-          node.getMaxHeapSizeMB());
-      int requests = node.hasRequests() ? node.getRequests() : 0;
-      nodeModel.setRequests(requests);
-      for (StorageClusterStatus.Region region: node.getRegionsList()) {
-        nodeModel.addRegion(
-          region.getName().toByteArray(),
-          region.getStores(),
-          region.getStorefiles(),
-          region.getStorefileSizeMB(),
-          region.getMemstoreSizeMB(),
-          region.getStorefileIndexSizeMB(),
-          region.getReadRequestsCount(),
-          region.getWriteRequestsCount(),
-          region.getRootIndexSizeKB(),
-          region.getTotalStaticIndexSizeKB(),
-          region.getTotalStaticBloomSizeKB(),
-          region.getTotalCompactingKVs(),
-          region.getCurrentCompactedKVs());
-      }
-    }
-    for (String node: builder.getDeadNodesList()) {
-      addDeadNode(node);
-    }
-    return this;
-  }
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/052a6f07/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterVersionModel.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterVersionModel.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterVersionModel.java
deleted file mode 100644
index 4321a8e..0000000
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterVersionModel.java
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.rest.model;
-
-import org.codehaus.jackson.annotate.JsonValue;
-
-import java.io.Serializable;
-
-import javax.xml.bind.annotation.XmlRootElement;
-import javax.xml.bind.annotation.XmlValue;
-
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-
-/**
- * Simple representation of the version of the storage cluster
- * 
- * <pre>
- * &lt;complexType name="StorageClusterVersion"&gt;
- *   &lt;attribute name="version" type="string"&gt;&lt;/attribute&gt;
- * &lt;/complexType&gt;
- * </pre>
- */
-@XmlRootElement(name="ClusterVersion")
-@InterfaceAudience.Private
-public class StorageClusterVersionModel implements Serializable {
-	private static final long serialVersionUID = 1L;
-
-	private String version;
-
-	/**
-	 * @return the storage cluster version
-	 */
-	@XmlValue
-	public String getVersion() {
-	  return version;
-	}
-
-	/**
-	 * @param version the storage cluster version
-	 */
-	public void setVersion(String version) {
-	  this.version = version;
-	}
-
-	/* (non-Javadoc)
-	 * @see java.lang.Object#toString()
-	 */
-  @JsonValue
-	@Override
-	public String toString() {
-	  return version;
-	}
-
-    //needed for jackson deserialization
-    private static StorageClusterVersionModel valueOf(String value) {
-      StorageClusterVersionModel versionModel
-          = new StorageClusterVersionModel();
-      versionModel.setVersion(value);
-      return versionModel;
-    }
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/052a6f07/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/TableInfoModel.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/TableInfoModel.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/TableInfoModel.java
deleted file mode 100644
index 700e766..0000000
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/TableInfoModel.java
+++ /dev/null
@@ -1,159 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.rest.model;
-
-import java.io.IOException;
-import java.io.Serializable;
-import java.util.ArrayList;
-import java.util.List;
-
-import javax.xml.bind.annotation.XmlAttribute;
-import javax.xml.bind.annotation.XmlElement;
-import javax.xml.bind.annotation.XmlRootElement;
-
-import org.apache.hadoop.hbase.util.ByteStringer;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
-import org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo;
-
-/**
- * Representation of a list of table regions. 
- * 
- * <pre>
- * &lt;complexType name="TableInfo"&gt;
- *   &lt;sequence&gt;
- *     &lt;element name="region" type="tns:TableRegion" 
- *       maxOccurs="unbounded" minOccurs="1"&gt;&lt;/element&gt;
- *   &lt;/sequence&gt;
- *   &lt;attribute name="name" type="string"&gt;&lt;/attribute&gt;
- * &lt;/complexType&gt;
- * </pre>
- */
-@XmlRootElement(name="TableInfo")
-@InterfaceAudience.Private
-public class TableInfoModel implements Serializable, ProtobufMessageHandler {
-  private static final long serialVersionUID = 1L;
-
-  private String name;
-  private List<TableRegionModel> regions = new ArrayList<TableRegionModel>();
-
-  /**
-   * Default constructor
-   */
-  public TableInfoModel() {}
-
-  /**
-   * Constructor
-   * @param name
-   */
-  public TableInfoModel(String name) {
-    this.name = name;
-  }
-
-  /**
-   * Add a region model to the list
-   * @param region the region
-   */
-  public void add(TableRegionModel region) {
-    regions.add(region);
-  }
-
-  /**
-   * @param index the index
-   * @return the region model
-   */
-  public TableRegionModel get(int index) {
-    return regions.get(index);
-  }
-
-  /**
-   * @return the table name
-   */
-  @XmlAttribute
-  public String getName() {
-    return name;
-  }
-
-  /**
-   * @return the regions
-   */
-  @XmlElement(name="Region")
-  public List<TableRegionModel> getRegions() {
-    return regions;
-  }
-
-  /**
-   * @param name the table name
-   */
-  public void setName(String name) {
-    this.name = name;
-  }
-
-  /**
-   * @param regions the regions to set
-   */
-  public void setRegions(List<TableRegionModel> regions) {
-    this.regions = regions;
-  }
-
-  /* (non-Javadoc)
-   * @see java.lang.Object#toString()
-   */
-  @Override
-  public String toString() {
-    StringBuilder sb = new StringBuilder();
-    for(TableRegionModel aRegion : regions) {
-      sb.append(aRegion.toString());
-      sb.append('\n');
-    }
-    return sb.toString();
-  }
-
-  @Override
-  public byte[] createProtobufOutput() {
-    TableInfo.Builder builder = TableInfo.newBuilder();
-    builder.setName(name);
-    for (TableRegionModel aRegion: regions) {
-      TableInfo.Region.Builder regionBuilder = TableInfo.Region.newBuilder();
-      regionBuilder.setName(aRegion.getName());
-      regionBuilder.setId(aRegion.getId());
-      regionBuilder.setStartKey(ByteStringer.wrap(aRegion.getStartKey()));
-      regionBuilder.setEndKey(ByteStringer.wrap(aRegion.getEndKey()));
-      regionBuilder.setLocation(aRegion.getLocation());
-      builder.addRegions(regionBuilder);
-    }
-    return builder.build().toByteArray();
-  }
-
-  @Override
-  public ProtobufMessageHandler getObjectFromMessage(byte[] message) 
-      throws IOException {
-    TableInfo.Builder builder = TableInfo.newBuilder();
-    builder.mergeFrom(message);
-    setName(builder.getName());
-    for (TableInfo.Region region: builder.getRegionsList()) {
-      add(new TableRegionModel(builder.getName(), region.getId(), 
-          region.getStartKey().toByteArray(),
-          region.getEndKey().toByteArray(),
-          region.getLocation()));
-    }
-    return this;
-  }
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/052a6f07/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/TableListModel.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/TableListModel.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/TableListModel.java
deleted file mode 100644
index 596adac..0000000
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/TableListModel.java
+++ /dev/null
@@ -1,113 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.rest.model;
-
-import java.io.IOException;
-import java.io.Serializable;
-import java.util.ArrayList;
-import java.util.List;
-
-import javax.xml.bind.annotation.XmlElementRef;
-import javax.xml.bind.annotation.XmlRootElement;
-
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
-import org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList;
-
-/**
- * Simple representation of a list of table names.
- * Serializable to/from both XML (via JAXB) and the TableList protobuf message.
- */
-@XmlRootElement(name="TableList")
-@InterfaceAudience.Private
-public class TableListModel implements Serializable, ProtobufMessageHandler {
-
-	private static final long serialVersionUID = 1L;
-
-	private List<TableModel> tables = new ArrayList<TableModel>();
-
-	/**
-	 * Default constructor
-	 */
-	public TableListModel() {}
-
-	/**
-	 * Add the table name model to the list
-	 * @param table the table model
-	 */
-	public void add(TableModel table) {
-		tables.add(table);
-	}
-	
-	/**
-	 * @param index the index
-	 * @return the table model
-	 * @throws IndexOutOfBoundsException if the index is out of range
-	 */
-	public TableModel get(int index) {
-		return tables.get(index);
-	}
-
-	/**
-	 * @return the tables
-	 */
-	@XmlElementRef(name="table")
-	public List<TableModel> getTables() {
-		return tables;
-	}
-
-	/**
-	 * @param tables the tables to set
-	 */
-	public void setTables(List<TableModel> tables) {
-		this.tables = tables;
-	}
-
-	/* (non-Javadoc)
-	 * @see java.lang.Object#toString()
-	 */
-	@Override
-	public String toString() {
-		StringBuilder sb = new StringBuilder();
-		// One table per line, rendered by TableModel#toString() (the name).
-		for(TableModel aTable : tables) {
-			sb.append(aTable.toString());
-			sb.append('\n');
-		}
-		return sb.toString();
-	}
-
-	@Override
-	public byte[] createProtobufOutput() {
-		// Only the table names are carried in the protobuf representation.
-		TableList.Builder builder = TableList.newBuilder();
-		for (TableModel aTable : tables) {
-			builder.addName(aTable.getName());
-		}
-		return builder.build().toByteArray();
-	}
-
-  @Override
-  public ProtobufMessageHandler getObjectFromMessage(byte[] message)
-      throws IOException {
-    // Inverse of createProtobufOutput(): wrap each name in a TableModel.
-    TableList.Builder builder = TableList.newBuilder();
-    builder.mergeFrom(message);
-    for (String table: builder.getNameList()) {
-      this.add(new TableModel(table));
-    }
-    return this;
-  }
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/052a6f07/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/TableModel.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/TableModel.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/TableModel.java
deleted file mode 100644
index 0fb0d6e..0000000
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/TableModel.java
+++ /dev/null
@@ -1,84 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.rest.model;
-
-import java.io.Serializable;
-
-import javax.xml.bind.annotation.XmlAttribute;
-import javax.xml.bind.annotation.XmlRootElement;
-
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-
-/**
- * Simple representation of a table name.
- * 
- * <pre>
- * &lt;complexType name="Table"&gt;
- *   &lt;sequence&gt;
- *     &lt;element name="name" type="string"&gt;&lt;/element&gt;
- *   &lt;/sequence&gt;
- * &lt;/complexType&gt;
- * </pre>
- */
-@XmlRootElement(name="table")
-@InterfaceAudience.Private
-public class TableModel implements Serializable {
-
-	private static final long serialVersionUID = 1L;
-	
-	private String name;
-	
-	/**
-	 * Default constructor
-	 */
-	public TableModel() {}
-
-	/**
-	 * Constructor
-	 * @param name the table name
-	 */
-	public TableModel(String name) {
-		super();
-		this.name = name;
-	}
-
-	/**
-	 * @return the name
-	 */
-	@XmlAttribute
-	public String getName() {
-		return name;
-	}
-
-	/**
-	 * @param name the name to set
-	 */
-	public void setName(String name) {
-		this.name = name;
-	}
-
-	/* (non-Javadoc)
-	 * @see java.lang.Object#toString()
-	 */
-	@Override
-	public String toString() {
-		return this.name;
-	}
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/052a6f07/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/TableRegionModel.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/TableRegionModel.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/TableRegionModel.java
deleted file mode 100644
index d9b2b65..0000000
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/TableRegionModel.java
+++ /dev/null
@@ -1,196 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.rest.model;
-
-import java.io.Serializable;
-
-import javax.xml.bind.annotation.XmlAttribute;
-import javax.xml.bind.annotation.XmlRootElement;
-
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.util.Bytes;
-
-/**
- * Representation of a region of a table and its current location on the
- * storage cluster.
- * 
- * <pre>
- * &lt;complexType name="TableRegion"&gt;
- *   &lt;attribute name="name" type="string"&gt;&lt;/attribute&gt;
- *   &lt;attribute name="id" type="int"&gt;&lt;/attribute&gt;
- *   &lt;attribute name="startKey" type="base64Binary"&gt;&lt;/attribute&gt;
- *   &lt;attribute name="endKey" type="base64Binary"&gt;&lt;/attribute&gt;
- *   &lt;attribute name="location" type="string"&gt;&lt;/attribute&gt;
- *  &lt;/complexType&gt;
- * </pre>
- */
-@XmlRootElement(name="Region")
-@InterfaceAudience.Private
-public class TableRegionModel implements Serializable {
-
-  private static final long serialVersionUID = 1L;
-
-  private String table;
-  private long id;
-  private byte[] startKey; 
-  private byte[] endKey;
-  private String location;
-
-  /**
-   * Constructor
-   */
-  public TableRegionModel() {}
-
-  /**
-   * Constructor
-   * @param table the table name
-   * @param id the encoded id of the region
-   * @param startKey the start key of the region
-   * @param endKey the end key of the region
-   */
-  public TableRegionModel(String table, long id, byte[] startKey,
-      byte[] endKey) {
-    // Location is unknown; delegates to the five-argument constructor.
-    this(table, id, startKey, endKey, null);
-  }
-
-  /**
-   * Constructor
-   * @param table the table name
-   * @param id the encoded id of the region
-   * @param startKey the start key of the region
-   * @param endKey the end key of the region
-   * @param location the name and port of the region server hosting the region
-   */
-  public TableRegionModel(String table, long id, byte[] startKey,
-      byte[] endKey, String location) {
-    this.table = table;
-    this.id = id;
-    this.startKey = startKey;
-    this.endKey = endKey;
-    this.location = location;
-  }
-
-  /**
-   * Rebuilds the full region name from its components via
-   * HRegionInfo.createRegionName; the new-format flag is set for
-   * non-system tables.
-   * @return the region name
-   */
-  @XmlAttribute
-  public String getName() {
-    byte [] tableNameAsBytes = Bytes.toBytes(this.table);
-    TableName tableName = TableName.valueOf(tableNameAsBytes);
-    byte [] nameAsBytes = HRegionInfo.createRegionName(
-      tableName, this.startKey, this.id, !tableName.isSystemTable());
-    return Bytes.toString(nameAsBytes);
-  }
-
-  /**
-   * @return the encoded region id
-   */
-  @XmlAttribute 
-  public long getId() {
-    return id;
-  }
-
-  /**
-   * @return the start key
-   */
-  @XmlAttribute 
-  public byte[] getStartKey() {
-    return startKey;
-  }
-
-  /**
-   * @return the end key
-   */
-  @XmlAttribute 
-  public byte[] getEndKey() {
-    return endKey;
-  }
-
-  /**
-   * @return the name and port of the region server hosting the region
-   */
-  @XmlAttribute 
-  public String getLocation() {
-    return location;
-  }
-
-  /**
-   * Parses a printable region name back into table, start key and id.
-   * NOTE(review): assumes the form &lt;table&gt;,&lt;startKey&gt;,&lt;id&gt;.&lt;encoded&gt;.
-   * A start key containing ',' would break this parse — confirm callers
-   * only pass names produced by getName(). Does not restore endKey/location.
-   * @param name region printable name
-   */
-  public void setName(String name) {
-    String split[] = name.split(",");
-    this.table = split[0];
-    this.startKey = Bytes.toBytes(split[1]);
-    String tail = split[2];
-    split = tail.split("\\.");
-    id = Long.valueOf(split[0]);
-  }
-
-  /**
-   * @param id the region's encoded id
-   */
-  public void setId(long id) {
-    this.id = id;
-  }
-
-  /**
-   * @param startKey the start key
-   */
-  public void setStartKey(byte[] startKey) {
-    this.startKey = startKey;
-  }
-
-  /**
-   * @param endKey the end key
-   */
-  public void setEndKey(byte[] endKey) {
-    this.endKey = endKey;
-  }
-
-  /**
-   * @param location the name and port of the region server hosting the region
-   */
-  public void setLocation(String location) {
-    this.location = location;
-  }
-
-  /* (non-Javadoc)
-   * @see java.lang.Object#toString()
-   */
-  @Override
-  public String toString() {
-    StringBuilder sb = new StringBuilder();
-    sb.append(getName());
-    sb.append(" [\n  id=");
-    sb.append(id);
-    sb.append("\n  startKey='");
-    sb.append(Bytes.toString(startKey));
-    sb.append("'\n  endKey='");
-    sb.append(Bytes.toString(endKey));
-    // Location line is only emitted when known; the trailing quote below
-    // closes either the endKey or the location string.
-    if (location != null) {
-      sb.append("'\n  location='");
-      sb.append(location);
-    }
-    sb.append("'\n]\n");
-    return sb.toString();
-  }
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/052a6f07/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/TableSchemaModel.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/TableSchemaModel.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/TableSchemaModel.java
deleted file mode 100644
index d843e79..0000000
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/TableSchemaModel.java
+++ /dev/null
@@ -1,361 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.rest.model;
-
-import java.io.IOException;
-import java.io.Serializable;
-import java.util.ArrayList;
-import java.util.LinkedHashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-
-import javax.xml.bind.annotation.XmlAnyAttribute;
-import javax.xml.bind.annotation.XmlAttribute;
-import javax.xml.bind.annotation.XmlElement;
-import javax.xml.bind.annotation.XmlRootElement;
-import javax.xml.namespace.QName;
-
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
-import org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema;
-import org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.codehaus.jackson.annotate.JsonAnyGetter;
-import org.codehaus.jackson.annotate.JsonAnySetter;
-import org.codehaus.jackson.annotate.JsonIgnore;
-
-/**
- * A representation of HBase table descriptors.
- * 
- * <pre>
- * &lt;complexType name="TableSchema"&gt;
- *   &lt;sequence&gt;
- *     &lt;element name="column" type="tns:ColumnSchema" 
- *       maxOccurs="unbounded" minOccurs="1"&gt;&lt;/element&gt;
- *   &lt;/sequence&gt;
- *   &lt;attribute name="name" type="string"&gt;&lt;/attribute&gt;
- *   &lt;anyAttribute&gt;&lt;/anyAttribute&gt;
- * &lt;/complexType&gt;
- * </pre>
- */
-@XmlRootElement(name="TableSchema")
-@InterfaceAudience.Private
-public class TableSchemaModel implements Serializable, ProtobufMessageHandler {
-  private static final long serialVersionUID = 1L;
-  // Well-known attribute names that get special handling (typed protobuf
-  // fields and/or the __get/__set convenience accessors below).
-  private static final QName IS_META = new QName(HTableDescriptor.IS_META);
-  private static final QName IS_ROOT = new QName(HTableDescriptor.IS_ROOT);
-  private static final QName READONLY = new QName(HTableDescriptor.READONLY);
-  private static final QName TTL = new QName(HColumnDescriptor.TTL);
-  private static final QName VERSIONS = new QName(HConstants.VERSIONS);
-  private static final QName COMPRESSION = 
-    new QName(HColumnDescriptor.COMPRESSION);
-
-  private String name;
-  private Map<QName,Object> attrs = new LinkedHashMap<QName,Object>();
-  private List<ColumnSchemaModel> columns = new ArrayList<ColumnSchemaModel>();
-  
-  /**
-   * Default constructor.
-   */
-  public TableSchemaModel() {}
-
-  /**
-   * Constructor
-   * @param htd the table descriptor
-   */
-  public TableSchemaModel(HTableDescriptor htd) {
-    setName(htd.getTableName().getNameAsString());
-    // Copy table-level key/value metadata into the generic attribute map.
-    for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> e:
-        htd.getValues().entrySet()) {
-      addAttribute(Bytes.toString(e.getKey().get()), 
-        Bytes.toString(e.getValue().get()));
-    }
-    // Mirror each column family and its key/value metadata.
-    for (HColumnDescriptor hcd: htd.getFamilies()) {
-      ColumnSchemaModel columnModel = new ColumnSchemaModel();
-      columnModel.setName(hcd.getNameAsString());
-      for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> e:
-          hcd.getValues().entrySet()) {
-        columnModel.addAttribute(Bytes.toString(e.getKey().get()), 
-            Bytes.toString(e.getValue().get()));
-      }
-      addColumnFamily(columnModel);
-    }
-  }
-
-  /**
-   * Add an attribute to the table descriptor
-   * @param name attribute name
-   * @param value attribute value
-   */
-  @JsonAnySetter
-  public void addAttribute(String name, Object value) {
-    attrs.put(new QName(name), value);
-  }
-
-  /**
-   * Return a table descriptor value as a string. Calls toString() on the
-   * object stored in the descriptor value map.
-   * @param name the attribute name
-   * @return the attribute value, or null if not present
-   */
-  public String getAttribute(String name) {
-    Object o = attrs.get(new QName(name));
-    return o != null ? o.toString() : null;
-  }
-
-  /**
-   * Add a column family to the table descriptor
-   * @param family the column family model
-   */
-  public void addColumnFamily(ColumnSchemaModel family) {
-    columns.add(family);
-  }
-
-  /**
-   * Retrieve the column family at the given index from the table descriptor
-   * @param index the index
-   * @return the column family model
-   */
-  public ColumnSchemaModel getColumnFamily(int index) {
-    return columns.get(index);
-  }
-
-  /**
-   * @return the table name
-   */
-  @XmlAttribute
-  public String getName() {
-    return name;
-  }
-
-  /**
-   * @return the map for holding unspecified (user) attributes
-   */
-  @XmlAnyAttribute
-  @JsonAnyGetter
-  public Map<QName,Object> getAny() {
-    return attrs;
-  }
-
-  /**
-   * @return the columns
-   */
-  @XmlElement(name="ColumnSchema")
-  public List<ColumnSchemaModel> getColumns() {
-    return columns;
-  }
-
-  /**
-   * @param name the table name
-   */
-  public void setName(String name) {
-    this.name = name;
-  }
-
-  /**
-   * @param columns the columns to set
-   */
-  public void setColumns(List<ColumnSchemaModel> columns) {
-    this.columns = columns;
-  }
-
-  /* (non-Javadoc)
-   * Renders the schema in HBase shell-like syntax:
-   * { NAME=> '...', ATTR => '...', COLUMNS => [ ... ] }
-   * @see java.lang.Object#toString()
-   */
-  @Override
-  public String toString() {
-    StringBuilder sb = new StringBuilder();
-    sb.append("{ NAME=> '");
-    sb.append(name);
-    sb.append('\'');
-    for (Map.Entry<QName,Object> e: attrs.entrySet()) {
-      sb.append(", ");
-      sb.append(e.getKey().getLocalPart());
-      sb.append(" => '");
-      sb.append(e.getValue().toString());
-      sb.append('\'');
-    }
-    sb.append(", COLUMNS => [ ");
-    Iterator<ColumnSchemaModel> i = columns.iterator();
-    while (i.hasNext()) {
-      ColumnSchemaModel family = i.next();
-      sb.append(family.toString());
-      if (i.hasNext()) {
-        sb.append(',');
-      }
-      sb.append(' ');
-    }
-    sb.append("] }");
-    return sb.toString();
-  }
-
-  // getters and setters for common schema attributes
-
-  // cannot be standard bean type getters and setters, otherwise this would
-  // confuse JAXB
-
-  /**
-   * @return true if IS_META attribute exists and is true
-   */
-  public boolean __getIsMeta() {
-    Object o = attrs.get(IS_META);
-    return o != null ? Boolean.valueOf(o.toString()) : false;
-  }
-
-  /**
-   * @return true if IS_ROOT attribute exists and is true
-   */
-  public boolean __getIsRoot() {
-    Object o = attrs.get(IS_ROOT);
-    return o != null ? Boolean.valueOf(o.toString()) : false;
-  }
-
-  /**
-   * @return true if READONLY attribute exists and is true, otherwise the
-   * HTableDescriptor default
-   */
-  public boolean __getReadOnly() {
-    Object o = attrs.get(READONLY);
-    return o != null ? 
-      Boolean.valueOf(o.toString()) : HTableDescriptor.DEFAULT_READONLY;
-  }
-
-  /**
-   * @param value desired value of IS_META attribute
-   */
-  public void __setIsMeta(boolean value) {
-    attrs.put(IS_META, Boolean.toString(value));
-  }
-
-  /**
-   * @param value desired value of IS_ROOT attribute
-   */
-  public void __setIsRoot(boolean value) {
-    attrs.put(IS_ROOT, Boolean.toString(value));
-  }
-
-  /**
-   * @param value desired value of READONLY attribute
-   */
-  public void __setReadOnly(boolean value) {
-    attrs.put(READONLY, Boolean.toString(value));
-  }
-
-  @Override
-  public byte[] createProtobufOutput() {
-    TableSchema.Builder builder = TableSchema.newBuilder();
-    builder.setName(name);
-    // Table-level attributes travel as generic name/value pairs.
-    for (Map.Entry<QName, Object> e: attrs.entrySet()) {
-      TableSchema.Attribute.Builder attrBuilder = 
-        TableSchema.Attribute.newBuilder();
-      attrBuilder.setName(e.getKey().getLocalPart());
-      attrBuilder.setValue(e.getValue().toString());
-      builder.addAttrs(attrBuilder);
-    }
-    for (ColumnSchemaModel family: columns) {
-      Map<QName, Object> familyAttrs = family.getAny();
-      ColumnSchema.Builder familyBuilder = ColumnSchema.newBuilder();
-      familyBuilder.setName(family.getName());
-      for (Map.Entry<QName, Object> e: familyAttrs.entrySet()) {
-        ColumnSchema.Attribute.Builder attrBuilder = 
-          ColumnSchema.Attribute.newBuilder();
-        attrBuilder.setName(e.getKey().getLocalPart());
-        attrBuilder.setValue(e.getValue().toString());
-        familyBuilder.addAttrs(attrBuilder);
-      }
-      // TTL/VERSIONS/COMPRESSION are additionally promoted to typed
-      // protobuf fields (they also remain in the generic attrs above).
-      if (familyAttrs.containsKey(TTL)) {
-        familyBuilder.setTtl(
-          Integer.valueOf(familyAttrs.get(TTL).toString()));
-      }
-      if (familyAttrs.containsKey(VERSIONS)) {
-        familyBuilder.setMaxVersions(
-          Integer.valueOf(familyAttrs.get(VERSIONS).toString()));
-      }
-      if (familyAttrs.containsKey(COMPRESSION)) {
-        familyBuilder.setCompression(familyAttrs.get(COMPRESSION).toString());
-      }
-      builder.addColumns(familyBuilder);
-    }
-    if (attrs.containsKey(READONLY)) {
-      builder.setReadOnly(
-        Boolean.valueOf(attrs.get(READONLY).toString()));
-    }
-    return builder.build().toByteArray();
-  }
-
-  @Override
-  public ProtobufMessageHandler getObjectFromMessage(byte[] message) 
-      throws IOException {
-    // Reverse of createProtobufOutput(): typed fields (readOnly, ttl,
-    // maxVersions, compression) are folded back into the attribute maps.
-    TableSchema.Builder builder = TableSchema.newBuilder();
-    builder.mergeFrom(message);
-    this.setName(builder.getName());
-    for (TableSchema.Attribute attr: builder.getAttrsList()) {
-      this.addAttribute(attr.getName(), attr.getValue());
-    }
-    if (builder.hasReadOnly()) {
-      this.addAttribute(HTableDescriptor.READONLY, builder.getReadOnly());
-    }
-    for (ColumnSchema family: builder.getColumnsList()) {
-      ColumnSchemaModel familyModel = new ColumnSchemaModel();
-      familyModel.setName(family.getName());
-      for (ColumnSchema.Attribute attr: family.getAttrsList()) {
-        familyModel.addAttribute(attr.getName(), attr.getValue());
-      }
-      if (family.hasTtl()) {
-        familyModel.addAttribute(HColumnDescriptor.TTL, family.getTtl());
-      }
-      if (family.hasMaxVersions()) {
-        familyModel.addAttribute(HConstants.VERSIONS,
-          family.getMaxVersions());
-      }
-      if (family.hasCompression()) {
-        familyModel.addAttribute(HColumnDescriptor.COMPRESSION,
-          family.getCompression());
-      }
-      this.addColumnFamily(familyModel);
-    }
-    return this;
-  }
-
-  /**
-   * Converts this model back into a live HTableDescriptor, transferring
-   * all generic attributes and column families.
-   * @return a table descriptor
-   */
-  @JsonIgnore
-  public HTableDescriptor getTableDescriptor() {
-    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(getName()));
-    for (Map.Entry<QName, Object> e: getAny().entrySet()) {
-      htd.setValue(e.getKey().getLocalPart(), e.getValue().toString());
-    }
-    for (ColumnSchemaModel column: getColumns()) {
-      HColumnDescriptor hcd = new HColumnDescriptor(column.getName());
-      for (Map.Entry<QName, Object> e: column.getAny().entrySet()) {
-        hcd.setValue(e.getKey().getLocalPart(), e.getValue().toString());
-      }
-      htd.addFamily(hcd);
-    }
-    return htd;
-  }
-
-}


Mime
View raw message