hbase-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From reidc...@apache.org
Subject [hbase] branch branch-1 updated: HBASE-21674:Port HBASE-21652 (Refactor ThriftServer making thrift2 server inherited from thrift1 server) to branch-1 (#2941)
Date Tue, 22 Jun 2021 03:26:56 GMT
This is an automated email from the ASF dual-hosted git repository.

reidchan pushed a commit to branch branch-1
in repository https://gitbox.apache.org/repos/asf/hbase.git


The following commit(s) were added to refs/heads/branch-1 by this push:
     new 7e57fec  HBASE-21674:Port HBASE-21652 (Refactor ThriftServer making thrift2 server inherited from thrift1 server) to branch-1 (#2941)
7e57fec is described below

commit 7e57fecda8510354aab730d2890dc6cd15d7c264
Author: YutSean <33572832+YutSean@users.noreply.github.com>
AuthorDate: Tue Jun 22 11:26:12 2021 +0800

    HBASE-21674:Port HBASE-21652 (Refactor ThriftServer making thrift2 server inherited from thrift1 server) to branch-1 (#2941)
    
    Signed-off-by: Reid Chan <reidchan@apache.org>
---
 .../apache/hadoop/hbase/net/BoundSocketMaker.java  |    85 +
 .../org/apache/hadoop/hbase/net/TestAddress.java   |    54 +
 .../org/apache/hadoop/hbase/thrift/Constants.java  |   154 +
 .../hadoop/hbase/thrift/HBaseServiceHandler.java   |    91 +
 .../hbase/thrift/HThreadedSelectorServerArgs.java  |     2 +-
 .../hbase/thrift/HbaseHandlerMetricsProxy.java     |    27 +-
 .../org/apache/hadoop/hbase/thrift/ImplType.java   |   142 +
 .../hadoop/hbase/thrift/IncrementCoalescer.java    |    70 +-
 .../hbase/thrift/TBoundedThreadPoolServer.java     |     2 +-
 .../hbase/thrift/ThriftHBaseServiceHandler.java    |  1290 +
 .../hadoop/hbase/thrift/ThriftHttpServlet.java     |    67 +-
 .../apache/hadoop/hbase/thrift/ThriftMetrics.java  |    11 +-
 .../apache/hadoop/hbase/thrift/ThriftServer.java   |   780 +-
 .../hadoop/hbase/thrift/ThriftServerRunner.java    |  1957 -
 .../hadoop/hbase/thrift/ThriftUtilities.java       |     2 +-
 .../hbase/thrift/generated/AlreadyExists.java      |     2 +-
 .../hbase/thrift/generated/BatchMutation.java      |     2 +-
 .../hbase/thrift/generated/ColumnDescriptor.java   |     2 +-
 .../hadoop/hbase/thrift/generated/Hbase.java       |  3905 +-
 .../hadoop/hbase/thrift/generated/IOError.java     |     2 +-
 .../hbase/thrift/generated/IllegalArgument.java    |     2 +-
 .../hadoop/hbase/thrift/generated/Mutation.java    |     2 +-
 .../hadoop/hbase/thrift/generated/TAppend.java     |     2 +-
 .../hadoop/hbase/thrift/generated/TCell.java       |     2 +-
 .../hadoop/hbase/thrift/generated/TColumn.java     |     2 +-
 .../hadoop/hbase/thrift/generated/TIncrement.java  |     2 +-
 .../hadoop/hbase/thrift/generated/TRegionInfo.java |     2 +-
 .../hadoop/hbase/thrift/generated/TRowResult.java  |     2 +-
 .../hadoop/hbase/thrift/generated/TScan.java       |     2 +-
 .../generated/TThriftServerType.java}              |    22 +-
 .../hbase/thrift2/ThriftHBaseServiceHandler.java   |   301 +-
 .../apache/hadoop/hbase/thrift2/ThriftServer.java  |   584 +-
 .../hadoop/hbase/thrift2/ThriftUtilities.java      |   834 +-
 .../hadoop/hbase/thrift2/generated/TAppend.java    |   113 +-
 .../hbase/thrift2/generated/TAuthorization.java    |     2 +-
 .../{TCompareOp.java => TBloomFilterType.java}     |    47 +-
 .../hbase/thrift2/generated/TCellVisibility.java   |     2 +-
 .../hadoop/hbase/thrift2/generated/TColumn.java    |     2 +-
 .../thrift2/generated/TColumnFamilyDescriptor.java |  2492 +
 .../hbase/thrift2/generated/TColumnIncrement.java  |     2 +-
 .../hbase/thrift2/generated/TColumnValue.java      |   109 +-
 .../hadoop/hbase/thrift2/generated/TCompareOp.java |     2 +-
 ...{TCompareOp.java => TCompressionAlgorithm.java} |    38 +-
 .../hbase/thrift2/generated/TConsistency.java      |     2 +-
 .../{TCompareOp.java => TDataBlockEncoding.java}   |    41 +-
 .../hadoop/hbase/thrift2/generated/TDelete.java    |     2 +-
 .../hbase/thrift2/generated/TDeleteType.java       |    10 +-
 .../hbase/thrift2/generated/TDurability.java       |     5 +-
 .../{TConsistency.java => TFilterByOperator.java}  |    23 +-
 .../hadoop/hbase/thrift2/generated/TGet.java       |   526 +-
 .../hbase/thrift2/generated/THBaseService.java     | 49869 +++++++++++++++----
 .../hbase/thrift2/generated/THRegionInfo.java      |     2 +-
 .../hbase/thrift2/generated/THRegionLocation.java  |     2 +-
 .../hadoop/hbase/thrift2/generated/TIOError.java   |     2 +-
 .../hbase/thrift2/generated/TIllegalArgument.java  |     2 +-
 .../hadoop/hbase/thrift2/generated/TIncrement.java |   113 +-
 .../hbase/thrift2/generated/TKeepDeletedCells.java |    65 +
 .../hbase/thrift2/generated/TLogQueryFilter.java   |  1057 +
 .../generated/{TConsistency.java => TLogType.java} |    21 +-
 .../hadoop/hbase/thrift2/generated/TMutation.java  |     2 +-
 .../thrift2/generated/TNamespaceDescriptor.java    |   537 +
 .../hbase/thrift2/generated/TOnlineLogRecord.java  |  1645 +
 .../hadoop/hbase/thrift2/generated/TPut.java       |     2 +-
 .../{TConsistency.java => TReadType.java}          |    24 +-
 .../hadoop/hbase/thrift2/generated/TResult.java    |   111 +-
 .../hbase/thrift2/generated/TRowMutations.java     |     2 +-
 .../hadoop/hbase/thrift2/generated/TScan.java      |   371 +-
 .../hbase/thrift2/generated/TServerName.java       |     2 +-
 .../{TAppend.java => TTableDescriptor.java}        |   474 +-
 .../{TColumnIncrement.java => TTableName.java}     |   322 +-
 .../{TConsistency.java => TThriftServerType.java}  |    20 +-
 .../hadoop/hbase/thrift2/generated/TTimeRange.java |     2 +-
 .../hbase-webapps/static/css/bootstrap-theme.css   |   394 +
 .../static/css/bootstrap-theme.min.css             |    14 +-
 .../hbase-webapps/static/css/bootstrap.css         |  6805 +++
 .../hbase-webapps/static/css/bootstrap.min.css     |    13 +-
 .../static/fonts/glyphicons-halflings-regular.eot  |   Bin 20127 -> 14079 bytes
 .../static/fonts/glyphicons-halflings-regular.svg  |   480 +-
 .../static/fonts/glyphicons-halflings-regular.ttf  |   Bin 45404 -> 29512 bytes
 .../static/fonts/glyphicons-halflings-regular.woff |   Bin 23424 -> 16448 bytes
 .../fonts/glyphicons-halflings-regular.woff2       |   Bin 18028 -> 0 bytes
 .../resources/hbase-webapps/static/js/bootstrap.js |  1999 +
 .../hbase-webapps/static/js/bootstrap.min.js       |    12 +-
 .../main/resources/hbase-webapps/thrift/thrift.jsp |    20 +-
 .../org/apache/hadoop/hbase/thrift/Hbase.thrift    |    91 +-
 .../org/apache/hadoop/hbase/thrift2/hbase.thrift   |   505 +-
 .../hbase/thrift/HBaseThriftTestingUtility.java    |   132 +
 .../hadoop/hbase/thrift/TestThriftHttpServer.java  |   200 +-
 .../hadoop/hbase/thrift/TestThriftServer.java      |   225 +-
 .../hbase/thrift/TestThriftServerCmdLine.java      |   287 +-
 .../hadoop/hbase/thrift/ThriftServerRunner.java    |    70 +
 .../thrift2/TestThriftHBaseServiceHandler.java     |    19 +-
 .../TestThriftHBaseServiceHandlerWithLabels.java   |     1 -
 93 files changed, 63121 insertions(+), 16617 deletions(-)

diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/net/BoundSocketMaker.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/net/BoundSocketMaker.java
new file mode 100644
index 0000000..ef46d17
--- /dev/null
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/net/BoundSocketMaker.java
@@ -0,0 +1,85 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.net;
+
+import com.google.common.base.Supplier;
+import java.io.Closeable;
+import java.io.IOException;
+import java.net.InetAddress;
+import java.net.InetSocketAddress;
+import java.net.ServerSocket;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+/**
+ * Utility to generate a bound socket. Useful testing for BindException.
+ * Use one of the Constructors to create an instance of this class. On creation it will have put
+ * up a ServerSocket on a random port. Get the port it is bound to using {@link #getPort()}. In
+ * your test, then try to start a Server using same port to generate a BindException. Call
+ * {@link #close()} when done to shut down the Socket.
+ */
+public final class BoundSocketMaker implements Closeable {
+  private static final Log LOG = LogFactory.getLog(BoundSocketMaker.class);
+  // The socket bound in the constructor; null only via the unused private no-arg constructor.
+  private final ServerSocket socket;
+
+  // Unused; exists only to prevent construction without a port supplier.
+  private BoundSocketMaker() {
+    this.socket = null;
+  }
+
+  /**
+   * Binds on the loopback hostname.
+   * @param randomPortMaker supplies candidate ports to try until a bind succeeds
+   */
+  public BoundSocketMaker(Supplier<Integer> randomPortMaker) {
+    this(InetAddress.getLoopbackAddress().getHostName(), randomPortMaker);
+  }
+
+  /**
+   * Binds on the given hostname, taking candidate ports from {@code randomPortMaker}
+   * until one binds successfully.
+   */
+  public BoundSocketMaker(final String hostname, Supplier<Integer> randomPortMaker) {
+    this.socket = get(hostname, randomPortMaker);
+  }
+
+  /** @return the local port the ServerSocket ended up bound to */
+  public int getPort() {
+    return this.socket.getLocalPort();
+  }
+
+  /**
+   * Loops asking {@code randomPortMaker} for a port and trying to bind it; failed
+   * attempts are logged and retried with a fresh port.
+   * @return Returns a bound socket; be sure to close when done.
+   */
+  private ServerSocket get(String hostname, Supplier<Integer> randomPortMaker) {
+    ServerSocket ss = null;
+    int port = -1;
+    while (true) {
+      port = randomPortMaker.get();
+      try {
+        ss = new ServerSocket();
+        ss.bind(new InetSocketAddress(hostname, port));
+        break;
+      } catch (IOException ioe) {
+        LOG.warn("Failed bind", ioe);
+        // NOTE(review): if the ServerSocket constructor itself threw (it declares
+        // IOException), ss is null here and ss.close() would NPE — confirm this
+        // cannot happen in practice or guard with a null check.
+        try {
+          ss.close();
+        } catch (IOException ioe2) {
+          LOG.warn("FAILED CLOSE of failed bind socket", ioe2);
+        }
+      }
+    }
+    return ss;
+  }
+
+  // Shuts down the bound socket; the null check covers the unused no-arg constructor path.
+  @Override public void close() throws IOException {
+    if (this.socket != null) {
+      this.socket.close();
+    }
+  }
+}
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/net/TestAddress.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/net/TestAddress.java
new file mode 100644
index 0000000..ffa0255
--- /dev/null
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/net/TestAddress.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.net;
+
+import static org.junit.Assert.assertEquals;
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category({ MiscTests.class, SmallTests.class })
+public class TestAddress {
+
+  /**
+   * Verifies the "host without domain" rendering: "a.b.c" collapses to its first
+   * label, an all-numeric dotted (IPv4-style) hostname is kept whole, and a
+   * bracketed IPv6 literal (which contains no dots) is returned unchanged.
+   */
+  @Test
+  public void testGetHostWithoutDomain() {
+    assertEquals("a:123",
+        toStringWithoutDomain(Address.fromParts("a.b.c", 123)));
+    assertEquals("1:123",
+        toStringWithoutDomain(Address.fromParts("1.b.c", 123)));
+    assertEquals("123.456.789.1:123",
+        toStringWithoutDomain(Address.fromParts("123.456.789.1", 123)));
+    assertEquals("[2001:db8::1]:80",
+        toStringWithoutDomain(Address.fromParts("[2001:db8::1]", 80)));
+  }
+
+  /**
+   * If the hostname has more than one dot-separated part and any part is
+   * non-numeric, keep only the first part (strip the domain); otherwise return
+   * the full "host:port" string untouched.
+   */
+  private String toStringWithoutDomain(Address address) {
+    String hostname = address.getHostname();
+    String[] parts = hostname.split("\\.");
+    if (parts.length > 1) {
+      for (String part: parts) {
+        if (!StringUtils.isNumeric(part)) {
+          return Address.fromParts(parts[0], address.getPort()).toString();
+        }
+      }
+    }
+    // Single label, or every part numeric (IPv4-style): render unchanged.
+    return address.toString();
+  }
+}
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/Constants.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/Constants.java
new file mode 100644
index 0000000..ce700c2
--- /dev/null
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/Constants.java
@@ -0,0 +1,154 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.thrift;
+
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+
+/**
+ * Thrift related constants. Keys are configuration property names; the matching
+ * {@code *_DEFAULT} constants hold their default values.
+ */
+@InterfaceAudience.Private
+public final class Constants {
+  // Utility holder; not instantiable.
+  private Constants(){}
+
+  public static final int DEFAULT_HTTP_MAX_HEADER_SIZE = 64 * 1024; // 64k
+
+  // Selects which TServer implementation to run; see ImplType.
+  public static final String SERVER_TYPE_CONF_KEY = "hbase.regionserver.thrift.server.type";
+
+  public static final String COMPACT_CONF_KEY = "hbase.regionserver.thrift.compact";
+  public static final boolean COMPACT_CONF_DEFAULT = false;
+
+  public static final String FRAMED_CONF_KEY = "hbase.regionserver.thrift.framed";
+  public static final boolean FRAMED_CONF_DEFAULT = false;
+
+  // Maximum frame size for the framed transport, in megabytes.
+  public static final String MAX_FRAME_SIZE_CONF_KEY =
+      "hbase.regionserver.thrift.framed.max_frame_size_in_mb";
+  public static final int MAX_FRAME_SIZE_CONF_DEFAULT = 2;
+
+  public static final String COALESCE_INC_KEY = "hbase.regionserver.thrift.coalesceIncrement";
+  public static final String USE_HTTP_CONF_KEY = "hbase.regionserver.thrift.http";
+
+  public static final String HTTP_MIN_THREADS_KEY = "hbase.thrift.http_threads.min";
+  public static final int HTTP_MIN_THREADS_KEY_DEFAULT = 2;
+
+  public static final String HTTP_MAX_THREADS_KEY = "hbase.thrift.http_threads.max";
+  public static final int HTTP_MAX_THREADS_KEY_DEFAULT = 100;
+
+  // ssl related configs
+  public static final String THRIFT_SSL_ENABLED_KEY = "hbase.thrift.ssl.enabled";
+  public static final String THRIFT_SSL_KEYSTORE_STORE_KEY = "hbase.thrift.ssl.keystore.store";
+  public static final String THRIFT_SSL_KEYSTORE_PASSWORD_KEY =
+      "hbase.thrift.ssl.keystore.password";
+  public static final String THRIFT_SSL_KEYSTORE_KEYPASSWORD_KEY
+      = "hbase.thrift.ssl.keystore.keypassword";
+  public static final String THRIFT_SSL_EXCLUDE_CIPHER_SUITES_KEY =
+      "hbase.thrift.ssl.exclude.cipher.suites";
+  public static final String THRIFT_SSL_INCLUDE_CIPHER_SUITES_KEY =
+      "hbase.thrift.ssl.include.cipher.suites";
+  public static final String THRIFT_SSL_EXCLUDE_PROTOCOLS_KEY =
+      "hbase.thrift.ssl.exclude.protocols";
+  public static final String THRIFT_SSL_INCLUDE_PROTOCOLS_KEY =
+      "hbase.thrift.ssl.include.protocols";
+
+  public static final String THRIFT_SUPPORT_PROXYUSER_KEY = "hbase.thrift.support.proxyuser";
+
+  //kerberos related configs
+  public static final String THRIFT_DNS_INTERFACE_KEY = "hbase.thrift.dns.interface";
+  public static final String THRIFT_DNS_NAMESERVER_KEY = "hbase.thrift.dns.nameserver";
+  public static final String THRIFT_KERBEROS_PRINCIPAL_KEY = "hbase.thrift.kerberos.principal";
+  public static final String THRIFT_KEYTAB_FILE_KEY = "hbase.thrift.keytab.file";
+  public static final String THRIFT_SPNEGO_PRINCIPAL_KEY = "hbase.thrift.spnego.principal";
+  public static final String THRIFT_SPNEGO_KEYTAB_FILE_KEY = "hbase.thrift.spnego.keytab.file";
+
+  /**
+   * Amount of time in milliseconds before a server thread will timeout
+   * waiting for client to send data on a connected socket. Currently,
+   * applies only to TBoundedThreadPoolServer
+   */
+  public static final String THRIFT_SERVER_SOCKET_READ_TIMEOUT_KEY =
+      "hbase.thrift.server.socket.read.timeout";
+  public static final int THRIFT_SERVER_SOCKET_READ_TIMEOUT_DEFAULT = 60000;
+
+  /**
+   * Thrift quality of protection configuration key. Valid values can be:
+   * auth-conf: authentication, integrity and confidentiality checking
+   * auth-int: authentication and integrity checking
+   * auth: authentication only
+   *
+   * This is used to authenticate the callers and support impersonation.
+   * The thrift server and the HBase cluster must run in secure mode.
+   */
+  public static final String THRIFT_QOP_KEY = "hbase.thrift.security.qop";
+
+  public static final String BACKLOG_CONF_KEY = "hbase.regionserver.thrift.backlog";
+  // NOTE(review): "DEAFULT" is a typo in the constant name; kept as-is because
+  // renaming would break existing callers.
+  public static final int BACKLOG_CONF_DEAFULT = 0;
+
+  public static final String BIND_CONF_KEY = "hbase.regionserver.thrift.ipaddress";
+  public static final String DEFAULT_BIND_ADDR = "0.0.0.0";
+
+  public static final String PORT_CONF_KEY = "hbase.regionserver.thrift.port";
+  public static final int DEFAULT_LISTEN_PORT = 9090;
+
+  public static final String THRIFT_HTTP_ALLOW_OPTIONS_METHOD =
+      "hbase.thrift.http.allow.options.method";
+  public static final boolean THRIFT_HTTP_ALLOW_OPTIONS_METHOD_DEFAULT = false;
+
+  public static final String THRIFT_INFO_SERVER_PORT = "hbase.thrift.info.port";
+  public static final int THRIFT_INFO_SERVER_PORT_DEFAULT = 9095;
+
+  public static final String THRIFT_INFO_SERVER_BINDING_ADDRESS = "hbase.thrift.info.bindAddress";
+  public static final String THRIFT_INFO_SERVER_BINDING_ADDRESS_DEFAULT = "0.0.0.0";
+
+  // Call-queue size for the thrift server; effectively unbounded by default.
+  public static final String THRIFT_QUEUE_SIZE = "hbase.thrift.queue.size";
+  public static final int THRIFT_QUEUE_SIZE_DEFAULT = Integer.MAX_VALUE;
+
+  public static final String THRIFT_SELECTOR_NUM = "hbase.thrift.selector.num";
+
+  public static final String THRIFT_FILTERS = "hbase.thrift.filters";
+
+  // Command line options
+
+  public static final String READ_TIMEOUT_OPTION = "readTimeout";
+  public static final String MIN_WORKERS_OPTION = "minWorkers";
+  public static final String MAX_WORKERS_OPTION = "workers";
+  public static final String MAX_QUEUE_SIZE_OPTION = "queue";
+  public static final String SELECTOR_NUM_OPTION = "selectors";
+  public static final String KEEP_ALIVE_SEC_OPTION = "keepAliveSec";
+  public static final String BIND_OPTION = "bind";
+  public static final String COMPACT_OPTION = "compact";
+  public static final String FRAMED_OPTION = "framed";
+  public static final String PORT_OPTION = "port";
+  public static final String INFOPORT_OPTION = "infoport";
+
+  //for thrift2 server
+  public static final String READONLY_OPTION ="readonly";
+
+  public static final String THRIFT_READONLY_ENABLED = "hbase.thrift.readonly";
+  public static final boolean THRIFT_READONLY_ENABLED_DEFAULT = false;
+
+  public static final String HBASE_THRIFT_CLIENT_SCANNER_CACHING =
+      "hbase.thrift.client.scanner.caching";
+
+  public static final int HBASE_THRIFT_CLIENT_SCANNER_CACHING_DEFAULT = 20;
+
+  public static final String HBASE_THRIFT_SERVER_NAME = "hbase.thrift.server.name";
+  public static final String HBASE_THRIFT_SERVER_PORT = "hbase.thrift.server.port";
+
+  // NOTE(review): "BUIDLER" is a typo in the constant name; kept as-is because
+  // renaming would break existing callers.
+  public static final String HBASE_THRIFT_CLIENT_BUIDLER_CLASS =
+      "hbase.thrift.client.builder.class";
+}
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/HBaseServiceHandler.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/HBaseServiceHandler.java
new file mode 100644
index 0000000..497ea53
--- /dev/null
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/HBaseServiceHandler.java
@@ -0,0 +1,91 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.thrift;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseInterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.security.UserProvider;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.ConnectionCache;
+
+
+/**
+ * Abstract base class for HBase Thrift service handlers. Holds a
+ * {@link ConnectionCache} keyed by effective user and exposes helpers to obtain
+ * Admin/Table instances from it.
+ */
+@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
+public abstract class HBaseServiceHandler {
+  // How often (ms) the connection cache runs its cleanup chore; default 10 seconds.
+  public static final String CLEANUP_INTERVAL = "hbase.thrift.connection.cleanup-interval";
+  // Max idle time (ms) before a cached connection is eligible for cleanup; default 10 minutes.
+  public static final String MAX_IDLETIME = "hbase.thrift.connection.max-idletime";
+
+  protected Configuration conf;
+
+  // Per-user connection cache backing getAdmin()/getTable().
+  protected final ConnectionCache connectionCache;
+
+  /**
+   * @param c configuration supplying the cleanup-interval and max-idletime settings
+   * @param userProvider used by the ConnectionCache to resolve users
+   * @throws IOException if the ConnectionCache cannot be created
+   */
+  public HBaseServiceHandler(final Configuration c,
+      final UserProvider userProvider) throws IOException {
+    this.conf = c;
+    int cleanInterval = conf.getInt(CLEANUP_INTERVAL, 10 * 1000);
+    int maxIdleTime = conf.getInt(MAX_IDLETIME, 10 * 60 * 1000);
+    connectionCache = new ConnectionCache(
+        conf, userProvider, cleanInterval, maxIdleTime);
+  }
+
+  // Set via initMetrics(); stays null when metrics are not initialized.
+  protected ThriftMetrics metrics = null;
+
+  public void initMetrics(ThriftMetrics metrics) {
+    this.metrics = metrics;
+  }
+
+  // Switches which user's cached connection subsequent getAdmin()/getTable() calls use.
+  public void setEffectiveUser(String effectiveUser) {
+    connectionCache.setEffectiveUser(effectiveUser);
+  }
+
+  /**
+   * Obtain HBaseAdmin. Creates the instance if it is not already created.
+   */
+  protected Admin getAdmin() throws IOException {
+    return connectionCache.getAdmin();
+  }
+
+  /**
+   * Creates and returns a Table instance from a given table name.
+   *
+   * @param tableName
+   *          name of table
+   * @return Table object
+   * @throws IOException if getting the table fails
+   */
+  protected Table getTable(final byte[] tableName) throws IOException {
+    String table = Bytes.toString(tableName);
+    return connectionCache.getTable(table);
+  }
+
+  // ByteBuffer convenience overload; delegates to the byte[] variant above.
+  protected  Table getTable(final ByteBuffer tableName) throws IOException {
+    return getTable(Bytes.getBytes(tableName));
+  }
+
+
+}
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/HThreadedSelectorServerArgs.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/HThreadedSelectorServerArgs.java
index da33cc0..72b5cb3 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/HThreadedSelectorServerArgs.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/HThreadedSelectorServerArgs.java
@@ -22,8 +22,8 @@ package org.apache.hadoop.hbase.thrift;
 import java.util.Locale;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.thrift.server.TThreadedSelectorServer;
 import org.apache.thrift.transport.TNonblockingServerTransport;
 
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/HbaseHandlerMetricsProxy.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/HbaseHandlerMetricsProxy.java
index 794143d..91ec9d2 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/HbaseHandlerMetricsProxy.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/HbaseHandlerMetricsProxy.java
@@ -23,11 +23,10 @@ import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
 import java.lang.reflect.Proxy;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.thrift.generated.Hbase;
+import org.apache.hadoop.hbase.thrift2.generated.THBaseService;
 
 
 /**
@@ -35,12 +34,9 @@ import org.apache.hadoop.hbase.thrift.generated.Hbase;
  * time of each call to ThriftMetrics.
  */
 @InterfaceAudience.Private
-public class HbaseHandlerMetricsProxy implements InvocationHandler {
-
-  private static final Log LOG = LogFactory.getLog(
-      HbaseHandlerMetricsProxy.class);
+public final class HbaseHandlerMetricsProxy implements InvocationHandler {
 
-  private final Hbase.Iface handler;
+  private final Object handler;
   private final ThriftMetrics metrics;
 
   public static Hbase.Iface newInstance(Hbase.Iface handler,
@@ -52,8 +48,19 @@ public class HbaseHandlerMetricsProxy implements InvocationHandler {
         new HbaseHandlerMetricsProxy(handler, metrics, conf));
   }
 
-  private HbaseHandlerMetricsProxy(
-      Hbase.Iface handler, ThriftMetrics metrics, Configuration conf) {
+  // for thrift 2
+  public static THBaseService.Iface newInstance(THBaseService.Iface handler,
+      ThriftMetrics metrics,
+      Configuration conf) {
+    return (THBaseService.Iface) Proxy.newProxyInstance(
+        handler.getClass().getClassLoader(),
+        new Class[]{THBaseService.Iface.class},
+        new HbaseHandlerMetricsProxy(handler, metrics, conf)
+    );
+  }
+
+  private HbaseHandlerMetricsProxy(Object handler, ThriftMetrics metrics,
+      Configuration conf) {
     this.handler = handler;
     this.metrics = metrics;
   }
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ImplType.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ImplType.java
new file mode 100644
index 0000000..5fbf808
--- /dev/null
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ImplType.java
@@ -0,0 +1,142 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.thrift;
+
+import static org.apache.hadoop.hbase.thrift.Constants.SERVER_TYPE_CONF_KEY;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.OptionGroup;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.thrift.server.THsHaServer;
+import org.apache.thrift.server.TNonblockingServer;
+import org.apache.thrift.server.TServer;
+import org.apache.thrift.server.TThreadedSelectorServer;
+
+/**
+ * An enum of server implementation selections. Each value maps a command-line
+ * option / {@code SERVER_TYPE_CONF_KEY} value to the Thrift {@link TServer}
+ * class to run.
+ */
+@InterfaceAudience.Private
+public enum ImplType {
+  HS_HA("hsha", true, THsHaServer.class, true),
+  NONBLOCKING("nonblocking", true, TNonblockingServer.class, true),
+  THREAD_POOL("threadpool", false, TBoundedThreadPoolServer.class, true),
+  THREADED_SELECTOR("threadedselector", true, TThreadedSelectorServer.class, true);
+
+  private static final Log LOG = LogFactory.getLog(ImplType.class);
+  public static final ImplType DEFAULT = THREAD_POOL;
+
+  // Command-line option name (without the leading '-').
+  final String option;
+  // Whether this server implementation always requires the framed transport.
+  final boolean isAlwaysFramed;
+  // The Thrift server class this value stands for.
+  final Class<? extends TServer> serverClass;
+  // Whether this implementation supports binding to a specific IP address.
+  final boolean canSpecifyBindIP;
+
+  private ImplType(String option, boolean isAlwaysFramed,
+      Class<? extends TServer> serverClass, boolean canSpecifyBindIP) {
+    this.option = option;
+    this.isAlwaysFramed = isAlwaysFramed;
+    this.serverClass = serverClass;
+    this.canSpecifyBindIP = canSpecifyBindIP;
+  }
+
+  /**
+   * @return <code>-option</code>
+   */
+  @Override
+  public String toString() {
+    return "-" + option;
+  }
+
+  public String getOption() {
+    return option;
+  }
+
+  public boolean isAlwaysFramed() {
+    return isAlwaysFramed;
+  }
+
+  /** Human-readable help text for this option, used when building CLI options. */
+  public String getDescription() {
+    StringBuilder sb = new StringBuilder("Use the " +
+        serverClass.getSimpleName());
+    if (isAlwaysFramed){
+      sb.append(" This implies the framed transport.");
+    }
+    if (this == DEFAULT) {
+      // NOTE(review): this append lacks a leading space, producing
+      // "...transport.This is the default." — cosmetic only, left unchanged here.
+      sb.append("This is the default.");
+    }
+    return sb.toString();
+  }
+
+  // Builds a mutually-exclusive CLI option group containing one option per type.
+  static OptionGroup createOptionGroup() {
+    OptionGroup group  = new OptionGroup();
+    for (ImplType t: values()) {
+      group.addOption(new Option(t.option, t.getDescription()));
+    }
+    return group;
+  }
+
+  /**
+   * Resolves the configured server type ({@code SERVER_TYPE_CONF_KEY}, defaulting
+   * to the threadpool option) to its enum value.
+   * Throws AssertionError if the configured value matches no known option.
+   */
+  public static ImplType getServerImpl(Configuration conf) {
+    String confType = conf.get(SERVER_TYPE_CONF_KEY, THREAD_POOL.option);
+    for (ImplType t: values()) {
+      if (confType.equals(t.option)){
+        return t;
+      }
+    }
+    // NOTE(review): "Unkown" is a typo in this runtime message; left unchanged in
+    // this doc-only pass.
+    throw new AssertionError("Unkown server ImplType.option:" + confType);
+  }
+
+  /**
+   * Reads the chosen server type from the command line and records it in the
+   * configuration. At most one type option may be given; none selects DEFAULT,
+   * more than one is an error.
+   */
+  static void setServerImpl(CommandLine cmd, Configuration conf) {
+    ImplType chosenType = null;
+    int numChosen = 0;
+    for (ImplType t: values()) {
+      if (cmd.hasOption(t.option)) {
+        chosenType = t;
+        ++numChosen;
+      }
+    }
+    if (numChosen < 1) {
+      LOG.info("Using default thrift server type.");
+      chosenType = DEFAULT;
+    } else if (numChosen > 1) {
+      throw new AssertionError("Exactly one option out of " +
+          Arrays.toString(values()) + " has to be specified.");
+    }
+    LOG.info("Using thrift server type " + chosenType.option);
+    conf.set(SERVER_TYPE_CONF_KEY, chosenType.option);
+  }
+
+  public String simpleClassName() {
+    return serverClass.getSimpleName();
+  }
+
+  // Lists the simple class names of implementations that cannot bind a specific IP.
+  public static List<String> serversThatCannotSpecifyBindIP() {
+    List<String> l = new ArrayList<>();
+    for (ImplType t: values()) {
+      if (!t.canSpecifyBindIP) {
+        l.add(t.simpleClassName());
+      }
+    }
+    return l;
+  }
+}
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescer.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescer.java
index aec7cc9..6632798 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescer.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescer.java
@@ -15,6 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.hadoop.hbase.thrift;
 
 import java.io.IOException;
@@ -25,19 +26,22 @@ import java.util.concurrent.Callable;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
 import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.ThreadFactory;
 import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicLong;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Table;
-import org.apache.hadoop.hbase.thrift.ThriftServerRunner.HBaseHandler;
 import org.apache.hadoop.hbase.thrift.generated.TIncrement;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.MBeanUtil;
 import org.apache.hadoop.hbase.util.Threads;
+import org.apache.thrift.TException;
 
 /**
  * This class will coalesce increments from a thift server if
@@ -48,6 +52,7 @@ import org.apache.hadoop.hbase.util.Threads;
  * thrift server dies or is shut down before everything in the queue is drained.
  *
  */
+@InterfaceAudience.Private
 public class IncrementCoalescer implements IncrementCoalescerMBean {
   /**
    * Used to identify a cell that will be incremented.
@@ -79,6 +84,10 @@ public class IncrementCoalescer implements IncrementCoalescerMBean {
       return rowKey;
     }
 
+    public void setRowKey(byte[] rowKey) {
+      this.rowKey = rowKey;
+    }
+
     public byte[] getFamily() {
       return family;
     }
@@ -117,7 +126,6 @@ public class IncrementCoalescer implements IncrementCoalescerMBean {
       if (getClass() != obj.getClass()) {
         return false;
       }
-
       FullyQualifiedRow other = (FullyQualifiedRow) obj;
 
       if (!Arrays.equals(family, other.family)) {
@@ -129,34 +137,63 @@ public class IncrementCoalescer implements IncrementCoalescerMBean {
       if (!Arrays.equals(rowKey, other.rowKey)) {
         return false;
       }
-
+      if (!Arrays.equals(table, other.table)) {
+        return false;
+      }
       return Arrays.equals(table, other.table);
     }
+
+  }
+
+  static class DaemonThreadFactory implements ThreadFactory {
+    static final AtomicInteger poolNumber = new AtomicInteger(1);
+    final ThreadGroup group;
+    final AtomicInteger threadNumber = new AtomicInteger(1);
+    final String namePrefix;
+
+    DaemonThreadFactory() {
+      SecurityManager s = System.getSecurityManager();
+      group = (s != null) ? s.getThreadGroup() : Thread.currentThread().getThreadGroup();
+      namePrefix = "ICV-" + poolNumber.getAndIncrement() + "-thread-";
+    }
+
+    @Override
+    public Thread newThread(Runnable r) {
+      Thread t = new Thread(group, r, namePrefix + threadNumber.getAndIncrement(), 0);
+      if (!t.isDaemon()) {
+        t.setDaemon(true);
+      }
+      if (t.getPriority() != Thread.NORM_PRIORITY) {
+        t.setPriority(Thread.NORM_PRIORITY);
+      }
+      return t;
+    }
   }
 
   private final AtomicLong failedIncrements = new AtomicLong();
   private final AtomicLong successfulCoalescings = new AtomicLong();
   private final AtomicLong totalIncrements = new AtomicLong();
   private final ConcurrentMap<FullyQualifiedRow, Long> countersMap =
-      new ConcurrentHashMap<>(100000, 0.75f, 1500);
+      new ConcurrentHashMap<FullyQualifiedRow, Long>(100000, 0.75f, 1500);
   private final ThreadPoolExecutor pool;
-  private final HBaseHandler handler;
+  private final ThriftHBaseServiceHandler handler;
 
   private int maxQueueSize = 500000;
   private static final int CORE_POOL_SIZE = 1;
 
   private static final Log LOG = LogFactory.getLog(FullyQualifiedRow.class);
 
-  public IncrementCoalescer(HBaseHandler hand) {
+  public IncrementCoalescer(ThriftHBaseServiceHandler hand) {
     this.handler = hand;
     LinkedBlockingQueue<Runnable> queue = new LinkedBlockingQueue<Runnable>();
-    pool = new ThreadPoolExecutor(CORE_POOL_SIZE, CORE_POOL_SIZE, 50, TimeUnit.MILLISECONDS, queue,
+    pool =
+        new ThreadPoolExecutor(CORE_POOL_SIZE, CORE_POOL_SIZE, 50, TimeUnit.MILLISECONDS, queue,
             Threads.newDaemonThreadFactory("IncrementCoalescer"));
 
     MBeanUtil.registerMBean("thrift", "Thrift", this);
   }
 
-  public boolean queueIncrement(TIncrement inc) {
+  public boolean queueIncrement(TIncrement inc) throws TException {
     if (!canQueue()) {
       failedIncrements.incrementAndGet();
       return false;
@@ -164,7 +201,7 @@ public class IncrementCoalescer implements IncrementCoalescerMBean {
     return internalQueueTincrement(inc);
   }
 
-  public boolean queueIncrements(List<TIncrement> incs) {
+  public boolean queueIncrements(List<TIncrement> incs) throws TException {
     if (!canQueue()) {
       failedIncrements.incrementAndGet();
       return false;
@@ -173,11 +210,11 @@ public class IncrementCoalescer implements IncrementCoalescerMBean {
     for (TIncrement tinc : incs) {
       internalQueueTincrement(tinc);
     }
-
     return true;
+
   }
 
-  private boolean internalQueueTincrement(TIncrement inc) {
+  private boolean internalQueueTincrement(TIncrement inc) throws TException {
     byte[][] famAndQf = KeyValue.parseColumn(inc.getColumn());
     if (famAndQf.length != 2) {
       return false;
@@ -188,9 +225,10 @@ public class IncrementCoalescer implements IncrementCoalescerMBean {
   }
 
   private boolean internalQueueIncrement(byte[] tableName, byte[] rowKey, byte[] fam,
-      byte[] qual, long ammount) {
+      byte[] qual, long ammount) throws TException {
     int countersMapSize = countersMap.size();
 
+
     //Make sure that the number of threads is scaled.
     dynamicallySetCoreSize(countersMapSize);
 
@@ -204,7 +242,7 @@ public class IncrementCoalescer implements IncrementCoalescerMBean {
       Long value = countersMap.remove(key);
       if (value == null) {
         // There was nothing there, create a new value
-        value = currentAmount;
+        value = Long.valueOf(currentAmount);
       } else {
         value += currentAmount;
         successfulCoalescings.incrementAndGet();
@@ -276,7 +314,7 @@ public class IncrementCoalescer implements IncrementCoalescerMBean {
   /**
    * This method samples the incoming requests and, if selected, will check if
    * the corePoolSize should be changed.
-   * @param countersMapSize the size of the counters map
+   * @param countersMapSize the current size of the counters map, also used as a sampling value
    */
   private void dynamicallySetCoreSize(int countersMapSize) {
     // Here we are using countersMapSize as a random number, meaning this
@@ -286,8 +324,8 @@ public class IncrementCoalescer implements IncrementCoalescerMBean {
     }
     double currentRatio = (double) countersMapSize / (double) maxQueueSize;
     int newValue;
-
     if (currentRatio < 0.1) {
+      // low load: a single core thread is sufficient
       newValue = 1;
     } else if (currentRatio < 0.3) {
       newValue = 2;
@@ -300,7 +338,6 @@ public class IncrementCoalescer implements IncrementCoalescerMBean {
     } else {
       newValue = 22;
     }
-
     if (pool.getCorePoolSize() != newValue) {
       pool.setCorePoolSize(newValue);
     }
@@ -376,4 +413,5 @@ public class IncrementCoalescer implements IncrementCoalescerMBean {
   public long getCountersMapSize() {
     return countersMap.size();
   }
+
 }
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/TBoundedThreadPoolServer.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/TBoundedThreadPoolServer.java
index 843c396..aabca77 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/TBoundedThreadPoolServer.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/TBoundedThreadPoolServer.java
@@ -26,8 +26,8 @@ import java.util.concurrent.TimeUnit;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.thrift.CallQueue.Call;
 import org.apache.hadoop.hbase.util.Threads;
 import org.apache.thrift.TException;
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftHBaseServiceHandler.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftHBaseServiceHandler.java
new file mode 100644
index 0000000..a46cae5
--- /dev/null
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftHBaseServiceHandler.java
@@ -0,0 +1,1290 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.thrift;
+
+import static org.apache.hadoop.hbase.thrift.Constants.COALESCE_INC_KEY;
+import static org.apache.hadoop.hbase.util.Bytes.getBytes;
+
+import com.google.common.base.Throwables;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.HRegionLocation;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.MetaTableAccessor;
+import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.TableNotFoundException;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.client.Append;
+import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.Durability;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.Increment;
+import org.apache.hadoop.hbase.client.OperationWithAttributes;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.RegionLocator;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.ResultScanner;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.filter.Filter;
+import org.apache.hadoop.hbase.filter.ParseFilter;
+import org.apache.hadoop.hbase.filter.PrefixFilter;
+import org.apache.hadoop.hbase.filter.WhileMatchFilter;
+import org.apache.hadoop.hbase.security.UserProvider;
+import org.apache.hadoop.hbase.thrift.generated.AlreadyExists;
+import org.apache.hadoop.hbase.thrift.generated.BatchMutation;
+import org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor;
+import org.apache.hadoop.hbase.thrift.generated.Hbase;
+import org.apache.hadoop.hbase.thrift.generated.IOError;
+import org.apache.hadoop.hbase.thrift.generated.IllegalArgument;
+import org.apache.hadoop.hbase.thrift.generated.Mutation;
+import org.apache.hadoop.hbase.thrift.generated.TAppend;
+import org.apache.hadoop.hbase.thrift.generated.TCell;
+import org.apache.hadoop.hbase.thrift.generated.TIncrement;
+import org.apache.hadoop.hbase.thrift.generated.TRegionInfo;
+import org.apache.hadoop.hbase.thrift.generated.TRowResult;
+import org.apache.hadoop.hbase.thrift.generated.TScan;
+import org.apache.hadoop.hbase.thrift.generated.TThriftServerType;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.thrift.TException;
+
+/**
+ * The ThriftHBaseServiceHandler is a glue object that connects Thrift RPC calls to the
+ * HBase client API primarily defined in the Admin and Table objects.
+ */
+@InterfaceAudience.Private
+@SuppressWarnings("deprecation")
+public class ThriftHBaseServiceHandler extends HBaseServiceHandler implements Hbase.Iface {
+  private static final Log LOG = LogFactory.getLog(ThriftHBaseServiceHandler.class);
+
+  public static final int HREGION_VERSION = 1;
+
+  // nextScannerId and scannerMap are used to manage scanner state
+  private int nextScannerId = 0;
+  private HashMap<Integer, ResultScannerWrapper> scannerMap;
+  IncrementCoalescer coalescer;
+
+  /**
+   * Returns a list of all the column families for a given Table.
+   *
+   * @param table the table whose column families are listed
+   * @throws IOException if the table descriptor cannot be retrieved
+   */
+  byte[][] getAllColumns(Table table) throws IOException {
+    HColumnDescriptor[] cds = table.getTableDescriptor().getColumnFamilies();
+    byte[][] columns = new byte[cds.length][];
+    for (int i = 0; i < cds.length; i++) {
+      columns[i] = Bytes.add(cds[i].getName(),
+          KeyValue.COLUMN_FAMILY_DELIM_ARRAY);
+    }
+    return columns;
+  }
+
+  /**
+   * Assigns a unique ID to the scanner and adds the mapping to an internal
+   * hash-map
+   *
+   * @param scanner the {@link ResultScanner} to add
+   * @return integer scanner id
+   */
+  protected synchronized int addScanner(ResultScanner scanner, boolean sortColumns) {
+    int id = nextScannerId++;
+    ResultScannerWrapper resultScannerWrapper =
+        new ResultScannerWrapper(scanner, sortColumns);
+    scannerMap.put(id, resultScannerWrapper);
+    return id;
+  }
+
+
+  /**
+   * Returns the scanner associated with the specified ID.
+   *
+   * @param id the ID of the scanner to get
+   * @return a Scanner, or null if ID was invalid.
+   */
+  private synchronized ResultScannerWrapper getScanner(int id) {
+    return scannerMap.get(id);
+  }
+
+  /**
+   * Removes the scanner associated with the specified ID from the internal
+   * id-&gt;scanner hash-map
+   *
+   * @param id the ID of the scanner to remove
+   * @return a Scanner, or null if ID was invalid.
+   */
+  private synchronized ResultScannerWrapper removeScanner(int id) {
+    return scannerMap.remove(id);
+  }
+
+  protected ThriftHBaseServiceHandler(final Configuration c,
+      final UserProvider userProvider) throws IOException {
+    super(c, userProvider);
+    scannerMap = new HashMap<>();
+    this.coalescer = new IncrementCoalescer(this);
+  }
+
+  @Override
+  public void enableTable(ByteBuffer tableName) throws IOError {
+    try {
+      getAdmin().enableTable(getTableName(tableName));
+    } catch (IOException e) {
+      LOG.warn(e.getMessage(), e);
+      throw getIOError(e);
+    }
+  }
+
+  @Override
+  public void disableTable(ByteBuffer tableName) throws IOError, TException {
+    try {
+      getAdmin().disableTable(TableName.valueOf(Bytes.getBytes(tableName)));
+    } catch (IOException e) {
+      LOG.warn(e.getMessage(), e);
+      throw getIOError(e);
+    }
+  }
+
+  @Override
+  public boolean isTableEnabled(ByteBuffer tableName) throws IOError {
+    try {
+      return this.connectionCache.getAdmin().isTableEnabled(getTableName(tableName));
+    } catch (IOException e) {
+      LOG.warn(e.getMessage(), e);
+      throw getIOError(e);
+    }
+  }
+
+  // ThriftHBaseServiceHandler.compact should be deprecated and replaced with methods specific to
+  // table and region
+  @Override
+  public void compact(ByteBuffer tableNameOrRegionName) throws IOError {
+    try {
+      try {
+        getAdmin().compactRegion(getBytes(tableNameOrRegionName));
+      } catch (IllegalArgumentException e) {
+        // Invalid region, try table
+        getAdmin().compact(TableName.valueOf(getBytes(tableNameOrRegionName)));
+      }
+    } catch (IOException e) {
+      LOG.warn(e.getMessage(), e);
+      throw getIOError(e);
+    }
+  }
+
+  // ThriftHBaseServiceHandler.majorCompact should be deprecated and replaced with methods specific
+  // to table and region
+  @Override
+  public void majorCompact(ByteBuffer tableNameOrRegionName) throws IOError {
+    try {
+      try {
+        getAdmin().majorCompactRegion(getBytes(tableNameOrRegionName));
+      } catch (IllegalArgumentException e) {
+        // Invalid region, try table
+        getAdmin().majorCompact(TableName.valueOf(getBytes(tableNameOrRegionName)));
+      }
+    } catch (IOException e) {
+      LOG.warn(e.getMessage(), e);
+      throw getIOError(e);
+    }
+  }
+
+  @Override
+  public List<ByteBuffer> getTableNames() throws IOError {
+    try {
+      TableName[] tableNames = this.getAdmin().listTableNames();
+      ArrayList<ByteBuffer> list = new ArrayList<>(tableNames.length);
+      for (TableName tableName : tableNames) {
+        list.add(ByteBuffer.wrap(tableName.getName()));
+      }
+      return list;
+    } catch (IOException e) {
+      LOG.warn(e.getMessage(), e);
+      throw getIOError(e);
+    }
+  }
+
+  /**
+   * @return the list of regions in the given table, or an empty list if the table does not exist
+   */
+  @Override
+  public List<TRegionInfo> getTableRegions(ByteBuffer tableName) throws IOError {
+    try (RegionLocator locator = connectionCache.getRegionLocator(getBytes(tableName))) {
+      List<HRegionLocation> regionLocations = locator.getAllRegionLocations();
+      List<TRegionInfo> results = new ArrayList<>(regionLocations.size());
+      for (HRegionLocation regionLocation : regionLocations) {
+        HRegionInfo info = regionLocation.getRegionInfo();
+        ServerName serverName = regionLocation.getServerName();
+        TRegionInfo region = new TRegionInfo();
+        region.serverName = ByteBuffer.wrap(Bytes.toBytes(serverName.getHostname()));
+        region.port = serverName.getPort();
+        region.startKey = ByteBuffer.wrap(info.getStartKey());
+        region.endKey = ByteBuffer.wrap(info.getEndKey());
+        region.id = info.getRegionId();
+        region.name = ByteBuffer.wrap(info.getRegionName());
+        region.version = HREGION_VERSION; // HRegion now not versioned, PB encoding used
+        results.add(region);
+      }
+      return results;
+    } catch (TableNotFoundException e) {
+      return Collections.emptyList();
+    } catch (IOException e) {
+      LOG.warn(e.getMessage(), e);
+      throw getIOError(e);
+    }
+  }
+
+  @Override
+  public List<TCell> get(ByteBuffer tableName, ByteBuffer row, ByteBuffer column,
+      Map<ByteBuffer, ByteBuffer> attributes)
+      throws IOError {
+    byte[][] famAndQf = KeyValue.parseColumn(getBytes(column));
+    if (famAndQf.length == 1) {
+      return get(tableName, row, famAndQf[0], null, attributes);
+    }
+    if (famAndQf.length == 2) {
+      return get(tableName, row, famAndQf[0], famAndQf[1], attributes);
+    }
+    throw new IllegalArgumentException("Invalid familyAndQualifier provided.");
+  }
+
+  /**
+   * Note: this internal interface is slightly different from public APIs in regard to handling
+   * of the qualifier. Here we differ from the public Java API in that null != byte[0]. Rather,
+   * we respect qual == null as a request for the entire column family. The caller (
+   * {@link #get(ByteBuffer, ByteBuffer, ByteBuffer, Map)}) interface IS consistent in that the
+   * column is parsed as normal.
+   */
+  protected List<TCell> get(ByteBuffer tableName,
+      ByteBuffer row,
+      byte[] family,
+      byte[] qualifier,
+      Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
+    Table table = null;
+    try {
+      table = getTable(tableName);
+      Get get = new Get(getBytes(row));
+      addAttributes(get, attributes);
+      if (qualifier == null) {
+        get.addFamily(family);
+      } else {
+        get.addColumn(family, qualifier);
+      }
+      Result result = table.get(get);
+      return ThriftUtilities.cellFromHBase(result.rawCells());
+    } catch (IOException e) {
+      LOG.warn(e.getMessage(), e);
+      throw getIOError(e);
+    } finally {
+      closeTable(table);
+    }
+  }
+
+  /**
+   * Note: this public interface is slightly different from public Java APIs in regard to
+   * handling of the qualifier. Here we differ from the public Java API in that null != byte[0].
+   * Rather, we respect qual == null as a request for the entire column family. If you want to
+   * access the entire column family, use
+   * {@link #getVer(ByteBuffer, ByteBuffer, ByteBuffer, int, Map)} with a {@code column} value
+   * that lacks a {@code ':'}.
+   */
+  public List<TCell> getVer(ByteBuffer tableName, ByteBuffer row, byte[] family, byte[] qualifier,
+      int numVersions, Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
+    Table table = null;
+    try {
+      table = getTable(tableName);
+      Get get = new Get(getBytes(row));
+      addAttributes(get, attributes);
+      if (null == qualifier) {
+        get.addFamily(family);
+      } else {
+        get.addColumn(family, qualifier);
+      }
+      get.setMaxVersions(numVersions);
+      Result result = table.get(get);
+      return ThriftUtilities.cellFromHBase(result.rawCells());
+    } catch (IOException e) {
+      LOG.warn(e.getMessage(), e);
+      throw getIOError(e);
+    } finally {
+      closeTable(table);
+    }
+  }
+
+  @Override
+  public List<TCell> getVer(ByteBuffer tableName, ByteBuffer row, ByteBuffer column,
+      int numVersions, Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
+    byte[][] famAndQf = KeyValue.parseColumn(getBytes(column));
+    if (famAndQf.length == 1) {
+      return getVer(tableName, row, famAndQf[0], null, numVersions, attributes);
+    }
+    if (famAndQf.length == 2) {
+      return getVer(tableName, row, famAndQf[0], famAndQf[1], numVersions, attributes);
+    }
+    throw new IllegalArgumentException("Invalid familyAndQualifier provided.");
+  }
+
+  @Override
+  public List<TCell> getVerTs(ByteBuffer tableName, ByteBuffer row, ByteBuffer column,
+    long timestamp, int numVersions, Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
+    byte [][] famAndQf = KeyValue.parseColumn(getBytes(column));
+    if (famAndQf.length == 1) {
+      return getVerTs(tableName, row, famAndQf[0], null, timestamp, numVersions, attributes);
+    }
+    if (famAndQf.length == 2) {
+      return getVerTs(tableName, row, famAndQf[0], famAndQf[1], timestamp, numVersions,
+        attributes);
+    }
+    throw new IllegalArgumentException("Invalid familyAndQualifier provided.");
+  }
+
+  /**
+    * Note: this internal interface is slightly different from public APIs in regard to handling
+    * of the qualifier. Here we differ from the public Java API in that null != byte[0]. Rather,
+    * we respect qual == null as a request for the entire column family. The caller (
+    * {@link #getVerTs(ByteBuffer, ByteBuffer, ByteBuffer, long, int, Map)}) interface IS
+    * consistent in that the column is parsed as normal.
+    */
+  protected List<TCell> getVerTs(ByteBuffer tableName, ByteBuffer row, byte[] family,
+    byte[] qualifier, long timestamp, int numVersions, Map<ByteBuffer, ByteBuffer> attributes)
+    throws IOError {
+    Table table = null;
+    try {
+      table = getTable(tableName);
+      Get get = new Get(getBytes(row));
+      addAttributes(get, attributes);
+      if (null == qualifier) {
+        get.addFamily(family);
+      } else {
+        get.addColumn(family, qualifier);
+      }
+      get.setTimeRange(0, timestamp);
+      get.setMaxVersions(numVersions);
+      Result result = table.get(get);
+      return ThriftUtilities.cellFromHBase(result.rawCells());
+    } catch (IOException e) {
+      LOG.warn(e.getMessage(), e);
+      throw getIOError(e);
+    } finally {
+      closeTable(table);
+    }
+  }
+
+  @Override
+  public List<TRowResult> getRow(ByteBuffer tableName, ByteBuffer row,
+    Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
+    return getRowWithColumnsTs(tableName, row, null,
+      HConstants.LATEST_TIMESTAMP,
+      attributes);
+  }
+
+  @Override
+  public List<TRowResult> getRowWithColumns(ByteBuffer tableName,
+    ByteBuffer row,
+    List<ByteBuffer> columns,
+    Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
+    return getRowWithColumnsTs(tableName, row, columns,
+      HConstants.LATEST_TIMESTAMP,attributes);
+  }
+
+  @Override
+  public List<TRowResult> getRowTs(ByteBuffer tableName, ByteBuffer row,
+      long timestamp, Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
+    return getRowWithColumnsTs(tableName, row, null,
+        timestamp, attributes);
+  }
+
+  @Override
+  public List<TRowResult> getRowWithColumnsTs(
+      ByteBuffer tableName, ByteBuffer row, List<ByteBuffer> columns,
+      long timestamp, Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
+    Table table = null;
+    try {
+      table = getTable(tableName);
+      if (columns == null) {
+        Get get = new Get(getBytes(row));
+        addAttributes(get, attributes);
+        get.setTimeRange(0, timestamp);
+        Result result = table.get(get);
+        return ThriftUtilities.rowResultFromHBase(result);
+      }
+      Get get = new Get(getBytes(row));
+      addAttributes(get, attributes);
+      for (ByteBuffer column: columns) {
+        byte[][] famAndQf = KeyValue.parseColumn(getBytes(column));
+        if (famAndQf.length == 1) {
+          get.addFamily(famAndQf[0]);
+        } else {
+          get.addColumn(famAndQf[0], famAndQf[1]);
+        }
+      }
+      get.setTimeRange(0, timestamp);
+      Result result = table.get(get);
+      return ThriftUtilities.rowResultFromHBase(result);
+    } catch (IOException e) {
+      LOG.warn(e.getMessage(), e);
+      throw getIOError(e);
+    } finally {
+      closeTable(table);
+    }
+  }
+
+  @Override
+  public List<TRowResult> getRows(ByteBuffer tableName,
+      List<ByteBuffer> rows,
+      Map<ByteBuffer, ByteBuffer> attributes)
+      throws IOError {
+    return getRowsWithColumnsTs(tableName, rows, null,
+      HConstants.LATEST_TIMESTAMP, attributes);
+  }
+
+  @Override
+  public List<TRowResult> getRowsWithColumns(ByteBuffer tableName,
+      List<ByteBuffer> rows,
+      List<ByteBuffer> columns,
+      Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
+    return getRowsWithColumnsTs(tableName, rows, columns,
+      HConstants.LATEST_TIMESTAMP, attributes);
+  }
+
+  @Override
+  public List<TRowResult> getRowsTs(ByteBuffer tableName,
+      List<ByteBuffer> rows,
+      long timestamp,
+      Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
+    return getRowsWithColumnsTs(tableName, rows, null,
+      timestamp, attributes);
+  }
+
+  @Override
+  public List<TRowResult> getRowsWithColumnsTs(ByteBuffer tableName,
+      List<ByteBuffer> rows,
+      List<ByteBuffer> columns, long timestamp,
+      Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
+    Table table = null;
+    try {
+      List<Get> gets = new ArrayList<>(rows.size());
+      table = getTable(tableName);
+      if (metrics != null) {
+        metrics.incNumRowKeysInBatchGet(rows.size());
+      }
+      for (ByteBuffer row: rows) {
+        Get get = new Get(getBytes(row));
+        addAttributes(get, attributes);
+        if (columns != null) {
+          for (ByteBuffer column: columns) {
+            byte[][] famAndQf = KeyValue.parseColumn(getBytes(column));
+            if (famAndQf.length == 1) {
+              get.addFamily(famAndQf[0]);
+            } else {
+              get.addColumn(famAndQf[0], famAndQf[1]);
+            }
+          }
+        }
+        get.setTimeRange(0, timestamp);
+        gets.add(get);
+      }
+      Result[] result = table.get(gets);
+      return ThriftUtilities.rowResultFromHBase(result);
+    } catch (IOException e) {
+      LOG.warn(e.getMessage(), e);
+      throw getIOError(e);
+    } finally {
+      closeTable(table);
+    }
+  }
+
+  @Override
+  public void deleteAll(
+      ByteBuffer tableName, ByteBuffer row, ByteBuffer column,
+      Map<ByteBuffer, ByteBuffer> attributes)
+      throws IOError {
+    deleteAllTs(tableName, row, column, HConstants.LATEST_TIMESTAMP, attributes);
+  }
+
+  @Override
+  public void deleteAllTs(ByteBuffer tableName,
+      ByteBuffer row,
+      ByteBuffer column,
+      long timestamp, Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
+    Table table = null;
+    try {
+      table = getTable(tableName);
+      Delete delete = new Delete(getBytes(row));
+      addAttributes(delete, attributes);
+      byte[][] famAndQf = KeyValue.parseColumn(getBytes(column));
+      if (famAndQf.length == 1) {
+        delete.addFamily(famAndQf[0], timestamp);
+      } else {
+        delete.addColumns(famAndQf[0], famAndQf[1], timestamp);
+      }
+      table.delete(delete);
+    } catch (IOException e) {
+      LOG.warn(e.getMessage(), e);
+      throw getIOError(e);
+    } finally {
+      closeTable(table);
+    }
+  }
+
+  @Override
+  public void deleteAllRow(
+      ByteBuffer tableName, ByteBuffer row,
+      Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
+    deleteAllRowTs(tableName, row, HConstants.LATEST_TIMESTAMP, attributes);
+  }
+
+  @Override
+  public void deleteAllRowTs(
+      ByteBuffer tableName, ByteBuffer row, long timestamp,
+      Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
+    Table table = null;
+    try {
+      table = getTable(tableName);
+      Delete delete = new Delete((getBytes(row)), timestamp);
+      addAttributes(delete, attributes);
+      table.delete(delete);
+    } catch (IOException e) {
+      LOG.warn(e.getMessage(), e);
+      throw getIOError(e);
+    } finally {
+      closeTable(table);
+    }
+  }
+
+  @Override
+  public void createTable(ByteBuffer in_tableName,
+      List<ColumnDescriptor> columnFamilies) throws IOError, IllegalArgument,AlreadyExists {
+    TableName tableName = getTableName(in_tableName);
+    try {
+      if (getAdmin().tableExists(tableName)) {
+        throw new AlreadyExists("table name already in use");
+      }
+      HTableDescriptor desc = new HTableDescriptor(tableName);
+      for (ColumnDescriptor col: columnFamilies) {
+        HColumnDescriptor colDesc = ThriftUtilities.colDescFromThrift(col);
+        desc.addFamily(colDesc);
+      }
+      getAdmin().createTable(desc);
+    } catch (IOException e) {
+      LOG.warn(e.getMessage(), e);
+      throw getIOError(e);
+    } catch (IllegalArgumentException e) {
+      LOG.warn(e.getMessage(), e);
+      throw new IllegalArgument(Throwables.getStackTraceAsString(e));
+    }
+  }
+
+  private static TableName getTableName(ByteBuffer buffer) {
+    return TableName.valueOf(getBytes(buffer));
+  }
+
+  @Override
+  public void deleteTable(ByteBuffer in_tableName) throws IOError {
+    TableName tableName = getTableName(in_tableName);
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("deleteTable: table= " + tableName);
+    }
+    try {
+      if (!getAdmin().tableExists(tableName)) {
+        throw new IOException("table does not exist.");
+      }
+      getAdmin().deleteTable(tableName);
+    } catch (IOException e) {
+      LOG.warn(e.getMessage(), e);
+      throw getIOError(e);
+    }
+  }
+
+  /**
+   * Applies the given mutations to a single row at the latest timestamp.
+   * Delegates to {@link #mutateRowTs} with {@code HConstants.LATEST_TIMESTAMP}.
+   */
+  @Override
+  public void mutateRow(ByteBuffer tableName, ByteBuffer row,
+    List<Mutation> mutations, Map<ByteBuffer, ByteBuffer> attributes)
+    throws IOError, IllegalArgument {
+    mutateRowTs(tableName, row, mutations, HConstants.LATEST_TIMESTAMP, attributes);
+  }
+
+  /**
+   * Applies a batch of puts and deletes to a single row at the given
+   * timestamp. A delete mutation with only a family removes the whole family;
+   * with family:qualifier it removes all versions at or before the timestamp.
+   * A put mutation without a qualifier is skipped with a warning. The Delete
+   * is executed before the Put, matching the order below.
+   *
+   * @throws IOError wrapping IOExceptions from the table operations
+   * @throws IllegalArgument on malformed column names
+   */
+  @Override
+  public void mutateRowTs(ByteBuffer tableName, ByteBuffer row,
+      List<Mutation> mutations, long timestamp,
+      Map<ByteBuffer, ByteBuffer> attributes)
+      throws IOError, IllegalArgument {
+    Table table = null;
+    try {
+      table = getTable(tableName);
+      Put put = new Put(getBytes(row), timestamp);
+      addAttributes(put, attributes);
+      Delete delete = new Delete(getBytes(row));
+      addAttributes(delete, attributes);
+      if (metrics != null) {
+        metrics.incNumRowKeysInBatchMutate(mutations.size());
+      }
+      for (Mutation m: mutations) {
+        byte[][] famAndQf = KeyValue.parseColumn(getBytes(m.column));
+        if (m.isDelete) {
+          if (famAndQf.length == 1) {
+            delete.addFamily(famAndQf[0], timestamp);
+          } else {
+            // addColumns (all versions <= timestamp) preserves the historical
+            // thrift1 delete semantics and matches mutateRowsTs; addColumn
+            // would remove only the single version at exactly this timestamp.
+            delete.addColumns(famAndQf[0], famAndQf[1], timestamp);
+          }
+          delete.setDurability(m.writeToWAL ? Durability.SYNC_WAL : Durability.SKIP_WAL);
+        } else {
+          if (famAndQf.length == 1) {
+            LOG.warn("No column qualifier specified. Delete is the only mutation supported "
+              + "over the whole column family.");
+          } else {
+            put.addImmutable(famAndQf[0], famAndQf[1],
+                m.value != null ? getBytes(m.value)
+                    : HConstants.EMPTY_BYTE_ARRAY);
+          }
+          put.setDurability(m.writeToWAL ? Durability.SYNC_WAL : Durability.SKIP_WAL);
+        }
+      }
+      if (!delete.isEmpty()) {
+        table.delete(delete);
+      }
+      if (!put.isEmpty()) {
+        table.put(put);
+      }
+    } catch (IOException e) {
+      LOG.warn(e.getMessage(), e);
+      throw getIOError(e);
+    } catch (IllegalArgumentException e) {
+      LOG.warn(e.getMessage(), e);
+      throw new IllegalArgument(Throwables.getStackTraceAsString(e));
+    } finally {
+      closeTable(table);
+    }
+  }
+
+  /**
+   * Applies batches of mutations to multiple rows at the latest timestamp.
+   * Delegates to {@link #mutateRowsTs} with {@code HConstants.LATEST_TIMESTAMP}.
+   */
+  @Override
+  public void mutateRows(ByteBuffer tableName, List<BatchMutation> rowBatches,
+      Map<ByteBuffer, ByteBuffer> attributes)
+      throws TException {
+    mutateRowsTs(tableName, rowBatches, HConstants.LATEST_TIMESTAMP, attributes);
+  }
+
+  /**
+   * Applies batches of mutations to multiple rows at the given timestamp.
+   * All Puts are submitted first, then all Deletes; the two groups are NOT
+   * applied atomically across rows. A delete with only a family removes the
+   * whole family; with family:qualifier it removes all versions at or before
+   * the timestamp. A put without a qualifier is rejected.
+   *
+   * @throws IOError wrapping IOExceptions from the table operations
+   * @throws IllegalArgument on malformed column names
+   */
+  @Override
+  public void mutateRowsTs(
+      ByteBuffer tableName, List<BatchMutation> rowBatches, long timestamp,
+      Map<ByteBuffer, ByteBuffer> attributes)
+      throws IOError, IllegalArgument, TException {
+    List<Put> puts = new ArrayList<Put>();
+    List<Delete> deletes = new ArrayList<Delete>();
+
+    for (BatchMutation batch : rowBatches) {
+      byte[] row = getBytes(batch.row);
+      List<Mutation> mutations = batch.mutations;
+      Delete delete = new Delete(row);
+      addAttributes(delete, attributes);
+      Put put = new Put(row, timestamp);
+      addAttributes(put, attributes);
+      for (Mutation m : mutations) {
+        byte[][] famAndQf = KeyValue.parseColumn(getBytes(m.column));
+        if (m.isDelete) {
+          // no qualifier, family only.
+          if (famAndQf.length == 1) {
+            // addFamily/addColumns are the non-deprecated equivalents of
+            // deleteFamily/deleteColumns, and match mutateRowTs above.
+            delete.addFamily(famAndQf[0], timestamp);
+          } else {
+            delete.addColumns(famAndQf[0], famAndQf[1], timestamp);
+          }
+          delete.setDurability(m.writeToWAL ? Durability.SYNC_WAL
+              : Durability.SKIP_WAL);
+        } else {
+          if (famAndQf.length == 1) {
+            LOG.warn("No column qualifier specified. Delete is the only mutation supported "
+                + "over the whole column family.");
+          }
+          if (famAndQf.length == 2) {
+            put.addImmutable(famAndQf[0], famAndQf[1],
+                m.value != null ? getBytes(m.value) : HConstants.EMPTY_BYTE_ARRAY);
+          } else {
+            throw new IllegalArgumentException("Invalid famAndQf provided.");
+          }
+          put.setDurability(m.writeToWAL ? Durability.SYNC_WAL : Durability.SKIP_WAL);
+        }
+      }
+      if (!delete.isEmpty()) {
+        deletes.add(delete);
+      }
+      if (!put.isEmpty()) {
+        puts.add(put);
+      }
+    }
+
+    Table table = null;
+    try {
+      table = getTable(tableName);
+      if (!puts.isEmpty()) {
+        table.put(puts);
+      }
+      if (!deletes.isEmpty()) {
+        table.delete(deletes);
+      }
+    } catch (IOException e) {
+      LOG.warn(e.getMessage(), e);
+      throw getIOError(e);
+    } catch (IllegalArgumentException e) {
+      LOG.warn(e.getMessage(), e);
+      throw new IllegalArgument(Throwables.getStackTraceAsString(e));
+    } finally {
+      closeTable(table);
+    }
+  }
+
+  /**
+   * Atomically increments a counter cell. A column with no qualifier
+   * (family only) is treated as family with an empty qualifier.
+   *
+   * @return the post-increment value
+   */
+  @Override
+  public long atomicIncrement(
+      ByteBuffer tableName, ByteBuffer row, ByteBuffer column, long amount)
+      throws IOError, IllegalArgument, TException {
+    byte[][] famAndQf = KeyValue.parseColumn(getBytes(column));
+    if (famAndQf.length == 1) {
+      return atomicIncrement(tableName, row, famAndQf[0], HConstants.EMPTY_BYTE_ARRAY, amount);
+    }
+    return atomicIncrement(tableName, row, famAndQf[0], famAndQf[1], amount);
+  }
+
+  /**
+   * Atomically increments the cell at (row, family, qualifier) by
+   * {@code amount} and returns the new value.
+   *
+   * @throws IOError wrapping any IOException from the increment
+   */
+  protected long atomicIncrement(ByteBuffer tableName, ByteBuffer row,
+      byte[] family, byte[] qualifier, long amount)
+      throws IOError, IllegalArgument, TException {
+    Table table = null;
+    try {
+      table = getTable(tableName);
+      return table.incrementColumnValue(
+          getBytes(row), family, qualifier, amount);
+    } catch (IOException e) {
+      LOG.warn(e.getMessage(), e);
+      throw getIOError(e);
+    } finally {
+      closeTable(table);
+    }
+  }
+
+  /**
+   * Fetches up to {@code nbRows} rows from the open scanner with the given id.
+   * An exhausted scanner yields an empty list; it is not auto-closed here —
+   * callers must invoke scannerClose.
+   *
+   * @throws IllegalArgument if the scanner id is unknown
+   * @throws IOError wrapping any IOException from the scan
+   */
+  @Override
+  public List<TRowResult> scannerGetList(int id, int nbRows)
+      throws IllegalArgument, IOError {
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("scannerGetList: id= " + id);
+    }
+    ResultScannerWrapper resultscannerWrapper = getScanner(id);
+    if (null == resultscannerWrapper) {
+      String message = "scanner ID is invalid.";
+      LOG.warn(message);
+      // Reuse the logged message rather than duplicating the literal.
+      throw new IllegalArgument(message);
+    }
+
+    Result[] results;
+    try {
+      results = resultscannerWrapper.getScanner().next(nbRows);
+      if (null == results) {
+        return new ArrayList<>();
+      }
+    } catch (IOException e) {
+      LOG.warn(e.getMessage(), e);
+      throw getIOError(e);
+    }
+    return ThriftUtilities.rowResultFromHBase(results, resultscannerWrapper.isColumnSorted());
+  }
+
+  /**
+   * Closes the scanner with the given id and removes it from the registry.
+   *
+   * @throws IllegalArgument if the scanner id is unknown
+   */
+  @Override
+  public void scannerClose(int id) throws IOError, IllegalArgument {
+    LOG.debug("scannerClose: id= " + id);
+    ResultScannerWrapper resultScannerWrapper = getScanner(id);
+    if (resultScannerWrapper == null) {
+      LOG.warn("scanner ID is invalid.");
+      throw new IllegalArgument("scanner ID is invalid.");
+    }
+    resultScannerWrapper.getScanner().close();
+    removeScanner(id);
+  }
+
+  /**
+   * Unsupported stub: always returns null. The underlying getRowOrBefore
+   * server API is not available here — clients needing closest-row-before
+   * semantics should use a reverse scan instead (see getReverseScanResult).
+   */
+  public List<TCell> getRowOrBefore(ByteBuffer tableName, ByteBuffer row, ByteBuffer family)
+      throws IOError, TException {
+    return null;
+  }
+
+  /** Fetches a single row from the scanner; delegates to scannerGetList(id, 1). */
+  @Override
+  public List<TRowResult> scannerGet(int id) throws IllegalArgument, IOError {
+    return scannerGetList(id, 1);
+  }
+
+  /**
+   * Opens a scanner configured from a full thrift TScan (start/stop row,
+   * timestamp upper bound, caching, batch size, columns, filter string,
+   * reversed, cache blocks) and registers it.
+   *
+   * @return the id under which the scanner is registered
+   * @throws IOError wrapping any IOException from opening the scanner
+   */
+  @Override
+  public int scannerOpenWithScan(ByteBuffer tableName, TScan tScan,
+      Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
+    Table table = null;
+    try {
+      table = getTable(tableName);
+      Scan scan = new Scan();
+      addAttributes(scan, attributes);
+      if (tScan.isSetStartRow()) {
+        scan.setStartRow(tScan.getStartRow());
+      }
+      if (tScan.isSetStopRow()) {
+        scan.setStopRow(tScan.getStopRow());
+      }
+      if (tScan.isSetTimestamp()) {
+        // TScan.timestamp is treated as an exclusive upper bound on versions.
+        scan.setTimeRange(0, tScan.getTimestamp());
+      }
+      if (tScan.isSetCaching()) {
+        scan.setCaching(tScan.getCaching());
+      }
+      if (tScan.isSetBatchSize()) {
+        scan.setBatch(tScan.getBatchSize());
+      }
+      if (tScan.isSetColumns() && !tScan.getColumns().isEmpty()) {
+        for(ByteBuffer column : tScan.getColumns()) {
+          // "family" selects the whole family; "family:qualifier" one column.
+          byte [][] famQf = KeyValue.parseColumn(getBytes(column));
+          if(famQf.length == 1) {
+            scan.addFamily(famQf[0]);
+          } else {
+            scan.addColumn(famQf[0], famQf[1]);
+          }
+        }
+      }
+      if (tScan.isSetFilterString()) {
+        ParseFilter parseFilter = new ParseFilter();
+        scan.setFilter(
+            parseFilter.parseFilterString(tScan.getFilterString()));
+      }
+      if (tScan.isSetReversed()) {
+        scan.setReversed(tScan.isReversed());
+      }
+      if (tScan.isSetCacheBlocks()) {
+        scan.setCacheBlocks(tScan.isCacheBlocks());
+      }
+      return addScanner(table.getScanner(scan), tScan.sortColumns);
+    } catch (IOException e) {
+      LOG.warn(e.getMessage(), e);
+      throw getIOError(e);
+    } finally{
+      closeTable(table);
+    }
+  }
+
+  /**
+   * Opens a scanner starting at {@code startRow} (no stop row) over the given
+   * columns and registers it. Columns are "family" or "family:qualifier".
+   *
+   * @return the id under which the scanner is registered
+   */
+  @Override
+  public int scannerOpen(ByteBuffer tableName, ByteBuffer startRow,
+      List<ByteBuffer> columns,
+      Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
+
+    Table table = null;
+    try {
+      table = getTable(tableName);
+      Scan scan = new Scan(getBytes(startRow));
+      addAttributes(scan, attributes);
+      if(columns != null && !columns.isEmpty()) {
+        for(ByteBuffer column : columns) {
+          byte [][] famQf = KeyValue.parseColumn(getBytes(column));
+          if(famQf.length == 1) {
+            scan.addFamily(famQf[0]);
+          } else {
+            scan.addColumn(famQf[0], famQf[1]);
+          }
+        }
+      }
+      return addScanner(table.getScanner(scan), false);
+    } catch (IOException e) {
+      LOG.warn(e.getMessage(), e);
+      throw getIOError(e);
+    } finally{
+      closeTable(table);
+    }
+  }
+
+  /**
+   * Opens a scanner over [startRow, stopRow) for the given columns and
+   * registers it. Columns are "family" or "family:qualifier".
+   *
+   * @return the id under which the scanner is registered
+   */
+  @Override
+  public int scannerOpenWithStop(ByteBuffer tableName, ByteBuffer startRow,
+      ByteBuffer stopRow, List<ByteBuffer> columns,
+      Map<ByteBuffer, ByteBuffer> attributes)
+      throws IOError, TException {
+
+    Table table = null;
+    try {
+      table = getTable(tableName);
+      Scan scan = new Scan(getBytes(startRow), getBytes(stopRow));
+      addAttributes(scan, attributes);
+      if(columns != null && !columns.isEmpty()) {
+        for(ByteBuffer column : columns) {
+          byte [][] famQf = KeyValue.parseColumn(getBytes(column));
+          if(famQf.length == 1) {
+            scan.addFamily(famQf[0]);
+          } else {
+            scan.addColumn(famQf[0], famQf[1]);
+          }
+        }
+      }
+      return addScanner(table.getScanner(scan), false);
+    } catch (IOException e) {
+      LOG.warn(e.getMessage(), e);
+      throw getIOError(e);
+    } finally{
+      closeTable(table);
+    }
+  }
+
+  /**
+   * Opens a scanner over rows sharing the given prefix and registers it.
+   * Starts at {@code startAndPrefix} and stops as soon as a row no longer
+   * matches, via WhileMatchFilter wrapping a PrefixFilter.
+   *
+   * @return the id under which the scanner is registered
+   */
+  @Override
+  public int scannerOpenWithPrefix(ByteBuffer tableName,
+      ByteBuffer startAndPrefix,
+      List<ByteBuffer> columns,
+      Map<ByteBuffer, ByteBuffer> attributes)
+      throws IOError, TException {
+
+    Table table = null;
+    try {
+      table = getTable(tableName);
+      Scan scan = new Scan(getBytes(startAndPrefix));
+      addAttributes(scan, attributes);
+      // WhileMatchFilter terminates the scan at the first non-matching row
+      // instead of scanning (and filtering) the rest of the table.
+      Filter f = new WhileMatchFilter(
+          new PrefixFilter(getBytes(startAndPrefix)));
+      scan.setFilter(f);
+      if (columns != null && !columns.isEmpty()) {
+        for(ByteBuffer column : columns) {
+          byte [][] famQf = KeyValue.parseColumn(getBytes(column));
+          if(famQf.length == 1) {
+            scan.addFamily(famQf[0]);
+          } else {
+            scan.addColumn(famQf[0], famQf[1]);
+          }
+        }
+      }
+      return addScanner(table.getScanner(scan), false);
+    } catch (IOException e) {
+      LOG.warn(e.getMessage(), e);
+      throw getIOError(e);
+    } finally{
+      closeTable(table);
+    }
+  }
+
+  /**
+   * Opens a scanner starting at {@code startRow}, restricted to cell versions
+   * with timestamp below {@code timestamp}, and registers it.
+   *
+   * @return the id under which the scanner is registered
+   */
+  @Override
+  public int scannerOpenTs(ByteBuffer tableName, ByteBuffer startRow,
+      List<ByteBuffer> columns, long timestamp,
+      Map<ByteBuffer, ByteBuffer> attributes) throws IOError, TException {
+
+    Table table = null;
+    try {
+      table = getTable(tableName);
+      Scan scan = new Scan(getBytes(startRow));
+      addAttributes(scan, attributes);
+      // timestamp is treated as an exclusive upper bound on versions.
+      scan.setTimeRange(0, timestamp);
+      if (columns != null && !columns.isEmpty()) {
+        for (ByteBuffer column : columns) {
+          byte [][] famQf = KeyValue.parseColumn(getBytes(column));
+          if(famQf.length == 1) {
+            scan.addFamily(famQf[0]);
+          } else {
+            scan.addColumn(famQf[0], famQf[1]);
+          }
+        }
+      }
+      return addScanner(table.getScanner(scan), false);
+    } catch (IOException e) {
+      LOG.warn(e.getMessage(), e);
+      throw getIOError(e);
+    } finally{
+      closeTable(table);
+    }
+  }
+
+  /**
+   * Opens a scanner over [startRow, stopRow), restricted to cell versions
+   * with timestamp below {@code timestamp}, and registers it.
+   *
+   * @return the id under which the scanner is registered
+   */
+  @Override
+  public int scannerOpenWithStopTs(ByteBuffer tableName, ByteBuffer startRow,
+      ByteBuffer stopRow, List<ByteBuffer> columns, long timestamp,
+      Map<ByteBuffer, ByteBuffer> attributes) throws IOError, TException {
+
+    Table table = null;
+    try {
+      table = getTable(tableName);
+      Scan scan = new Scan(getBytes(startRow), getBytes(stopRow));
+      addAttributes(scan, attributes);
+      // timestamp is treated as an exclusive upper bound on versions.
+      // (Set exactly once; the original called setTimeRange twice.)
+      scan.setTimeRange(0, timestamp);
+      if (columns != null && !columns.isEmpty()) {
+        for (ByteBuffer column : columns) {
+          byte [][] famQf = KeyValue.parseColumn(getBytes(column));
+          if(famQf.length == 1) {
+            scan.addFamily(famQf[0]);
+          } else {
+            scan.addColumn(famQf[0], famQf[1]);
+          }
+        }
+      }
+      return addScanner(table.getScanner(scan), false);
+    } catch (IOException e) {
+      LOG.warn(e.getMessage(), e);
+      throw getIOError(e);
+    } finally{
+      closeTable(table);
+    }
+  }
+
+  /**
+   * Returns the column family descriptors of a table, keyed by family name,
+   * sorted by key (TreeMap).
+   *
+   * @throws IOError wrapping any IOException from reading the descriptor
+   */
+  @Override
+  public Map<ByteBuffer, ColumnDescriptor> getColumnDescriptors(
+      ByteBuffer tableName) throws IOError, TException {
+    Table table = null;
+    try {
+      TreeMap<ByteBuffer, ColumnDescriptor> columns = new TreeMap<>();
+      table = getTable(tableName);
+      HTableDescriptor desc = table.getTableDescriptor();
+      for (HColumnDescriptor e: desc.getFamilies()) {
+        ColumnDescriptor col = ThriftUtilities.colDescFromHbase(e);
+        columns.put(col.name, col);
+      }
+      return columns;
+    } catch (IOException e) {
+      LOG.warn(e.getMessage(), e);
+      throw getIOError(e);
+    } finally {
+      closeTable(table);
+    }
+  }
+
+  /**
+   * Closes the given table, tolerating null (used from finally blocks where
+   * getTable may not have succeeded). A close failure is logged and rethrown
+   * as IOError.
+   */
+  private void closeTable(Table table) throws IOError {
+    try {
+      if (table != null) {
+        table.close();
+      }
+    } catch (IOException e) {
+      LOG.error(e.getMessage(), e);
+      throw getIOError(e);
+    }
+  }
+
+  /**
+   * Looks up, in hbase:meta, the region containing (or immediately before)
+   * {@code searchRow} and returns its boundaries and hosting server.
+   *
+   * @throws IOError if no meta row is found or the region info cannot be read
+   */
+  @Override
+  public TRegionInfo getRegionInfo(ByteBuffer searchRow) throws IOError {
+    try {
+      byte[] row = getBytes(searchRow);
+      Result startRowResult = getReverseScanResult(TableName.META_TABLE_NAME.getName(), row,
+        HConstants.CATALOG_FAMILY);
+
+      if (startRowResult == null) {
+        throw new IOException("Cannot find row in " + TableName.META_TABLE_NAME + ", row="
+            + Bytes.toStringBinary(row));
+      }
+
+      // find region start and end keys
+      HRegionInfo regionInfo = MetaTableAccessor.getHRegionInfo(startRowResult);
+      if (regionInfo == null) {
+        throw new IOException("RegionInfo REGIONINFO was null or " +
+            " empty in Meta for row=" + Bytes.toStringBinary(row));
+      }
+      TRegionInfo region = new TRegionInfo();
+      region.setStartKey(regionInfo.getStartKey());
+      region.setEndKey(regionInfo.getEndKey());
+      region.id = regionInfo.getRegionId();
+      region.setName(regionInfo.getRegionName());
+      region.version = HREGION_VERSION; // version not used anymore, PB encoding used.
+      // find region assignment to server
+      ServerName serverName = HRegionInfo.getServerName(startRowResult);
+      if (serverName != null) {
+        region.setServerName(Bytes.toBytes(serverName.getHostname()));
+        region.port = serverName.getPort();
+      }
+      return region;
+    } catch (IOException e) {
+      LOG.warn(e.getMessage(), e);
+      throw getIOError(e);
+    }
+  }
+
+  /**
+   * Returns the first result of a reversed, single-family scan starting at
+   * {@code row} — i.e. the row at or immediately before {@code row}. Returns
+   * null if no such row exists. Table and scanner are closed via
+   * try-with-resources.
+   */
+  private Result getReverseScanResult(byte[] tableName, byte[] row, byte[] family)
+    throws IOException {
+    // new Scan(row) already sets the start row; the original also called
+    // scan.setStartRow(row) redundantly.
+    Scan scan = new Scan(row);
+    scan.setReversed(true);
+    scan.addFamily(family);
+    try (Table table = getTable(tableName);
+        ResultScanner scanner = table.getScanner(scan)) {
+      return scanner.next();
+    }
+  }
+
+  /**
+   * Applies a thrift increment. When hbase.regionserver.thrift.coalesceIncrement
+   * is enabled the increment is queued in the coalescer (asynchronous,
+   * best-effort); otherwise it is applied synchronously.
+   *
+   * @throws TException if table or row key is missing
+   * @throws IOError wrapping any IOException from the increment
+   */
+  @Override
+  public void increment(TIncrement tIncrement) throws IOError, TException {
+    if (tIncrement.getRow().length == 0 || tIncrement.getTable().length == 0) {
+      throw new TException("Must supply a table and a row key; can't increment.");
+    }
+
+    if (conf.getBoolean(COALESCE_INC_KEY, false)) {
+      this.coalescer.queueIncrement(tIncrement);
+      return;
+    }
+
+    Table table = null;
+    try {
+      table = getTable(tIncrement.getTable());
+      Increment inc = ThriftUtilities.incrementFromThrift(tIncrement);
+      table.increment(inc);
+    } catch (IOException e) {
+      LOG.warn(e.getMessage(), e);
+      throw getIOError(e);
+    } finally {
+      closeTable(table);
+    }
+  }
+
+  /**
+   * Applies a list of increments: queued as a batch when coalescing is
+   * enabled, otherwise applied one by one via {@link #increment}.
+   */
+  @Override
+  public void incrementRows(List<TIncrement> tIncrements) throws IOError, TException {
+    if (conf.getBoolean(COALESCE_INC_KEY, false)) {
+      this.coalescer.queueIncrements(tIncrements);
+      return;
+    }
+    for (TIncrement tinc: tIncrements) {
+      increment(tinc);
+    }
+  }
+
+  /**
+   * Applies a thrift append and returns the resulting cells of the row.
+   *
+   * @throws TException if table or row key is missing
+   * @throws IOError wrapping any IOException from the append
+   */
+  @Override
+  public List<TCell> append(TAppend tAppend) throws IOError, TException {
+    if (tAppend.getRow().length == 0 || tAppend.getTable().length == 0) {
+      throw new TException("Must supply a table and a row key; can't append.");
+    }
+
+    Table table = null;
+    try {
+      table = getTable(tAppend.getTable());
+      Append append = ThriftUtilities.appendFromThrift(tAppend);
+      Result result = table.append(append);
+      return ThriftUtilities.cellFromHBase(result.rawCells());
+    } catch (IOException e) {
+      LOG.warn(e.getMessage(), e);
+      throw getIOError(e);
+    } finally {
+      closeTable(table);
+    }
+  }
+
+  /**
+   * Atomically applies {@code mput} to the row iff the cell at (row, column)
+   * currently equals {@code value} (a null value means "cell must not exist" —
+   * it is converted to an empty byte array for the check).
+   *
+   * @return true if the put was applied
+   * @throws TException IllegalArgument (a TException subtype) on malformed
+   *           column names, IOError on failures from the table call
+   */
+  @Override
+  public boolean checkAndPut(ByteBuffer tableName, ByteBuffer row, ByteBuffer column,
+    ByteBuffer value, Mutation mput, Map<ByteBuffer, ByteBuffer> attributes) throws TException {
+    Put put;
+    try {
+      // Build the Put first so malformed mutation columns fail before any RPC.
+      put = new Put(getBytes(row), HConstants.LATEST_TIMESTAMP);
+      addAttributes(put, attributes);
+
+      byte[][] famAndQf = KeyValue.parseColumn(getBytes(mput.column));
+
+      put.addImmutable(famAndQf[0], famAndQf[1], mput.value != null ? getBytes(mput.value)
+          : HConstants.EMPTY_BYTE_ARRAY);
+
+      put.setDurability(mput.writeToWAL ? Durability.SYNC_WAL : Durability.SKIP_WAL);
+    } catch (IllegalArgumentException e) {
+      LOG.warn(e.getMessage(), e);
+      throw new IllegalArgument(Throwables.getStackTraceAsString(e));
+    }
+
+    Table table = null;
+    try {
+      table = getTable(tableName);
+      byte[][] famAndQf = KeyValue.parseColumn(getBytes(column));
+      return table.checkAndPut(getBytes(row), famAndQf[0], famAndQf[1],
+          value != null ? getBytes(value) : HConstants.EMPTY_BYTE_ARRAY, put);
+    } catch (IOException e) {
+      LOG.warn(e.getMessage(), e);
+      throw getIOError(e);
+    } catch (IllegalArgumentException e) {
+      LOG.warn(e.getMessage(), e);
+      throw new IllegalArgument(Throwables.getStackTraceAsString(e));
+    } finally {
+      closeTable(table);
+    }
+  }
+
+  /** Identifies this handler as the thrift1 ("ONE") server implementation. */
+  @Override
+  public TThriftServerType getThriftServerType() throws TException {
+    return TThriftServerType.ONE;
+  }
+
+  /**
+   * Wraps a throwable in a thrift IOError, preserving the original as the
+   * cause (IOErrorWithCause) and carrying the full stack trace in the message
+   * so remote clients see it too.
+   */
+  private static IOError getIOError(Throwable throwable) {
+    IOError error = new IOErrorWithCause(throwable);
+    error.setMessage(Throwables.getStackTraceAsString(throwable));
+    return error;
+  }
+
+  /**
+   * Copies all thrift attributes onto the Operation (Put/Delete/Scan/...)
+   * as operation attributes. Keys are decoded with Bytes.toStringBinary;
+   * a null or empty map is a no-op.
+   */
+  private static void addAttributes(OperationWithAttributes op,
+      Map<ByteBuffer, ByteBuffer> attributes) {
+    if (attributes == null || attributes.isEmpty()) {
+      return;
+    }
+    for (Map.Entry<ByteBuffer, ByteBuffer> entry: attributes.entrySet()) {
+      String name = Bytes.toStringBinary(getBytes(entry.getKey()));
+      byte[] value = getBytes(entry.getValue());
+      op.setAttribute(name, value);
+    }
+  }
+
+  /**
+   * Immutable pairing of an open ResultScanner with the client's request to
+   * have result columns sorted, as kept in the scanner registry between
+   * scannerOpen* and scannerGet*/scannerClose calls.
+   */
+  protected static class ResultScannerWrapper {
+    private final ResultScanner scanner;
+    // whether rowResultFromHBase should sort columns for this scanner
+    private final boolean sortColumns;
+    public ResultScannerWrapper(ResultScanner resultScanner,
+      boolean sortResultColumns) {
+      scanner = resultScanner;
+      sortColumns = sortResultColumns;
+    }
+
+    public ResultScanner getScanner() {
+      return scanner;
+    }
+
+    public boolean isColumnSorted() {
+      return sortColumns;
+    }
+  }
+
+  /**
+   * Thrift IOError that additionally retains the original throwable as its
+   * cause, so server-side metrics/logging can inspect the real exception
+   * (thrift-generated IOError alone only carries a message string).
+   * equals/hashCode extend IOError's to also compare the cause.
+   */
+  public static class IOErrorWithCause extends IOError {
+    private final Throwable cause;
+    public IOErrorWithCause(Throwable cause) {
+      this.cause = cause;
+    }
+
+    @Override
+    public synchronized Throwable getCause() {
+      return cause;
+    }
+
+    @Override
+    public boolean equals(Object other) {
+      // Equal only if the IOError fields match AND the causes match
+      // (Throwable does not override equals, so cause comparison is identity
+      // unless the cause type defines its own equals).
+      if (super.equals(other) && other instanceof IOErrorWithCause) {
+        Throwable otherCause = ((IOErrorWithCause) other).getCause();
+        if (this.getCause() != null) {
+          return otherCause != null && this.getCause().equals(otherCause);
+        } else {
+          return otherCause == null;
+        }
+      }
+      return false;
+    }
+
+    @Override
+    public int hashCode() {
+      // Keep hashCode consistent with equals by folding in the cause.
+      int result = super.hashCode();
+      result = 31 * result + (cause != null ? cause.hashCode() : 0);
+      return result;
+    }
+  }
+}
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftHttpServlet.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftHttpServlet.java
index 3dfa50a..f50d619 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftHttpServlet.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftHttpServlet.java
@@ -27,13 +27,13 @@ import javax.servlet.http.HttpServletResponse;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.security.SecurityUtil;
 import org.apache.hadoop.hbase.util.Base64;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.AuthorizationException;
 import org.apache.hadoop.security.authorize.ProxyUsers;
+import org.apache.http.HttpHeaders;
 import org.apache.thrift.TProcessor;
 import org.apache.thrift.protocol.TProtocolFactory;
 import org.apache.thrift.server.TServlet;
@@ -52,24 +52,22 @@ import org.ietf.jgss.Oid;
 public class ThriftHttpServlet extends TServlet {
   private static final long serialVersionUID = 1L;
   private static final Log LOG = LogFactory.getLog(ThriftHttpServlet.class.getName());
-  private transient final UserGroupInformation realUser;
-  private transient final Configuration conf;
+  private final transient UserGroupInformation serviceUGI;
+  private final transient UserGroupInformation httpUGI;
+  private final transient HBaseServiceHandler handler;
   private final boolean securityEnabled;
   private final boolean doAsEnabled;
-  private transient ThriftServerRunner.HBaseHandler hbaseHandler;
 
   // HTTP Header related constants.
-  public static final String WWW_AUTHENTICATE = "WWW-Authenticate";
-  public static final String AUTHORIZATION = "Authorization";
   public static final String NEGOTIATE = "Negotiate";
 
   public ThriftHttpServlet(TProcessor processor, TProtocolFactory protocolFactory,
-      UserGroupInformation realUser, Configuration conf, ThriftServerRunner.HBaseHandler
-      hbaseHandler, boolean securityEnabled, boolean doAsEnabled) {
+      UserGroupInformation serviceUGI, UserGroupInformation httpUGI, HBaseServiceHandler handler,
+      boolean securityEnabled, boolean doAsEnabled) {
     super(processor, protocolFactory);
-    this.realUser = realUser;
-    this.conf = conf;
-    this.hbaseHandler = hbaseHandler;
+    this.serviceUGI = serviceUGI;
+    this.httpUGI = httpUGI;
+    this.handler = handler;
     this.securityEnabled = securityEnabled;
     this.doAsEnabled = doAsEnabled;
   }
@@ -79,6 +77,19 @@ public class ThriftHttpServlet extends TServlet {
       throws ServletException, IOException {
     String effectiveUser = request.getRemoteUser();
     if (securityEnabled) {
+      /*
+      Check that the AUTHORIZATION header has any content. If it does not then return a 401
+      requesting AUTHORIZATION header to be sent. This is typical where the first request doesn't
+      send the AUTHORIZATION header initially.
+       */
+      String authHeader = request.getHeader(HttpHeaders.AUTHORIZATION);
+      if (authHeader == null || authHeader.isEmpty()) {
+        // Send a 401 to client
+        response.addHeader(HttpHeaders.WWW_AUTHENTICATE, NEGOTIATE);
+        response.sendError(HttpServletResponse.SC_UNAUTHORIZED);
+        return;
+      }
+
       try {
         // As Thrift HTTP transport doesn't support SPNEGO yet (THRIFT-889),
         // Kerberos authentication is being done at servlet level.
@@ -86,20 +97,22 @@ public class ThriftHttpServlet extends TServlet {
         effectiveUser = identity.principal;
         // It is standard for client applications expect this header.
         // Please see http://tools.ietf.org/html/rfc4559 for more details.
-        response.addHeader(WWW_AUTHENTICATE,  NEGOTIATE + " " + identity.outToken);
+        response.addHeader(HttpHeaders.WWW_AUTHENTICATE,  NEGOTIATE + " " + identity.outToken);
       } catch (HttpAuthenticationException e) {
         LOG.error("Kerberos Authentication failed", e);
         // Send a 401 to the client
         response.setStatus(HttpServletResponse.SC_UNAUTHORIZED);
-        response.addHeader(WWW_AUTHENTICATE, NEGOTIATE);
+        response.addHeader(HttpHeaders.WWW_AUTHENTICATE, NEGOTIATE);
         response.getWriter().println("Authentication Error: " + e.getMessage());
         return;
       }
     }
-    String doAsUserFromQuery = request.getHeader("doAs");
-    if(effectiveUser == null) {
-      effectiveUser = realUser.getShortUserName();
+
+    if (effectiveUser == null) {
+      effectiveUser = serviceUGI.getShortUserName();
     }
+
+    String doAsUserFromQuery = request.getHeader("doAs");
     if (doAsUserFromQuery != null) {
       if (!doAsEnabled) {
         throw new ServletException("Support for proxyuser is not configured");
@@ -112,13 +125,13 @@ public class ThriftHttpServlet extends TServlet {
           remoteUser);
       // validate the proxy user authorization
       try {
-        ProxyUsers.authorize(ugi, request.getRemoteAddr(), conf);
+        ProxyUsers.authorize(ugi, request.getRemoteAddr());
       } catch (AuthorizationException e) {
-        throw new ServletException(e.getMessage());
+        throw new ServletException(e);
       }
       effectiveUser = doAsUserFromQuery;
     }
-    hbaseHandler.setEffectiveUser(effectiveUser);
+    handler.setEffectiveUser(effectiveUser);
     super.doPost(request, response);
   }
 
@@ -129,9 +142,9 @@ public class ThriftHttpServlet extends TServlet {
    */
   private RemoteUserIdentity doKerberosAuth(HttpServletRequest request)
       throws HttpAuthenticationException {
-    HttpKerberosServerAction action = new HttpKerberosServerAction(request, realUser);
+    HttpKerberosServerAction action = new HttpKerberosServerAction(request, httpUGI);
     try {
-      String principal = realUser.doAs(action);
+      String principal = httpUGI.doAs(action);
       return new RemoteUserIdentity(principal, action.outToken);
     } catch (Exception e) {
       LOG.error("Failed to perform authentication");
@@ -154,12 +167,12 @@ public class ThriftHttpServlet extends TServlet {
   }
 
   private static class HttpKerberosServerAction implements PrivilegedExceptionAction<String> {
-    HttpServletRequest request;
-    UserGroupInformation serviceUGI;
+    final HttpServletRequest request;
+    final UserGroupInformation httpUGI;
     String outToken = null;
-    HttpKerberosServerAction(HttpServletRequest request, UserGroupInformation serviceUGI) {
+    HttpKerberosServerAction(HttpServletRequest request, UserGroupInformation httpUGI) {
       this.request = request;
-      this.serviceUGI = serviceUGI;
+      this.httpUGI = httpUGI;
     }
 
     @Override
@@ -167,7 +180,7 @@ public class ThriftHttpServlet extends TServlet {
       // Get own Kerberos credentials for accepting connection
       GSSManager manager = GSSManager.getInstance();
       GSSContext gssContext = null;
-      String serverPrincipal = SecurityUtil.getPrincipalWithoutRealm(serviceUGI.getUserName());
+      String serverPrincipal = SecurityUtil.getPrincipalWithoutRealm(httpUGI.getUserName());
       try {
         // This Oid for Kerberos GSS-API mechanism.
         Oid kerberosMechOid = new Oid("1.2.840.113554.1.2.2");
@@ -218,7 +231,7 @@ public class ThriftHttpServlet extends TServlet {
      */
     private String getAuthHeader(HttpServletRequest request)
         throws HttpAuthenticationException {
-      String authHeader = request.getHeader(AUTHORIZATION);
+      String authHeader = request.getHeader(HttpHeaders.AUTHORIZATION);
       // Each http request must have an Authorization header
       if (authHeader == null || authHeader.isEmpty()) {
         throw new HttpAuthenticationException("Authorization header received " +
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftMetrics.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftMetrics.java
index 1f5dc95..58f9fb0 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftMetrics.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftMetrics.java
@@ -19,14 +19,14 @@
 
 package org.apache.hadoop.hbase.thrift;
 
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.CallQueueTooBigException;
+import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
 import org.apache.hadoop.hbase.MultiActionResultTooLarge;
 import org.apache.hadoop.hbase.NotServingRegionException;
 import org.apache.hadoop.hbase.RegionTooBusyException;
 import org.apache.hadoop.hbase.UnknownScannerException;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
 import org.apache.hadoop.hbase.exceptions.ClientExceptionsUtil;
 import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;
 import org.apache.hadoop.hbase.exceptions.OutOfOrderScannerNextException;
@@ -61,11 +61,12 @@ public class ThriftMetrics  {
   public static final String SLOW_RESPONSE_NANO_SEC =
     "hbase.thrift.slow.response.nano.second";
   public static final long DEFAULT_SLOW_RESPONSE_NANO_SEC = 10 * 1000 * 1000;
+  private final ThriftServerType thriftServerType;
 
 
   public ThriftMetrics(Configuration conf, ThriftServerType t) {
     slowResponseTime = conf.getLong( SLOW_RESPONSE_NANO_SEC, DEFAULT_SLOW_RESPONSE_NANO_SEC);
-
+    thriftServerType = t;
     if (t == ThriftServerType.ONE) {
       source = CompatibilitySingletonFactory.getInstance(MetricsThriftServerSourceFactory.class).createThriftOneSource();
     } else if (t == ThriftServerType.TWO) {
@@ -156,4 +157,8 @@ public class ThriftMetrics  {
     }
     return ClientExceptionsUtil.findException(t);
   }
+
+  public ThriftServerType getThriftServerType() {
+    return thriftServerType;
+  }
 }
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServer.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServer.java
index 560f788..7cf5046 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServer.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServer.java
@@ -18,24 +18,140 @@
 
 package org.apache.hadoop.hbase.thrift;
 
-import java.util.Arrays;
+import static org.apache.hadoop.hbase.thrift.Constants.BACKLOG_CONF_DEAFULT;
+import static org.apache.hadoop.hbase.thrift.Constants.BACKLOG_CONF_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.BIND_CONF_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.BIND_OPTION;
+import static org.apache.hadoop.hbase.thrift.Constants.COMPACT_CONF_DEFAULT;
+import static org.apache.hadoop.hbase.thrift.Constants.COMPACT_CONF_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.COMPACT_OPTION;
+import static org.apache.hadoop.hbase.thrift.Constants.DEFAULT_BIND_ADDR;
+import static org.apache.hadoop.hbase.thrift.Constants.DEFAULT_HTTP_MAX_HEADER_SIZE;
+import static org.apache.hadoop.hbase.thrift.Constants.DEFAULT_LISTEN_PORT;
+import static org.apache.hadoop.hbase.thrift.Constants.FRAMED_CONF_DEFAULT;
+import static org.apache.hadoop.hbase.thrift.Constants.FRAMED_CONF_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.FRAMED_OPTION;
+import static org.apache.hadoop.hbase.thrift.Constants.HTTP_MAX_THREADS_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.HTTP_MAX_THREADS_KEY_DEFAULT;
+import static org.apache.hadoop.hbase.thrift.Constants.HTTP_MIN_THREADS_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.HTTP_MIN_THREADS_KEY_DEFAULT;
+import static org.apache.hadoop.hbase.thrift.Constants.INFOPORT_OPTION;
+import static org.apache.hadoop.hbase.thrift.Constants.KEEP_ALIVE_SEC_OPTION;
+import static org.apache.hadoop.hbase.thrift.Constants.MAX_FRAME_SIZE_CONF_DEFAULT;
+import static org.apache.hadoop.hbase.thrift.Constants.MAX_FRAME_SIZE_CONF_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.MAX_QUEUE_SIZE_OPTION;
+import static org.apache.hadoop.hbase.thrift.Constants.MAX_WORKERS_OPTION;
+import static org.apache.hadoop.hbase.thrift.Constants.MIN_WORKERS_OPTION;
+import static org.apache.hadoop.hbase.thrift.Constants.PORT_CONF_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.PORT_OPTION;
+import static org.apache.hadoop.hbase.thrift.Constants.READ_TIMEOUT_OPTION;
+import static org.apache.hadoop.hbase.thrift.Constants.SELECTOR_NUM_OPTION;
+import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_DNS_INTERFACE_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_DNS_NAMESERVER_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_FILTERS;
+import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_HTTP_ALLOW_OPTIONS_METHOD;
+import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_HTTP_ALLOW_OPTIONS_METHOD_DEFAULT;
+import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_INFO_SERVER_BINDING_ADDRESS;
+import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_INFO_SERVER_BINDING_ADDRESS_DEFAULT;
+import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_INFO_SERVER_PORT;
+import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_INFO_SERVER_PORT_DEFAULT;
+import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_KERBEROS_PRINCIPAL_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_KEYTAB_FILE_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_QOP_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_SELECTOR_NUM;
+import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_SERVER_SOCKET_READ_TIMEOUT_DEFAULT;
+import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_SERVER_SOCKET_READ_TIMEOUT_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_SPNEGO_KEYTAB_FILE_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_SPNEGO_PRINCIPAL_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_SSL_ENABLED_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_SSL_KEYSTORE_KEYPASSWORD_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_SSL_KEYSTORE_PASSWORD_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_SSL_KEYSTORE_STORE_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_SUPPORT_PROXYUSER_KEY;
+import static org.apache.hadoop.hbase.thrift.Constants.USE_HTTP_CONF_KEY;
+
+import java.io.IOException;
+import java.net.InetAddress;
+import java.net.InetSocketAddress;
+import java.net.UnknownHostException;
+
+import java.util.ArrayList;
 import java.util.List;
-
+import java.util.Map;
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.ThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
+import javax.security.auth.callback.Callback;
+import javax.security.auth.callback.UnsupportedCallbackException;
+import javax.security.sasl.AuthorizeCallback;
+import javax.security.sasl.SaslServer;
+
+import org.apache.commons.cli.BasicParser;
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
 import org.apache.commons.cli.HelpFormatter;
 import org.apache.commons.cli.Options;
-import org.apache.commons.cli.PosixParser;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.filter.ParseFilter;
 import org.apache.hadoop.hbase.http.InfoServer;
-import org.apache.hadoop.hbase.thrift.ThriftServerRunner.ImplType;
+import org.apache.hadoop.hbase.jetty.SslSelectChannelConnectorSecure;
+import org.apache.hadoop.hbase.security.SaslUtil;
+import org.apache.hadoop.hbase.security.SecurityUtil;
+import org.apache.hadoop.hbase.security.UserProvider;
+import org.apache.hadoop.hbase.thrift.generated.Hbase;
+
+import org.apache.hadoop.hbase.util.DNS;
+import org.apache.hadoop.hbase.util.HttpServerUtil;
+import org.apache.hadoop.hbase.util.JvmPauseMonitor;
+import org.apache.hadoop.hbase.util.Strings;
 import org.apache.hadoop.hbase.util.VersionInfo;
+import org.apache.hadoop.security.SaslRpcServer;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authorize.ProxyUsers;
 import org.apache.hadoop.util.Shell.ExitCodeException;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
+import org.apache.thrift.TException;
+import org.apache.thrift.TProcessor;
+import org.apache.thrift.protocol.TBinaryProtocol;
+import org.apache.thrift.protocol.TCompactProtocol;
+import org.apache.thrift.protocol.TProtocol;
+import org.apache.thrift.protocol.TProtocolFactory;
+import org.apache.thrift.server.THsHaServer;
+import org.apache.thrift.server.TNonblockingServer;
+import org.apache.thrift.server.TServer;
+import org.apache.thrift.server.TServlet;
+import org.apache.thrift.server.TThreadedSelectorServer;
+import org.apache.thrift.transport.TFramedTransport;
+import org.apache.thrift.transport.TNonblockingServerSocket;
+import org.apache.thrift.transport.TNonblockingServerTransport;
+import org.apache.thrift.transport.TSaslServerTransport;
+import org.apache.thrift.transport.TServerSocket;
+import org.apache.thrift.transport.TServerTransport;
+import org.apache.thrift.transport.TTransportFactory;
+
+import org.mortbay.jetty.Connector;
+import org.mortbay.jetty.Server;
+import org.mortbay.jetty.nio.SelectChannelConnector;
+
+import org.mortbay.jetty.servlet.Context;
+import org.mortbay.jetty.servlet.ServletHolder;
+import org.mortbay.thread.QueuedThreadPool;
+
+import com.google.common.base.Joiner;
+import com.google.common.base.Splitter;
+import com.google.common.util.concurrent.ThreadFactoryBuilder;
+
+
 
 /**
  * ThriftServer- this class starts up a Thrift server which implements the
@@ -43,28 +159,33 @@ import org.apache.hadoop.util.Shell.ExitCodeException;
  * independent process.
  */
 @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
-public class ThriftServer {
+public class ThriftServer extends Configured implements Tool{
 
   private static final Log LOG = LogFactory.getLog(ThriftServer.class);
 
-  private static final String MIN_WORKERS_OPTION = "minWorkers";
-  private static final String MAX_WORKERS_OPTION = "workers";
-  private static final String MAX_QUEUE_SIZE_OPTION = "queue";
-  private static final String KEEP_ALIVE_SEC_OPTION = "keepAliveSec";
-  static final String BIND_OPTION = "bind";
-  static final String COMPACT_OPTION = "compact";
-  static final String FRAMED_OPTION = "framed";
-  static final String PORT_OPTION = "port";
+  protected Configuration conf;
 
-  private static final String DEFAULT_BIND_ADDR = "0.0.0.0";
-  private static final int DEFAULT_LISTEN_PORT = 9090;
+  protected InfoServer infoServer;
 
-  private Configuration conf;
-  ThriftServerRunner serverRunner;
+  protected TProcessor processor;
 
-  private InfoServer infoServer;
+  protected ThriftMetrics metrics;
+  protected HBaseServiceHandler hBaseServiceHandler;
+  protected UserGroupInformation serviceUGI;
+  protected UserGroupInformation httpUGI;
+  protected boolean httpEnable;
 
-  private static final String READ_TIMEOUT_OPTION = "readTimeout";
+  protected SaslUtil.QualityOfProtection qop;
+  protected String host;
+  protected int listenPort;
+
+  protected boolean securityEnabled;
+  protected boolean doAsEnabled;
+
+  protected JvmPauseMonitor pauseMonitor;
+
+  protected volatile TServer tserver;
+  protected volatile Server httpServer;
 
   //
   // Main program and support routines
@@ -74,43 +195,472 @@ public class ThriftServer {
     this.conf = HBaseConfiguration.create(conf);
   }
 
-  private static void printUsageAndExit(Options options, int exitCode)
+  protected ThriftMetrics createThriftMetrics(Configuration conf) {
+    return new ThriftMetrics(conf, ThriftMetrics.ThriftServerType.ONE);
+  }
+
+  protected void setupParameters() throws IOException {
+    // login the server principal (if using secure Hadoop)
+    UserProvider userProvider = UserProvider.instantiate(conf);
+    securityEnabled = userProvider.isHadoopSecurityEnabled()
+        && userProvider.isHBaseSecurityEnabled();
+    if (securityEnabled) {
+      host = Strings.domainNamePointerToHostName(DNS.getDefaultHost(
+          conf.get(THRIFT_DNS_INTERFACE_KEY, "default"),
+          conf.get(THRIFT_DNS_NAMESERVER_KEY, "default")));
+      userProvider.login(THRIFT_KEYTAB_FILE_KEY, THRIFT_KERBEROS_PRINCIPAL_KEY, host);
+
+      // Setup the SPNEGO user for HTTP if configured
+      String spnegoPrincipal = getSpengoPrincipal(conf, host);
+      String spnegoKeytab = getSpnegoKeytab(conf);
+      UserGroupInformation.setConfiguration(conf);
+      // login the SPNEGO principal using UGI to avoid polluting the login user
+      this.httpUGI = UserGroupInformation.loginUserFromKeytabAndReturnUGI(spnegoPrincipal,
+          spnegoKeytab);
+    }
+    this.serviceUGI = userProvider.getCurrent().getUGI();
+    if (httpUGI == null) {
+      this.httpUGI = serviceUGI;
+    }
+
+    this.listenPort = conf.getInt(PORT_CONF_KEY, DEFAULT_LISTEN_PORT);
+    this.metrics = createThriftMetrics(conf);
+    this.pauseMonitor = new JvmPauseMonitor(conf, this.metrics.getSource());
+    this.hBaseServiceHandler = createHandler(conf, userProvider);
+    this.hBaseServiceHandler.initMetrics(metrics);
+    this.processor = createProcessor();
+
+    httpEnable = conf.getBoolean(USE_HTTP_CONF_KEY, false);
+    doAsEnabled = conf.getBoolean(THRIFT_SUPPORT_PROXYUSER_KEY, false);
+    if (doAsEnabled && !httpEnable) {
+      LOG.warn("Fail to enable the doAs feature." + USE_HTTP_CONF_KEY + " is not configured.");
+    }
+
+    String strQop = conf.get(THRIFT_QOP_KEY);
+    if (strQop != null) {
+      this.qop = SaslUtil.getQop(strQop);
+    }
+    if (qop != null) {
+      if (qop != SaslUtil.QualityOfProtection.AUTHENTICATION &&
+          qop != SaslUtil.QualityOfProtection.INTEGRITY &&
+          qop != SaslUtil.QualityOfProtection.PRIVACY) {
+        throw new IOException(String.format("Invalid %s: It must be one of %s, %s, or %s.",
+            THRIFT_QOP_KEY,
+            SaslUtil.QualityOfProtection.AUTHENTICATION.name(),
+            SaslUtil.QualityOfProtection.INTEGRITY.name(),
+            SaslUtil.QualityOfProtection.PRIVACY.name()));
+      }
+      checkHttpSecurity(qop, conf);
+      if (!securityEnabled) {
+        throw new IOException("Thrift server must run in secure mode to support authentication");
+      }
+    }
+    registerFilters(conf);
+    pauseMonitor.start();
+  }
+
+  private String getSpengoPrincipal(Configuration conf, String host) throws IOException {
+    String principal = conf.get(THRIFT_SPNEGO_PRINCIPAL_KEY);
+    if (principal == null) {
+      // We cannot use the Hadoop configuration deprecation handling here since
+      // the THRIFT_KERBEROS_PRINCIPAL_KEY config is still valid for regular Kerberos
+      // communication. The preference should be to use the THRIFT_SPNEGO_PRINCIPAL_KEY
+      // config so that THRIFT_KERBEROS_PRINCIPAL_KEY doesn't control both backend
+      // Kerberos principal and SPNEGO principal.
+      LOG.info(String.format("Using deprecated %s config for SPNEGO principal. Use %s instead.",
+          THRIFT_KERBEROS_PRINCIPAL_KEY, THRIFT_SPNEGO_PRINCIPAL_KEY));
+      principal = conf.get(THRIFT_KERBEROS_PRINCIPAL_KEY);
+    }
+    // Handle _HOST in principal value
+    return org.apache.hadoop.security.SecurityUtil.getServerPrincipal(principal, host);
+  }
+
+  private String getSpnegoKeytab(Configuration conf) {
+    String keytab = conf.get(THRIFT_SPNEGO_KEYTAB_FILE_KEY);
+    if (keytab == null) {
+      // We cannot use the Hadoop configuration deprecation handling here since
+      // the THRIFT_KEYTAB_FILE_KEY config is still valid for regular Kerberos
+      // communication. The preference should be to use the THRIFT_SPNEGO_KEYTAB_FILE_KEY
+      // config so that THRIFT_KEYTAB_FILE_KEY doesn't control both backend
+      // Kerberos keytab and SPNEGO keytab.
+      LOG.info(String.format("Using deprecated %s config for SPNEGO keytab. Use %s instead.",
+          THRIFT_KEYTAB_FILE_KEY, THRIFT_SPNEGO_KEYTAB_FILE_KEY));
+      keytab = conf.get(THRIFT_KEYTAB_FILE_KEY);
+    }
+    return keytab;
+  }
+
+  protected void startInfoServer() throws IOException {
+    // Put up info server.
+    int port = conf.getInt(THRIFT_INFO_SERVER_PORT, THRIFT_INFO_SERVER_PORT_DEFAULT);
+
+    if (port >= 0) {
+      conf.setLong("startcode", System.currentTimeMillis());
+      String a = conf
+          .get(THRIFT_INFO_SERVER_BINDING_ADDRESS, THRIFT_INFO_SERVER_BINDING_ADDRESS_DEFAULT);
+      infoServer = new InfoServer("thrift", a, port, false, conf);
+      infoServer.setAttribute("hbase.conf", conf);
+      infoServer.setAttribute("hbase.thrift.server.type", metrics.getThriftServerType().name());
+      infoServer.start();
+
+    }
+  }
+
+  protected void checkHttpSecurity(SaslUtil.QualityOfProtection qop, Configuration conf) {
+    if (qop == SaslUtil.QualityOfProtection.PRIVACY &&
+        conf.getBoolean(USE_HTTP_CONF_KEY, false) &&
+        !conf.getBoolean(THRIFT_SSL_ENABLED_KEY, false)) {
+      throw new IllegalArgumentException("Thrift HTTP Server's QoP is privacy, but " +
+          THRIFT_SSL_ENABLED_KEY + " is false");
+    }
+  }
+
+  protected HBaseServiceHandler createHandler(Configuration conf, UserProvider userProvider)
+      throws IOException {
+    return new ThriftHBaseServiceHandler(conf, userProvider);
+  }
+
+  protected TProcessor createProcessor() {
+    return new Hbase.Processor<>(
+        HbaseHandlerMetricsProxy.newInstance((Hbase.Iface) hBaseServiceHandler, metrics, conf));
+  }
+
+  /**
+   * The Thrift server; not null means the server has been started. For test use only.
+   * @return the tServer
+   */
+  @InterfaceAudience.Private
+  public TServer getTserver() {
+    return tserver;
+  }
+
+  /**
+   * The Jetty server; not null means the HTTP server has been started. For test use only.
+   * @return the http server
+   */
+  @InterfaceAudience.Private
+  public Server getHttpServer() {
+    return httpServer;
+  }
+
+  protected void printUsageAndExit(Options options, int exitCode)
       throws ExitCodeException {
     HelpFormatter formatter = new HelpFormatter();
     formatter.printHelp("Thrift", null, options,
-        "To start the Thrift server run 'bin/hbase-daemon.sh start thrift'\n" +
-        "To shutdown the thrift server run 'bin/hbase-daemon.sh stop " +
-        "thrift' or send a kill signal to the thrift server pid",
+        "To start the Thrift server run 'hbase-daemon.sh start thrift' or " +
+            "'hbase thrift'\n" +
+            "To shutdown the thrift server run 'hbase-daemon.sh stop " +
+            "thrift' or send a kill signal to the thrift server pid",
         true);
     throw new ExitCodeException(exitCode, "");
   }
 
   /**
-   * Start up or shuts down the Thrift server, depending on the arguments.
-   * @param args
+   * Create a Servlet for the http server
+   * @param protocolFactory protocolFactory
+   * @return the servlet
    */
-   void doMain(final String[] args) throws Exception {
-     processOptions(args);
+  protected TServlet createTServlet(TProtocolFactory protocolFactory) {
+    return new ThriftHttpServlet(processor, protocolFactory, serviceUGI, httpUGI,
+        hBaseServiceHandler, securityEnabled, doAsEnabled);
+  }
 
-     serverRunner = new ThriftServerRunner(conf);
+  /**
+   * Setup an HTTP Server using Jetty to serve calls from THttpClient
+   *
+   * @throws IOException IOException
+   */
+  protected void setupHTTPServer() throws IOException {
+    TProtocolFactory protocolFactory = new TBinaryProtocol.Factory();
+    TServlet thriftHttpServlet = createTServlet(protocolFactory);
+
+    // Set the default max thread number to 100 to limit
+    // the number of concurrent requests so that Thrift HTTP server doesn't OOM easily.
+    // Jetty set the default max thread number to 250, if we don't set it.
+    //
+    // Our default min thread number 2 is the same as that used by Jetty.
+    int minThreads = conf.getInt(HTTP_MIN_THREADS_KEY,
+        conf.getInt(TBoundedThreadPoolServer.MIN_WORKER_THREADS_CONF_KEY,
+            HTTP_MIN_THREADS_KEY_DEFAULT));
+    int maxThreads = conf.getInt(HTTP_MAX_THREADS_KEY,
+        conf.getInt(TBoundedThreadPoolServer.MAX_WORKER_THREADS_CONF_KEY,
+            HTTP_MAX_THREADS_KEY_DEFAULT));
+    QueuedThreadPool threadPool = new QueuedThreadPool(maxThreads);
+    threadPool.setMinThreads(minThreads);
+    httpServer = new Server();
+    httpServer.setThreadPool(threadPool);
+    httpServer.setSendServerVersion(false);
+    httpServer.setSendDateHeader(false);
+    httpServer.setStopAtShutdown(true);
+
+    // Context handler
+    Context context =
+        new Context(httpServer, "/", Context.SESSIONS);
+    context.addServlet(new ServletHolder(thriftHttpServlet), "/*");
+    HttpServerUtil.constrainHttpMethods(context,
+        conf.getBoolean(THRIFT_HTTP_ALLOW_OPTIONS_METHOD,
+            THRIFT_HTTP_ALLOW_OPTIONS_METHOD_DEFAULT));
+
+    // set up Jetty and run the embedded server
+    Connector connector = new SelectChannelConnector();
+    if(conf.getBoolean(THRIFT_SSL_ENABLED_KEY, false)) {
+      // directly use https protocol.
+      SslSelectChannelConnectorSecure sslConnector = new SslSelectChannelConnectorSecure();
+      String keystore = conf.get(THRIFT_SSL_KEYSTORE_STORE_KEY);
+      String password = HBaseConfiguration.getPassword(conf,
+          THRIFT_SSL_KEYSTORE_PASSWORD_KEY, null);
+      String keyPassword = HBaseConfiguration.getPassword(conf,
+          THRIFT_SSL_KEYSTORE_KEYPASSWORD_KEY, password);
+      sslConnector.setKeystore(keystore);
+      sslConnector.setPassword(password);
+      sslConnector.setKeyPassword(keyPassword);
+      sslConnector.setNeedClientAuth(true);
+      connector = sslConnector;
+    }
+    String host = getBindAddress(conf).getHostAddress();
+    connector.setPort(listenPort);
+    connector.setHost(host);
+    connector.setHeaderBufferSize(DEFAULT_HTTP_MAX_HEADER_SIZE);
+    httpServer.addConnector(connector);
+
+    if (doAsEnabled) {
+      ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
+    }
 
-     // Put up info server.
-     int port = conf.getInt("hbase.thrift.info.port", 9095);
-     if (port >= 0) {
-       conf.setLong("startcode", System.currentTimeMillis());
-       String a = conf.get("hbase.thrift.info.bindAddress", "0.0.0.0");
-       infoServer = new InfoServer("thrift", a, port, false, conf);
-       infoServer.setAttribute("hbase.conf", conf);
-       infoServer.start();
-     }
-     serverRunner.run();
+    LOG.info("Starting Thrift HTTP Server on " + listenPort);
   }
 
   /**
-   * Parse the command line options to set parameters the conf.
+   * Setting up the thrift TServer
    */
-  private void processOptions(final String[] args) throws Exception {
-    Options options = new Options();
+  protected void setupServer() throws Exception {
+    // Construct correct ProtocolFactory
+    TProtocolFactory protocolFactory = getProtocolFactory();
+
+    ImplType implType = ImplType.getServerImpl(conf);
+    TProcessor processorToUse = processor;
+
+    // Construct correct TransportFactory
+    TTransportFactory transportFactory;
+    if (conf.getBoolean(FRAMED_CONF_KEY, FRAMED_CONF_DEFAULT) || implType.isAlwaysFramed) {
+      if (qop != null) {
+        throw new RuntimeException("Thrift server authentication "
+          + "doesn't work with framed transport yet");
+      }
+      transportFactory = new TFramedTransport.Factory(
+          conf.getInt(MAX_FRAME_SIZE_CONF_KEY, MAX_FRAME_SIZE_CONF_DEFAULT) * 1024 * 1024);
+      LOG.debug("Using framed transport");
+    } else if (qop == null) {
+      transportFactory = new TTransportFactory();
+    } else {
+      // Extract the name from the principal
+      String thriftKerberosPrincipal = conf.get(THRIFT_KERBEROS_PRINCIPAL_KEY);
+      if (thriftKerberosPrincipal == null) {
+        throw new IllegalArgumentException(THRIFT_KERBEROS_PRINCIPAL_KEY + " cannot be null");
+      }
+      String name = SecurityUtil.getUserFromPrincipal(thriftKerberosPrincipal);
+      Map<String, String> saslProperties = SaslUtil.initSaslProperties(qop.name());
+      TSaslServerTransport.Factory saslFactory = new TSaslServerTransport.Factory();
+      saslFactory.addServerDefinition("GSSAPI", name, host, saslProperties,
+          new SaslRpcServer.SaslGssCallbackHandler() {
+            @Override public void handle(Callback[] callbacks) throws UnsupportedCallbackException {
+              AuthorizeCallback ac = null;
+              for (Callback callback : callbacks) {
+                if (callback instanceof AuthorizeCallback) {
+                  ac = (AuthorizeCallback) callback;
+                } else {
+                  throw new UnsupportedCallbackException(callback,
+                      "Unrecognized SASL GSSAPI Callback");
+                }
+              }
+              if (ac != null) {
+                String authid = ac.getAuthenticationID();
+                String authzid = ac.getAuthorizationID();
+                if (!authid.equals(authzid)) {
+                  ac.setAuthorized(false);
+                } else {
+                  ac.setAuthorized(true);
+                  String userName = SecurityUtil.getUserFromPrincipal(authzid);
+                  LOG.info("Effective user: " + userName);
+                  ac.setAuthorizedID(userName);
+                }
+              }
+            }
+          });
+      transportFactory = saslFactory;
+      // Create a processor wrapper, to get the caller
+      processorToUse = new TProcessor() {
+        @Override public void process(TProtocol inProt, TProtocol outProt) throws TException {
+          TSaslServerTransport saslServerTransport = (TSaslServerTransport) inProt.getTransport();
+          SaslServer saslServer = saslServerTransport.getSaslServer();
+          String principal = saslServer.getAuthorizationID();
+          hBaseServiceHandler.setEffectiveUser(principal);
+          processor.process(inProt, outProt);
+        }
+      };
+    }
+    if (conf.get(BIND_CONF_KEY) != null && !implType.canSpecifyBindIP) {
+      LOG.error(String.format("Server types %s don't support IP address binding at the moment. See "
+              + "https://issues.apache.org/jira/browse/HBASE-2155 for details.",
+          Joiner.on(", ").join(ImplType.serversThatCannotSpecifyBindIP())));
+      throw new RuntimeException("-" + BIND_CONF_KEY + " not supported with " + implType);
+    }
+
+    InetSocketAddress inetSocketAddress = new InetSocketAddress(getBindAddress(conf), listenPort);
+    if (implType == ImplType.HS_HA || implType == ImplType.NONBLOCKING ||
+      implType == ImplType.THREADED_SELECTOR) {
+      TNonblockingServerTransport serverTransport = new TNonblockingServerSocket(inetSocketAddress);
+      if (implType == ImplType.NONBLOCKING) {
+        tserver = getTNonBlockingServer(serverTransport, protocolFactory, processorToUse,
+            transportFactory, inetSocketAddress);
+      } else if (implType == ImplType.HS_HA) {
+        tserver = getTHsHaServer(serverTransport, protocolFactory, processorToUse, transportFactory,
+            inetSocketAddress);
+      } else { // THREADED_SELECTOR
+        tserver = getTThreadedSelectorServer(serverTransport, protocolFactory, processorToUse,
+            transportFactory, inetSocketAddress);
+      }
+      LOG.info(String.format("starting HBase %s server on %s", implType.simpleClassName(),
+          Integer.toString(listenPort)));
+    } else if (implType == ImplType.THREAD_POOL) {
+      this.tserver = getTThreadPoolServer(protocolFactory, processorToUse, transportFactory,
+          inetSocketAddress);
+    } else {
+      throw new AssertionError("Unsupported Thrift server implementation: " +
+        implType.simpleClassName());
+    }
+
+    // A sanity check that we instantiated the right type of server.
+    if (tserver.getClass() != implType.serverClass) {
+      throw new AssertionError(
+          "Expected to create Thrift server class " + implType.serverClass.getName() + " but got "
+              + tserver.getClass().getName());
+    }
+  }
+
+  protected TServer getTNonBlockingServer(TNonblockingServerTransport serverTransport,
+      TProtocolFactory protocolFactory, TProcessor processor, TTransportFactory transportFactory,
+      InetSocketAddress inetSocketAddress) {
+    LOG.info("starting HBase Nonblocking Thrift server on " + inetSocketAddress.toString());
+    TNonblockingServer.Args serverArgs = new TNonblockingServer.Args(serverTransport);
+    serverArgs.processor(processor);
+    serverArgs.transportFactory(transportFactory);
+    serverArgs.protocolFactory(protocolFactory);
+    return new TNonblockingServer(serverArgs);
+  }
+
+  protected TServer getTHsHaServer(TNonblockingServerTransport serverTransport,
+      TProtocolFactory protocolFactory, TProcessor processor, TTransportFactory transportFactory,
+      InetSocketAddress inetSocketAddress) {
+    LOG.info("starting HBase HsHA Thrift server on " + inetSocketAddress.toString());
+    THsHaServer.Args serverArgs = new THsHaServer.Args(serverTransport);
+    int queueSize = conf.getInt(TBoundedThreadPoolServer.MAX_QUEUED_REQUESTS_CONF_KEY,
+        TBoundedThreadPoolServer.DEFAULT_MAX_QUEUED_REQUESTS);
+    CallQueue callQueue =
+      new CallQueue(new LinkedBlockingQueue<CallQueue.Call>(queueSize), metrics);
+    int workerThread = conf.getInt(TBoundedThreadPoolServer.MAX_WORKER_THREADS_CONF_KEY,
+        serverArgs.getMaxWorkerThreads());
+    ExecutorService executorService = createExecutor(
+        callQueue, workerThread, workerThread);
+    serverArgs.executorService(executorService).processor(processor)
+        .transportFactory(transportFactory).protocolFactory(protocolFactory);
+    return new THsHaServer(serverArgs);
+  }
+
+  protected TServer getTThreadedSelectorServer(TNonblockingServerTransport serverTransport,
+      TProtocolFactory protocolFactory, TProcessor processor, TTransportFactory transportFactory,
+      InetSocketAddress inetSocketAddress) {
+    LOG.info("starting HBase ThreadedSelector Thrift server on " + inetSocketAddress.toString());
+    TThreadedSelectorServer.Args serverArgs =
+        new HThreadedSelectorServerArgs(serverTransport, conf);
+    int queueSize = conf.getInt(TBoundedThreadPoolServer.MAX_QUEUED_REQUESTS_CONF_KEY,
+        TBoundedThreadPoolServer.DEFAULT_MAX_QUEUED_REQUESTS);
+    CallQueue callQueue =
+      new CallQueue(new LinkedBlockingQueue<CallQueue.Call>(queueSize), metrics);
+    int workerThreads = conf.getInt(TBoundedThreadPoolServer.MAX_WORKER_THREADS_CONF_KEY,
+        serverArgs.getWorkerThreads());
+    int selectorThreads = conf.getInt(THRIFT_SELECTOR_NUM, serverArgs.getSelectorThreads());
+    serverArgs.selectorThreads(selectorThreads);
+    ExecutorService executorService = createExecutor(
+        callQueue, workerThreads, workerThreads);
+    serverArgs.executorService(executorService).processor(processor)
+        .transportFactory(transportFactory).protocolFactory(protocolFactory);
+    return new TThreadedSelectorServer(serverArgs);
+  }
+
+  protected TServer getTThreadPoolServer(TProtocolFactory protocolFactory, TProcessor processor,
+      TTransportFactory transportFactory, InetSocketAddress inetSocketAddress) throws Exception {
+    LOG.info("starting HBase ThreadPool Thrift server on " + inetSocketAddress.toString());
+    // Thrift's implementation uses '0' as a placeholder for 'use the default.'
+    int backlog = conf.getInt(BACKLOG_CONF_KEY, BACKLOG_CONF_DEAFULT);
+    int readTimeout = conf.getInt(THRIFT_SERVER_SOCKET_READ_TIMEOUT_KEY,
+        THRIFT_SERVER_SOCKET_READ_TIMEOUT_DEFAULT);
+    TServerTransport serverTransport = new TServerSocket(
+        new TServerSocket.ServerSocketTransportArgs().
+            bindAddr(inetSocketAddress).backlog(backlog).
+            clientTimeout(readTimeout));
+
+    TBoundedThreadPoolServer.Args serverArgs =
+        new TBoundedThreadPoolServer.Args(serverTransport, conf);
+    serverArgs.processor(processor).transportFactory(transportFactory)
+        .protocolFactory(protocolFactory);
+    return new TBoundedThreadPoolServer(serverArgs, metrics);
+  }
+
+  protected TProtocolFactory getProtocolFactory() {
+    TProtocolFactory protocolFactory;
+
+    if (conf.getBoolean(COMPACT_CONF_KEY, COMPACT_CONF_DEFAULT)) {
+      LOG.debug("Using compact protocol");
+      protocolFactory = new TCompactProtocol.Factory();
+    } else {
+      LOG.debug("Using binary protocol");
+      protocolFactory = new TBinaryProtocol.Factory();
+    }
+
+    return protocolFactory;
+  }
+
+  protected ExecutorService createExecutor(BlockingQueue<Runnable> callQueue,
+      int minWorkers, int maxWorkers) {
+    ThreadFactoryBuilder tfb = new ThreadFactoryBuilder();
+    tfb.setDaemon(true);
+    tfb.setNameFormat("thrift-worker-%d");
+    ThreadPoolExecutor threadPool = new THBaseThreadPoolExecutor(minWorkers, maxWorkers,
+        Long.MAX_VALUE, TimeUnit.SECONDS, callQueue, tfb.build(), metrics);
+    threadPool.allowCoreThreadTimeOut(true);
+    return threadPool;
+  }
+
+  protected InetAddress getBindAddress(Configuration conf)
+      throws UnknownHostException {
+    String bindAddressStr = conf.get(BIND_CONF_KEY, DEFAULT_BIND_ADDR);
+    return InetAddress.getByName(bindAddressStr);
+  }
+
+  public static void registerFilters(Configuration conf) {
+    String[] filters = conf.getStrings(THRIFT_FILTERS);
+    Splitter splitter = Splitter.on(':');
+    if (filters != null) {
+      List<String> filterPart = new ArrayList<String>();
+      for (String filterClass : filters) {
+        for (String part: splitter.split(filterClass)) {
+          filterPart.add(part);
+        }
+        if (filterPart.size() != 2) {
+          LOG.warn("Invalid filter specification " + filterClass + " - skipping");
+        } else {
+          ParseFilter.registerFilter(filterPart.get(0), filterPart.get(1));
+        }
+      }
+    }
+  }
+
+  /**
+   * Add options to command lines
+   * @param options options
+   */
+  protected void addOptions(Options options) {
     options.addOption("b", BIND_OPTION, true, "Address to bind " +
         "the Thrift server to. [default: " + DEFAULT_BIND_ADDR + "]");
     options.addOption("p", PORT_OPTION, true, "Port to bind to [default: " +
@@ -118,71 +668,55 @@ public class ThriftServer {
     options.addOption("f", FRAMED_OPTION, false, "Use framed transport");
     options.addOption("c", COMPACT_OPTION, false, "Use the compact protocol");
     options.addOption("h", "help", false, "Print help information");
-    options.addOption(null, "infoport", true, "Port for web UI");
+    options.addOption("s", SELECTOR_NUM_OPTION, true, "How many selector threads to use.");
+    options.addOption(null, INFOPORT_OPTION, true, "Port for web UI");
 
     options.addOption("m", MIN_WORKERS_OPTION, true,
         "The minimum number of worker threads for " +
-        ImplType.THREAD_POOL.simpleClassName());
+            ImplType.THREAD_POOL.simpleClassName());
 
     options.addOption("w", MAX_WORKERS_OPTION, true,
         "The maximum number of worker threads for " +
-        ImplType.THREAD_POOL.simpleClassName());
+            ImplType.THREAD_POOL.simpleClassName());
 
     options.addOption("q", MAX_QUEUE_SIZE_OPTION, true,
         "The maximum number of queued requests in " +
-        ImplType.THREAD_POOL.simpleClassName());
+            ImplType.THREAD_POOL.simpleClassName());
 
     options.addOption("k", KEEP_ALIVE_SEC_OPTION, true,
        "The amount of time in seconds to keep a thread alive when idle in " +
-        ImplType.THREAD_POOL.simpleClassName());
-
+            ImplType.THREAD_POOL.simpleClassName());
     options.addOption("t", READ_TIMEOUT_OPTION, true,
         "Amount of time in milliseconds before a server thread will timeout " +
-        "waiting for client to send data on a connected socket. Currently, " +
-        "only applies to TBoundedThreadPoolServer");
+            "waiting for client to send data on a connected socket. Currently, " +
+            "only applies to TBoundedThreadPoolServer");
 
     options.addOptionGroup(ImplType.createOptionGroup());
+  }
 
-    CommandLineParser parser = new PosixParser();
-    CommandLine cmd = parser.parse(options, args);
-
-    // This is so complicated to please both bin/hbase and bin/hbase-daemon.
-    // hbase-daemon provides "start" and "stop" arguments
-    // hbase should print the help if no argument is provided
-    List<String> commandLine = Arrays.asList(args);
-    boolean stop = commandLine.contains("stop");
-    boolean start = commandLine.contains("start");
-    boolean invalidStartStop = (start && stop) || (!start && !stop);
-    if (cmd.hasOption("help") || invalidStartStop) {
-      if (invalidStartStop) {
-        LOG.error("Exactly one of 'start' and 'stop' has to be specified");
-      }
-      printUsageAndExit(options, 1);
-    }
-
+  protected void parseCommandLine(CommandLine cmd, Options options) throws ExitCodeException {
     // Get port to bind to
     try {
       if (cmd.hasOption(PORT_OPTION)) {
         int listenPort = Integer.parseInt(cmd.getOptionValue(PORT_OPTION));
-        conf.setInt(ThriftServerRunner.PORT_CONF_KEY, listenPort);
+        conf.setInt(PORT_CONF_KEY, listenPort);
       }
     } catch (NumberFormatException e) {
       LOG.error("Could not parse the value provided for the port option", e);
       printUsageAndExit(options, -1);
     }
-
     // check for user-defined info server port setting, if so override the conf
     try {
-      if (cmd.hasOption("infoport")) {
-        String val = cmd.getOptionValue("infoport");
-        conf.setInt("hbase.thrift.info.port", Integer.parseInt(val));
+      if (cmd.hasOption(INFOPORT_OPTION)) {
+        String val = cmd.getOptionValue(INFOPORT_OPTION);
+        conf.setInt(THRIFT_INFO_SERVER_PORT, Integer.parseInt(val));
         LOG.debug("Web UI port set to " + val);
       }
     } catch (NumberFormatException e) {
-      LOG.error("Could not parse the value provided for the infoport option", e);
+      LOG.error("Could not parse the value provided for the " + INFOPORT_OPTION +
+          " option", e);
       printUsageAndExit(options, -1);
     }
-
     // Make optional changes to the configuration based on command-line options
     optionToConf(cmd, MIN_WORKERS_OPTION,
         conf, TBoundedThreadPoolServer.MIN_WORKER_THREADS_CONF_KEY);
@@ -190,38 +724,72 @@ public class ThriftServer {
         conf, TBoundedThreadPoolServer.MAX_WORKER_THREADS_CONF_KEY);
     optionToConf(cmd, MAX_QUEUE_SIZE_OPTION,
         conf, TBoundedThreadPoolServer.MAX_QUEUED_REQUESTS_CONF_KEY);
-    optionToConf(cmd, KEEP_ALIVE_SEC_OPTION,
-        conf, TBoundedThreadPoolServer.THREAD_KEEP_ALIVE_TIME_SEC_CONF_KEY);
-    optionToConf(cmd, READ_TIMEOUT_OPTION, conf,
-        ThriftServerRunner.THRIFT_SERVER_SOCKET_READ_TIMEOUT_KEY);
-    
+    optionToConf(cmd, KEEP_ALIVE_SEC_OPTION, conf,
+      TBoundedThreadPoolServer.THREAD_KEEP_ALIVE_TIME_SEC_CONF_KEY);
+    optionToConf(cmd, READ_TIMEOUT_OPTION, conf, THRIFT_SERVER_SOCKET_READ_TIMEOUT_KEY);
+    optionToConf(cmd, SELECTOR_NUM_OPTION, conf, THRIFT_SELECTOR_NUM);
+
     // Set general thrift server options
     boolean compact = cmd.hasOption(COMPACT_OPTION) ||
-      conf.getBoolean(ThriftServerRunner.COMPACT_CONF_KEY, false);
-    conf.setBoolean(ThriftServerRunner.COMPACT_CONF_KEY, compact);
+        conf.getBoolean(COMPACT_CONF_KEY, false);
+    conf.setBoolean(COMPACT_CONF_KEY, compact);
     boolean framed = cmd.hasOption(FRAMED_OPTION) ||
-      conf.getBoolean(ThriftServerRunner.FRAMED_CONF_KEY, false);
-    conf.setBoolean(ThriftServerRunner.FRAMED_CONF_KEY, framed);
-    if (cmd.hasOption(BIND_OPTION)) {
-      conf.set(ThriftServerRunner.BIND_CONF_KEY, cmd.getOptionValue(BIND_OPTION));
-    }
+        conf.getBoolean(FRAMED_CONF_KEY, false);
+    conf.setBoolean(FRAMED_CONF_KEY, framed);
+
+    optionToConf(cmd, BIND_OPTION, conf, BIND_CONF_KEY);
+
 
     ImplType.setServerImpl(cmd, conf);
   }
 
+  /**
+   * Parse the command line options to set parameters the conf.
+   */
+  protected void processOptions(final String[] args) throws Exception {
+    if (args == null || args.length == 0) {
+      return;
+    }
+    Options options = new Options();
+    addOptions(options);
+
+    CommandLineParser parser = new BasicParser();
+    CommandLine cmd = parser.parse(options, args);
+
+    if (cmd.hasOption("help")) {
+      printUsageAndExit(options, 1);
+    }
+    parseCommandLine(cmd, options);
+  }
+
   public void stop() {
     if (this.infoServer != null) {
       LOG.info("Stopping infoServer");
       try {
         this.infoServer.stop();
       } catch (Exception ex) {
-        ex.printStackTrace();
+        LOG.error("Failed to stop infoServer", ex);
+      }
+    }
+    if (pauseMonitor != null) {
+      pauseMonitor.stop();
+    }
+    if (tserver != null) {
+      tserver.stop();
+      tserver = null;
+    }
+    if (httpServer != null) {
+      try {
+        httpServer.stop();
+        httpServer = null;
+      } catch (Exception e) {
+        LOG.error("Problem encountered in shutting down HTTP server", e);
       }
+      httpServer = null;
     }
-    serverRunner.shutdown();
   }
 
-  private static void optionToConf(CommandLine cmd, String option,
+  protected static void optionToConf(CommandLine cmd, String option,
       Configuration conf, String destConfKey) {
     if (cmd.hasOption(option)) {
       String value = cmd.getOptionValue(option);
@@ -231,15 +799,37 @@ public class ThriftServer {
   }
 
   /**
-   * @param args
-   * @throws Exception
+   * Run without any command line arguments
+   * @return exit code
+   * @throws Exception exception
    */
+  public int run() throws Exception {
+    return run(null);
+  }
+
+  @Override
+  public int run(String[] strings) throws Exception {
+    processOptions(strings);
+    setupParameters();
+    startInfoServer();
+    if (httpEnable) {
+      setupHTTPServer();
+      httpServer.start();
+      httpServer.join();
+    } else {
+      setupServer();
+      tserver.serve();
+    }
+    return 0;
+  }
+
   public static void main(String [] args) throws Exception {
+    LOG.info("***** STARTING service '" + ThriftServer.class.getSimpleName() + "' *****");
     VersionInfo.logVersion();
-    try {
-      new ThriftServer(HBaseConfiguration.create()).doMain(args);
-    } catch (ExitCodeException ex) {
-      System.exit(ex.getExitCode());
-    }
+    final Configuration conf = HBaseConfiguration.create();
+    // for now, only time we return is on an argument error.
+    final int status = ToolRunner.run(conf, new ThriftServer(conf), args);
+    LOG.info("***** STOPPING service '" + ThriftServer.class.getSimpleName() + "' *****");
+    System.exit(status);
   }
 }
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
deleted file mode 100644
index 11a372b..0000000
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
+++ /dev/null
@@ -1,1957 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.thrift;
-
-import static org.apache.hadoop.hbase.util.Bytes.getBytes;
-
-import java.io.IOException;
-import java.net.InetAddress;
-import java.net.InetSocketAddress;
-import java.net.UnknownHostException;
-import java.nio.ByteBuffer;
-import java.security.PrivilegedAction;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.TreeMap;
-import java.util.concurrent.BlockingQueue;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.LinkedBlockingQueue;
-import java.util.concurrent.TimeUnit;
-
-import javax.security.auth.callback.Callback;
-import javax.security.auth.callback.UnsupportedCallbackException;
-import javax.security.sasl.AuthorizeCallback;
-import javax.security.sasl.SaslServer;
-
-import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.Option;
-import org.apache.commons.cli.OptionGroup;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.HRegionLocation;
-import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.ServerName;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.TableNotFoundException;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Append;
-import org.apache.hadoop.hbase.client.Delete;
-import org.apache.hadoop.hbase.client.Durability;
-import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hbase.client.Increment;
-import org.apache.hadoop.hbase.client.OperationWithAttributes;
-import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.client.RegionLocator;
-import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.ResultScanner;
-import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.Table;
-import org.apache.hadoop.hbase.filter.Filter;
-import org.apache.hadoop.hbase.filter.ParseFilter;
-import org.apache.hadoop.hbase.filter.PrefixFilter;
-import org.apache.hadoop.hbase.filter.WhileMatchFilter;
-import org.apache.hadoop.hbase.jetty.SslSelectChannelConnectorSecure;
-import org.apache.hadoop.hbase.security.SaslUtil;
-import org.apache.hadoop.hbase.security.SaslUtil.QualityOfProtection;
-import org.apache.hadoop.hbase.security.SecurityUtil;
-import org.apache.hadoop.hbase.security.UserProvider;
-import org.apache.hadoop.hbase.thrift.CallQueue.Call;
-import org.apache.hadoop.hbase.thrift.generated.AlreadyExists;
-import org.apache.hadoop.hbase.thrift.generated.BatchMutation;
-import org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor;
-import org.apache.hadoop.hbase.thrift.generated.Hbase;
-import org.apache.hadoop.hbase.thrift.generated.IOError;
-import org.apache.hadoop.hbase.thrift.generated.IllegalArgument;
-import org.apache.hadoop.hbase.thrift.generated.Mutation;
-import org.apache.hadoop.hbase.thrift.generated.TAppend;
-import org.apache.hadoop.hbase.thrift.generated.TCell;
-import org.apache.hadoop.hbase.thrift.generated.TIncrement;
-import org.apache.hadoop.hbase.thrift.generated.TRegionInfo;
-import org.apache.hadoop.hbase.thrift.generated.TRowResult;
-import org.apache.hadoop.hbase.thrift.generated.TScan;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.ConnectionCache;
-import org.apache.hadoop.hbase.util.DNS;
-import org.apache.hadoop.hbase.util.HttpServerUtil;
-import org.apache.hadoop.hbase.util.JvmPauseMonitor;
-import org.apache.hadoop.hbase.util.Strings;
-import org.apache.hadoop.security.SaslRpcServer.SaslGssCallbackHandler;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.authorize.ProxyUsers;
-import org.apache.thrift.TException;
-import org.apache.thrift.TProcessor;
-import org.apache.thrift.protocol.TBinaryProtocol;
-import org.apache.thrift.protocol.TCompactProtocol;
-import org.apache.thrift.protocol.TProtocol;
-import org.apache.thrift.protocol.TProtocolFactory;
-import org.apache.thrift.server.THsHaServer;
-import org.apache.thrift.server.TNonblockingServer;
-import org.apache.thrift.server.TServer;
-import org.apache.thrift.server.TServlet;
-import org.apache.thrift.server.TThreadedSelectorServer;
-import org.apache.thrift.transport.TFramedTransport;
-import org.apache.thrift.transport.TNonblockingServerSocket;
-import org.apache.thrift.transport.TNonblockingServerTransport;
-import org.apache.thrift.transport.TSaslServerTransport;
-import org.apache.thrift.transport.TServerSocket;
-import org.apache.thrift.transport.TServerTransport;
-import org.apache.thrift.transport.TTransportFactory;
-import org.mortbay.jetty.Connector;
-import org.mortbay.jetty.Server;
-import org.mortbay.jetty.nio.SelectChannelConnector;
-import org.mortbay.jetty.servlet.Context;
-import org.mortbay.jetty.servlet.ServletHolder;
-import org.mortbay.jetty.webapp.WebAppContext;
-import org.mortbay.thread.QueuedThreadPool;
-
-import com.google.common.base.Joiner;
-import com.google.common.base.Throwables;
-import com.google.common.util.concurrent.ThreadFactoryBuilder;
-
-/**
- * ThriftServerRunner - this class starts up a Thrift server which implements
- * the Hbase API specified in the Hbase.thrift IDL file.
- */
-@InterfaceAudience.Private
-public class ThriftServerRunner implements Runnable {
-
-  private static final Log LOG = LogFactory.getLog(ThriftServerRunner.class);
-
-  static final String SERVER_TYPE_CONF_KEY =
-      "hbase.regionserver.thrift.server.type";
-
-  static final String BIND_CONF_KEY = "hbase.regionserver.thrift.ipaddress";
-  static final String COMPACT_CONF_KEY = "hbase.regionserver.thrift.compact";
-  static final String FRAMED_CONF_KEY = "hbase.regionserver.thrift.framed";
-  static final String MAX_FRAME_SIZE_CONF_KEY = "hbase.regionserver.thrift.framed.max_frame_size_in_mb";
-  static final String PORT_CONF_KEY = "hbase.regionserver.thrift.port";
-  static final String COALESCE_INC_KEY = "hbase.regionserver.thrift.coalesceIncrement";
-  static final String USE_HTTP_CONF_KEY = "hbase.regionserver.thrift.http";
-  static final String HTTP_MIN_THREADS = "hbase.thrift.http_threads.min";
-  static final String HTTP_MAX_THREADS = "hbase.thrift.http_threads.max";
-
-  static final String THRIFT_SSL_ENABLED = "hbase.thrift.ssl.enabled";
-  static final String THRIFT_SSL_KEYSTORE_STORE = "hbase.thrift.ssl.keystore.store";
-  static final String THRIFT_SSL_KEYSTORE_PASSWORD = "hbase.thrift.ssl.keystore.password";
-  static final String THRIFT_SSL_KEYSTORE_KEYPASSWORD = "hbase.thrift.ssl.keystore.keypassword";
-
-  /**
-   * Amount of time in milliseconds before a server thread will timeout
-   * waiting for client to send data on a connected socket. Currently,
-   * applies only to TBoundedThreadPoolServer
-   */
-  public static final String THRIFT_SERVER_SOCKET_READ_TIMEOUT_KEY =
-    "hbase.thrift.server.socket.read.timeout";
-  public static final int THRIFT_SERVER_SOCKET_READ_TIMEOUT_DEFAULT = 60000;
-
-
-  /**
-   * Thrift quality of protection configuration key. Valid values can be:
-   * auth-conf: authentication, integrity and confidentiality checking
-   * auth-int: authentication and integrity checking
-   * auth: authentication only
-   *
-   * This is used to authenticate the callers and support impersonation.
-   * The thrift server and the HBase cluster must run in secure mode.
-   */
-  static final String THRIFT_QOP_KEY = "hbase.thrift.security.qop";
-  static final String BACKLOG_CONF_KEY = "hbase.regionserver.thrift.backlog";
-
-  private static final String DEFAULT_BIND_ADDR = "0.0.0.0";
-  public static final int DEFAULT_LISTEN_PORT = 9090;
-  public static final int HREGION_VERSION = 1;
-  static final String THRIFT_SUPPORT_PROXYUSER = "hbase.thrift.support.proxyuser";
-  private final int listenPort;
-
-  private Configuration conf;
-  volatile TServer tserver;
-  volatile Server httpServer;
-  private final Hbase.Iface handler;
-  private final ThriftMetrics metrics;
-  private final HBaseHandler hbaseHandler;
-  private final UserGroupInformation realUser;
-
-  private SaslUtil.QualityOfProtection qop;
-  private String host;
-
-  private final boolean securityEnabled;
-  private final boolean doAsEnabled;
-
-  private final JvmPauseMonitor pauseMonitor;
-
-  static String THRIFT_HTTP_ALLOW_OPTIONS_METHOD = "hbase.thrift.http.allow.options.method";
-  private static boolean THRIFT_HTTP_ALLOW_OPTIONS_METHOD_DEFAULT = false;
-
-  /** An enum of server implementation selections */
-  enum ImplType {
-    HS_HA("hsha", true, THsHaServer.class, true),
-    NONBLOCKING("nonblocking", true, TNonblockingServer.class, true),
-    THREAD_POOL("threadpool", false, TBoundedThreadPoolServer.class, true),
-    THREADED_SELECTOR(
-        "threadedselector", true, TThreadedSelectorServer.class, true);
-
-    public static final ImplType DEFAULT = THREAD_POOL;
-
-    final String option;
-    final boolean isAlwaysFramed;
-    final Class<? extends TServer> serverClass;
-    final boolean canSpecifyBindIP;
-
-    ImplType(String option, boolean isAlwaysFramed,
-        Class<? extends TServer> serverClass, boolean canSpecifyBindIP) {
-      this.option = option;
-      this.isAlwaysFramed = isAlwaysFramed;
-      this.serverClass = serverClass;
-      this.canSpecifyBindIP = canSpecifyBindIP;
-    }
-
-    /**
-     * @return <code>-option</code> so we can get the list of options from
-     *         {@link #values()}
-     */
-    @Override
-    public String toString() {
-      return "-" + option;
-    }
-
-    String getDescription() {
-      StringBuilder sb = new StringBuilder("Use the " +
-          serverClass.getSimpleName());
-      if (isAlwaysFramed) {
-        sb.append(" This implies the framed transport.");
-      }
-      if (this == DEFAULT) {
-        sb.append("This is the default.");
-      }
-      return sb.toString();
-    }
-
-    static OptionGroup createOptionGroup() {
-      OptionGroup group = new OptionGroup();
-      for (ImplType t : values()) {
-        group.addOption(new Option(t.option, t.getDescription()));
-      }
-      return group;
-    }
-
-    static ImplType getServerImpl(Configuration conf) {
-      String confType = conf.get(SERVER_TYPE_CONF_KEY, THREAD_POOL.option);
-      for (ImplType t : values()) {
-        if (confType.equals(t.option)) {
-          return t;
-        }
-      }
-      throw new AssertionError("Unknown server ImplType.option:" + confType);
-    }
-
-    static void setServerImpl(CommandLine cmd, Configuration conf) {
-      ImplType chosenType = null;
-      int numChosen = 0;
-      for (ImplType t : values()) {
-        if (cmd.hasOption(t.option)) {
-          chosenType = t;
-          ++numChosen;
-        }
-      }
-      if (numChosen < 1) {
-        LOG.info("Using default thrift server type");
-        chosenType = DEFAULT;
-      } else if (numChosen > 1) {
-        throw new AssertionError("Exactly one option out of " +
-          Arrays.toString(values()) + " has to be specified");
-      }
-      LOG.info("Using thrift server type " + chosenType.option);
-      conf.set(SERVER_TYPE_CONF_KEY, chosenType.option);
-    }
-
-    public String simpleClassName() {
-      return serverClass.getSimpleName();
-    }
-
-    public static List<String> serversThatCannotSpecifyBindIP() {
-      List<String> l = new ArrayList<String>();
-      for (ImplType t : values()) {
-        if (!t.canSpecifyBindIP) {
-          l.add(t.simpleClassName());
-        }
-      }
-      return l;
-    }
-
-  }
-
-  public ThriftServerRunner(Configuration conf) throws IOException {
-    UserProvider userProvider = UserProvider.instantiate(conf);
-    // login the server principal (if using secure Hadoop)
-    securityEnabled = userProvider.isHadoopSecurityEnabled()
-      && userProvider.isHBaseSecurityEnabled();
-    if (securityEnabled) {
-      host = Strings.domainNamePointerToHostName(DNS.getDefaultHost(
-        conf.get("hbase.thrift.dns.interface", "default"),
-        conf.get("hbase.thrift.dns.nameserver", "default")));
-      userProvider.login("hbase.thrift.keytab.file",
-        "hbase.thrift.kerberos.principal", host);
-    }
-    this.conf = HBaseConfiguration.create(conf);
-    this.listenPort = conf.getInt(PORT_CONF_KEY, DEFAULT_LISTEN_PORT);
-    this.metrics = new ThriftMetrics(conf, ThriftMetrics.ThriftServerType.ONE);
-    this.pauseMonitor = new JvmPauseMonitor(conf, this.metrics.getSource());
-    this.hbaseHandler = new HBaseHandler(conf, userProvider);
-    this.hbaseHandler.initMetrics(metrics);
-    this.handler = HbaseHandlerMetricsProxy.newInstance(
-      hbaseHandler, metrics, conf);
-    this.realUser = userProvider.getCurrent().getUGI();
-    String strQop = conf.get(THRIFT_QOP_KEY);
-    if (strQop != null) {
-      this.qop = SaslUtil.getQop(strQop);
-    }
-    doAsEnabled = conf.getBoolean(THRIFT_SUPPORT_PROXYUSER, false);
-    if (doAsEnabled) {
-      if (!conf.getBoolean(USE_HTTP_CONF_KEY, false)) {
-        LOG.warn("Fail to enable the doAs feature. hbase.regionserver.thrift.http is not configured ");
-      }
-    }
-    if (qop != null) {
-      if (qop != QualityOfProtection.AUTHENTICATION &&
-          qop != QualityOfProtection.INTEGRITY &&
-          qop != QualityOfProtection.PRIVACY) {
-        throw new IOException(String.format("Invalide %s: It must be one of %s, %s, or %s.",
-                              THRIFT_QOP_KEY,
-                              QualityOfProtection.AUTHENTICATION.name(),
-                              QualityOfProtection.INTEGRITY.name(),
-                              QualityOfProtection.PRIVACY.name()));
-      }
-      checkHttpSecurity(qop, conf);
-      if (!securityEnabled) {
-        throw new IOException("Thrift server must"
-          + " run in secure mode to support authentication");
-      }
-    }
-  }
-
-  private void checkHttpSecurity(QualityOfProtection qop, Configuration conf) {
-    if (qop == QualityOfProtection.PRIVACY &&
-        conf.getBoolean(USE_HTTP_CONF_KEY, false) &&
-        !conf.getBoolean(THRIFT_SSL_ENABLED, false)) {
-      throw new IllegalArgumentException("Thrift HTTP Server's QoP is privacy, but " +
-          THRIFT_SSL_ENABLED + " is false");
-    }
-  }
-
-  /*
-   * Runs the Thrift server
-   */
-  @Override
-  public void run() {
-    realUser.doAs(new PrivilegedAction<Object>() {
-      @Override
-      public Object run() {
-        try {
-          pauseMonitor.start();
-          if (conf.getBoolean(USE_HTTP_CONF_KEY, false)) {
-            setupHTTPServer();
-            httpServer.start();
-            httpServer.join();
-          } else {
-            setupServer();
-            tserver.serve();
-          }
-        } catch (Exception e) {
-          LOG.fatal("Cannot run ThriftServer", e);
-          // Crash the process if the ThriftServer is not running
-          System.exit(-1);
-        }
-        return null;
-      }
-    });
-
-  }
-
-  public void shutdown() {
-    if (pauseMonitor != null) {
-      pauseMonitor.stop();
-    }
-    if (tserver != null) {
-      tserver.stop();
-      tserver = null;
-    }
-    if (httpServer != null) {
-      try {
-        httpServer.stop();
-        httpServer = null;
-      } catch (Exception e) {
-        LOG.error("Problem encountered in shutting down HTTP server " + e.getCause());
-      }
-      httpServer = null;
-    }
-  }
-
-  private void setupHTTPServer() throws IOException {
-    TProtocolFactory protocolFactory = new TBinaryProtocol.Factory();
-    TProcessor processor = new Hbase.Processor<Hbase.Iface>(handler);
-    TServlet thriftHttpServlet = new ThriftHttpServlet(processor, protocolFactory, realUser,
-        conf, hbaseHandler, securityEnabled, doAsEnabled);
-
-    httpServer = new Server();
-    // Context handler
-    Context context = new WebAppContext();
-    context.setContextPath("/");
-    context.setResourceBase("hbase-webapps/");
-    String httpPath = "/*";
-    httpServer.setHandler(context);
-    context.addServlet(new ServletHolder(thriftHttpServlet), httpPath);
-    HttpServerUtil.constrainHttpMethods(context,
-      conf.getBoolean(THRIFT_HTTP_ALLOW_OPTIONS_METHOD, THRIFT_HTTP_ALLOW_OPTIONS_METHOD_DEFAULT));
-
-    // set up Jetty and run the embedded server
-    Connector connector = new SelectChannelConnector();
-    if(conf.getBoolean(THRIFT_SSL_ENABLED, false)) {
-      SslSelectChannelConnectorSecure sslConnector = new SslSelectChannelConnectorSecure();
-      String keystore = conf.get(THRIFT_SSL_KEYSTORE_STORE);
-      String password = HBaseConfiguration.getPassword(conf,
-          THRIFT_SSL_KEYSTORE_PASSWORD, null);
-      String keyPassword = HBaseConfiguration.getPassword(conf,
-          THRIFT_SSL_KEYSTORE_KEYPASSWORD, password);
-      sslConnector.setKeystore(keystore);
-      sslConnector.setPassword(password);
-      sslConnector.setKeyPassword(keyPassword);
-      connector = sslConnector;
-    }
-    String host = getBindAddress(conf).getHostAddress();
-    connector.setPort(listenPort);
-    connector.setHost(host);
-    connector.setHeaderBufferSize(1024 * 64);
-    httpServer.addConnector(connector);
-
-    if (doAsEnabled) {
-      ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
-    }
-
-    // Set the default max thread number to 100 to limit
-    // the number of concurrent requests so that Thrfit HTTP server doesn't OOM easily.
-    // Jetty set the default max thread number to 250, if we don't set it.
-    //
-    // Our default min thread number 2 is the same as that used by Jetty.
-    int minThreads = conf.getInt(HTTP_MIN_THREADS, 2);
-    int maxThreads = conf.getInt(HTTP_MAX_THREADS, 100);
-    QueuedThreadPool threadPool = new QueuedThreadPool(maxThreads);
-    threadPool.setMinThreads(minThreads);
-    httpServer.setThreadPool(threadPool);
-
-    httpServer.setSendServerVersion(false);
-    httpServer.setSendDateHeader(false);
-    httpServer.setStopAtShutdown(true);
-
-    LOG.info("Starting Thrift HTTP Server on " + Integer.toString(listenPort));
-  }
-
-  /**
-   * Setting up the thrift TServer
-   */
-  private void setupServer() throws Exception {
-    // Construct correct ProtocolFactory
-    TProtocolFactory protocolFactory;
-    if (conf.getBoolean(COMPACT_CONF_KEY, false)) {
-      LOG.debug("Using compact protocol");
-      protocolFactory = new TCompactProtocol.Factory();
-    } else {
-      LOG.debug("Using binary protocol");
-      protocolFactory = new TBinaryProtocol.Factory();
-    }
-
-    final TProcessor p = new Hbase.Processor<Hbase.Iface>(handler);
-    ImplType implType = ImplType.getServerImpl(conf);
-    TProcessor processor = p;
-
-    // Construct correct TransportFactory
-    TTransportFactory transportFactory;
-    if (conf.getBoolean(FRAMED_CONF_KEY, false) || implType.isAlwaysFramed) {
-      if (qop != null) {
-        throw new RuntimeException("Thrift server authentication"
-          + " doesn't work with framed transport yet");
-      }
-      transportFactory = new TFramedTransport.Factory(
-          conf.getInt(MAX_FRAME_SIZE_CONF_KEY, 2)  * 1024 * 1024);
-      LOG.debug("Using framed transport");
-    } else if (qop == null) {
-      transportFactory = new TTransportFactory();
-    } else {
-      // Extract the name from the principal
-      String name = SecurityUtil.getUserFromPrincipal(
-        conf.get("hbase.thrift.kerberos.principal"));
-      Map<String, String> saslProperties = SaslUtil.initSaslProperties(qop.name());
-      TSaslServerTransport.Factory saslFactory = new TSaslServerTransport.Factory();
-      saslFactory.addServerDefinition("GSSAPI", name, host, saslProperties,
-        new SaslGssCallbackHandler() {
-          @Override
-          public void handle(Callback[] callbacks)
-              throws UnsupportedCallbackException {
-            AuthorizeCallback ac = null;
-            for (Callback callback : callbacks) {
-              if (callback instanceof AuthorizeCallback) {
-                ac = (AuthorizeCallback) callback;
-              } else {
-                throw new UnsupportedCallbackException(callback,
-                    "Unrecognized SASL GSSAPI Callback");
-              }
-            }
-            if (ac != null) {
-              String authid = ac.getAuthenticationID();
-              String authzid = ac.getAuthorizationID();
-              if (!authid.equals(authzid)) {
-                ac.setAuthorized(false);
-              } else {
-                ac.setAuthorized(true);
-                String userName = SecurityUtil.getUserFromPrincipal(authzid);
-                LOG.info("Effective user: " + userName);
-                ac.setAuthorizedID(userName);
-              }
-            }
-          }
-        });
-      transportFactory = saslFactory;
-
-      // Create a processor wrapper, to get the caller
-      processor = new TProcessor() {
-        @Override
-        public void process(TProtocol inProt, TProtocol outProt) throws TException {
-          TSaslServerTransport saslServerTransport =
-              (TSaslServerTransport)inProt.getTransport();
-          SaslServer saslServer = saslServerTransport.getSaslServer();
-          String principal = saslServer.getAuthorizationID();
-          hbaseHandler.setEffectiveUser(principal);
-          p.process(inProt, outProt);
-        }
-      };
-    }
-
-    if (conf.get(BIND_CONF_KEY) != null && !implType.canSpecifyBindIP) {
-      LOG.error("Server types " + Joiner.on(", ").join(
-          ImplType.serversThatCannotSpecifyBindIP()) + " don't support IP " +
-          "address binding at the moment. See " +
-          "https://issues.apache.org/jira/browse/HBASE-2155 for details.");
-      throw new RuntimeException(
-          "-" + BIND_CONF_KEY + " not supported with " + implType);
-    }
-
-    // Thrift's implementation uses '0' as a placeholder for 'use the default.'
-    int backlog = conf.getInt(BACKLOG_CONF_KEY, 0);
-
-    if (implType == ImplType.HS_HA || implType == ImplType.NONBLOCKING ||
-        implType == ImplType.THREADED_SELECTOR) {
-      InetAddress listenAddress = getBindAddress(conf);
-      TNonblockingServerTransport serverTransport = new TNonblockingServerSocket(
-          new InetSocketAddress(listenAddress, listenPort));
-
-      if (implType == ImplType.NONBLOCKING) {
-        TNonblockingServer.Args serverArgs =
-            new TNonblockingServer.Args(serverTransport);
-        serverArgs.processor(processor)
-                  .transportFactory(transportFactory)
-                  .protocolFactory(protocolFactory);
-        tserver = new TNonblockingServer(serverArgs);
-      } else if (implType == ImplType.HS_HA) {
-        THsHaServer.Args serverArgs = new THsHaServer.Args(serverTransport);
-        CallQueue callQueue =
-            new CallQueue(new LinkedBlockingQueue<Call>(), metrics);
-        ExecutorService executorService = createExecutor(
-            callQueue, serverArgs.getMinWorkerThreads(), serverArgs.getMaxWorkerThreads());
-        serverArgs.executorService(executorService)
-                  .processor(processor)
-                  .transportFactory(transportFactory)
-                  .protocolFactory(protocolFactory);
-        tserver = new THsHaServer(serverArgs);
-      } else { // THREADED_SELECTOR
-        TThreadedSelectorServer.Args serverArgs =
-            new HThreadedSelectorServerArgs(serverTransport, conf);
-        CallQueue callQueue =
-            new CallQueue(new LinkedBlockingQueue<Call>(), metrics);
-        ExecutorService executorService = createExecutor(
-            callQueue, serverArgs.getWorkerThreads(), serverArgs.getWorkerThreads());
-        serverArgs.executorService(executorService)
-                  .processor(processor)
-                  .transportFactory(transportFactory)
-                  .protocolFactory(protocolFactory);
-        tserver = new TThreadedSelectorServer(serverArgs);
-      }
-      LOG.info("starting HBase " + implType.simpleClassName() +
-          " server on " + Integer.toString(listenPort));
-    } else if (implType == ImplType.THREAD_POOL) {
-      // Thread pool server. Get the IP address to bind to.
-      InetAddress listenAddress = getBindAddress(conf);
-      int readTimeout = conf.getInt(THRIFT_SERVER_SOCKET_READ_TIMEOUT_KEY,
-          THRIFT_SERVER_SOCKET_READ_TIMEOUT_DEFAULT);
-      TServerTransport serverTransport = new TServerSocket(
-          new TServerSocket.ServerSocketTransportArgs().
-              bindAddr(new InetSocketAddress(listenAddress, listenPort)).
-              backlog(backlog).
-              clientTimeout(readTimeout));
-
-      TBoundedThreadPoolServer.Args serverArgs =
-          new TBoundedThreadPoolServer.Args(serverTransport, conf);
-      serverArgs.processor(processor)
-                .transportFactory(transportFactory)
-                .protocolFactory(protocolFactory);
-      LOG.info("starting " + ImplType.THREAD_POOL.simpleClassName() + " on "
-          + listenAddress + ":" + Integer.toString(listenPort)
-          + " with readTimeout " + readTimeout + "ms; " + serverArgs);
-      TBoundedThreadPoolServer tserver =
-          new TBoundedThreadPoolServer(serverArgs, metrics);
-      this.tserver = tserver;
-    } else {
-      throw new AssertionError("Unsupported Thrift server implementation: " +
-          implType.simpleClassName());
-    }
-
-    // A sanity check that we instantiated the right type of server.
-    if (tserver.getClass() != implType.serverClass) {
-      throw new AssertionError("Expected to create Thrift server class " +
-          implType.serverClass.getName() + " but got " +
-          tserver.getClass().getName());
-    }
-
-
-
-    registerFilters(conf);
-  }
-
-  ExecutorService createExecutor(BlockingQueue<Runnable> callQueue,
-                                 int minWorkers, int maxWorkers) {
-    ThreadFactoryBuilder tfb = new ThreadFactoryBuilder();
-    tfb.setDaemon(true);
-    tfb.setNameFormat("thrift-worker-%d");
-    return new THBaseThreadPoolExecutor(minWorkers, maxWorkers,
-            Long.MAX_VALUE, TimeUnit.SECONDS, callQueue, tfb.build(), metrics);
-  }
-
-  private InetAddress getBindAddress(Configuration conf)
-      throws UnknownHostException {
-    String bindAddressStr = conf.get(BIND_CONF_KEY, DEFAULT_BIND_ADDR);
-    return InetAddress.getByName(bindAddressStr);
-  }
-
-  protected static class ResultScannerWrapper {
-
-    private final ResultScanner scanner;
-    private final boolean sortColumns;
-    public ResultScannerWrapper(ResultScanner resultScanner,
-                                boolean sortResultColumns) {
-      scanner = resultScanner;
-      sortColumns = sortResultColumns;
-   }
-
-    public ResultScanner getScanner() {
-      return scanner;
-    }
-
-    public boolean isColumnSorted() {
-      return sortColumns;
-    }
-  }
-
-  /**
-   * The HBaseHandler is a glue object that connects Thrift RPC calls to the
-   * HBase client API primarily defined in the Admin and Table objects.
-   */
-  public static class HBaseHandler implements Hbase.Iface {
-    protected Configuration conf;
-    protected static final Log LOG = LogFactory.getLog(HBaseHandler.class);
-
-    // nextScannerId and scannerMap are used to manage scanner state
-    protected int nextScannerId = 0;
-    protected HashMap<Integer, ResultScannerWrapper> scannerMap = null;
-    private ThriftMetrics metrics = null;
-
-    private final ConnectionCache connectionCache;
-    IncrementCoalescer coalescer = null;
-
-    static final String CLEANUP_INTERVAL = "hbase.thrift.connection.cleanup-interval";
-    static final String MAX_IDLETIME = "hbase.thrift.connection.max-idletime";
-
-    /**
-     * Returns a list of all the column families for a given Table.
-     *
-     * @param table
-     * @throws IOException
-     */
-    byte[][] getAllColumns(Table table) throws IOException {
-      HColumnDescriptor[] cds = table.getTableDescriptor().getColumnFamilies();
-      byte[][] columns = new byte[cds.length][];
-      for (int i = 0; i < cds.length; i++) {
-        columns[i] = Bytes.add(cds[i].getName(),
-            KeyValue.COLUMN_FAMILY_DELIM_ARRAY);
-      }
-      return columns;
-    }
-
-    /**
-     * Creates and returns a Table instance from a given table name.
-     *
-     * @param tableName
-     *          name of table
-     * @return Table object
-     * @throws IOException
-     */
-    public Table getTable(final byte[] tableName) throws
-        IOException {
-      String table = Bytes.toString(tableName);
-      return connectionCache.getTable(table);
-    }
-
-    public Table getTable(final ByteBuffer tableName) throws IOException {
-      return getTable(getBytes(tableName));
-    }
-
-    /**
-     * Assigns a unique ID to the scanner and adds the mapping to an internal
-     * hash-map.
-     *
-     * @param scanner
-     * @return integer scanner id
-     */
-    protected synchronized int addScanner(ResultScanner scanner,boolean sortColumns) {
-      int id = nextScannerId++;
-      ResultScannerWrapper resultScannerWrapper = new ResultScannerWrapper(scanner, sortColumns);
-      scannerMap.put(id, resultScannerWrapper);
-      return id;
-    }
-
-    /**
-     * Returns the scanner associated with the specified ID.
-     *
-     * @param id
-     * @return a Scanner, or null if ID was invalid.
-     */
-    protected synchronized ResultScannerWrapper getScanner(int id) {
-      return scannerMap.get(id);
-    }
-
-    /**
-     * Removes the scanner associated with the specified ID from the internal
-     * id-&gt;scanner hash-map.
-     *
-     * @param id
-     * @return a Scanner, or null if ID was invalid.
-     */
-    protected synchronized ResultScannerWrapper removeScanner(int id) {
-      return scannerMap.remove(id);
-    }
-
-    protected HBaseHandler(final Configuration c,
-        final UserProvider userProvider) throws IOException {
-      this.conf = c;
-      scannerMap = new HashMap<Integer, ResultScannerWrapper>();
-      this.coalescer = new IncrementCoalescer(this);
-
-      int cleanInterval = conf.getInt(CLEANUP_INTERVAL, 10 * 1000);
-      int maxIdleTime = conf.getInt(MAX_IDLETIME, 10 * 60 * 1000);
-      connectionCache = new ConnectionCache(
-        conf, userProvider, cleanInterval, maxIdleTime);
-    }
-
-    /**
-     * Obtain HBaseAdmin. Creates the instance if it is not already created.
-     */
-    private Admin getAdmin() throws IOException {
-      return connectionCache.getAdmin();
-    }
-
-    void setEffectiveUser(String effectiveUser) {
-      connectionCache.setEffectiveUser(effectiveUser);
-    }
-
-    @Override
-    public void enableTable(ByteBuffer tableName) throws IOError {
-      try{
-        getAdmin().enableTable(getTableName(tableName));
-      } catch (IOException e) {
-        LOG.warn(e.getMessage(), e);
-        throw getIOError(e);
-      }
-    }
-
-    @Override
-    public void disableTable(ByteBuffer tableName) throws IOError{
-      try{
-        getAdmin().disableTable(getTableName(tableName));
-      } catch (IOException e) {
-        LOG.warn(e.getMessage(), e);
-        throw getIOError(e);
-      }
-    }
-
-    @Override
-    public boolean isTableEnabled(ByteBuffer tableName) throws IOError {
-      try {
-        return this.connectionCache.getAdmin().isTableEnabled(getTableName(tableName));
-      } catch (IOException e) {
-        LOG.warn(e.getMessage(), e);
-        throw getIOError(e);
-      }
-    }
-
-    @Override
-    public void compact(ByteBuffer tableNameOrRegionName) throws IOError {
-      try {
-        // TODO: HBaseAdmin.compact(byte[]) deprecated and not trivial to replace here.
-        // ThriftServerRunner.compact should be deprecated and replaced with methods specific to
-        // table and region.
-        ((HBaseAdmin) getAdmin()).compact(getBytes(tableNameOrRegionName));
-      } catch (IOException e) {
-        LOG.warn(e.getMessage(), e);
-        throw getIOError(e);
-      }
-    }
-
-    @Override
-    public void majorCompact(ByteBuffer tableNameOrRegionName) throws IOError {
-      try {
-        // TODO: HBaseAdmin.majorCompact(byte[]) deprecated and not trivial to replace here.
-        // ThriftServerRunner.majorCompact should be deprecated and replaced with methods specific
-        // to table and region.
-        ((HBaseAdmin) getAdmin()).majorCompact(getBytes(tableNameOrRegionName));
-      } catch (IOException e) {
-        LOG.warn(e.getMessage(), e);
-        throw getIOError(e);
-      }
-    }
-
-    @Override
-    public List<ByteBuffer> getTableNames() throws IOError {
-      try {
-        TableName[] tableNames = this.getAdmin().listTableNames();
-        ArrayList<ByteBuffer> list = new ArrayList<ByteBuffer>(tableNames.length);
-        for (int i = 0; i < tableNames.length; i++) {
-          list.add(ByteBuffer.wrap(tableNames[i].getName()));
-        }
-        return list;
-      } catch (IOException e) {
-        LOG.warn(e.getMessage(), e);
-        throw getIOError(e);
-      }
-    }
-
-    /**
-     * @return the list of regions in the given table, or an empty list if the table does not exist
-     */
-    @Override
-    public List<TRegionInfo> getTableRegions(ByteBuffer tableName)
-    throws IOError {
-      try (RegionLocator locator = connectionCache.getRegionLocator(getBytes(tableName))) {
-        List<HRegionLocation> regionLocations = locator.getAllRegionLocations();
-        List<TRegionInfo> results = new ArrayList<TRegionInfo>();
-        for (HRegionLocation regionLocation : regionLocations) {
-          HRegionInfo info = regionLocation.getRegionInfo();
-          ServerName serverName = regionLocation.getServerName();
-          TRegionInfo region = new TRegionInfo();
-          region.serverName = ByteBuffer.wrap(
-              Bytes.toBytes(serverName.getHostname()));
-          region.port = serverName.getPort();
-          region.startKey = ByteBuffer.wrap(info.getStartKey());
-          region.endKey = ByteBuffer.wrap(info.getEndKey());
-          region.id = info.getRegionId();
-          region.name = ByteBuffer.wrap(info.getRegionName());
-          region.version = info.getVersion();
-          results.add(region);
-        }
-        return results;
-      } catch (TableNotFoundException e) {
-        // Return empty list for non-existing table
-        return Collections.emptyList();
-      } catch (IOException e){
-        LOG.warn(e.getMessage(), e);
-        throw getIOError(e);
-      }
-    }
-
-    @Override
-    public List<TCell> get(
-        ByteBuffer tableName, ByteBuffer row, ByteBuffer column,
-        Map<ByteBuffer, ByteBuffer> attributes)
-        throws IOError {
-      byte [][] famAndQf = KeyValue.parseColumn(getBytes(column));
-      if (famAndQf.length == 1) {
-        return get(tableName, row, famAndQf[0], null, attributes);
-      }
-      if (famAndQf.length == 2) {
-        return get(tableName, row, famAndQf[0], famAndQf[1], attributes);
-      }
-      throw new IllegalArgumentException("Invalid familyAndQualifier provided.");
-    }
-
-    /**
-     * Note: this internal interface is slightly different from public APIs in regard to handling
-     * of the qualifier. Here we differ from the public Java API in that null != byte[0]. Rather,
-     * we respect qual == null as a request for the entire column family. The caller (
-     * {@link #get(ByteBuffer, ByteBuffer, ByteBuffer, Map)}) interface IS consistent in that the
-     * column is parse like normal.
-     */
-    protected List<TCell> get(ByteBuffer tableName,
-                              ByteBuffer row,
-                              byte[] family,
-                              byte[] qualifier,
-                              Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
-      Table table = null;
-      try {
-        table = getTable(tableName);
-        Get get = new Get(getBytes(row));
-        addAttributes(get, attributes);
-        if (qualifier == null) {
-          get.addFamily(family);
-        } else {
-          get.addColumn(family, qualifier);
-        }
-        Result result = table.get(get);
-        return ThriftUtilities.cellFromHBase(result.rawCells());
-      } catch (IOException e) {
-        LOG.warn(e.getMessage(), e);
-        throw getIOError(e);
-      } finally {
-        closeTable(table);
-      }
-    }
-
-    @Override
-    public List<TCell> getVer(ByteBuffer tableName, ByteBuffer row, ByteBuffer column,
-        int numVersions, Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
-      byte [][] famAndQf = KeyValue.parseColumn(getBytes(column));
-      if(famAndQf.length == 1) {
-        return getVer(tableName, row, famAndQf[0], null, numVersions, attributes);
-      }
-      if (famAndQf.length == 2) {
-        return getVer(tableName, row, famAndQf[0], famAndQf[1], numVersions, attributes);
-      }
-      throw new IllegalArgumentException("Invalid familyAndQualifier provided.");
-
-    }
-
-    /**
-     * Note: this public interface is slightly different from public Java APIs in regard to
-     * handling of the qualifier. Here we differ from the public Java API in that null != byte[0].
-     * Rather, we respect qual == null as a request for the entire column family. If you want to
-     * access the entire column family, use
-     * {@link #getVer(ByteBuffer, ByteBuffer, ByteBuffer, int, Map)} with a {@code column} value
-     * that lacks a {@code ':'}.
-     */
-    public List<TCell> getVer(ByteBuffer tableName, ByteBuffer row, byte[] family,
-        byte[] qualifier, int numVersions, Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
-      
-      Table table = null;
-      try {
-        table = getTable(tableName);
-        Get get = new Get(getBytes(row));
-        addAttributes(get, attributes);
-        if (null == qualifier) {
-          get.addFamily(family);
-        } else {
-          get.addColumn(family, qualifier);
-        }
-        get.setMaxVersions(numVersions);
-        Result result = table.get(get);
-        return ThriftUtilities.cellFromHBase(result.rawCells());
-      } catch (IOException e) {
-        LOG.warn(e.getMessage(), e);
-        throw getIOError(e);
-      } finally{
-        closeTable(table);
-      }
-    }
-
-    @Override
-    public List<TCell> getVerTs(ByteBuffer tableName, ByteBuffer row, ByteBuffer column,
-        long timestamp, int numVersions, Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
-      byte [][] famAndQf = KeyValue.parseColumn(getBytes(column));
-      if (famAndQf.length == 1) {
-        return getVerTs(tableName, row, famAndQf[0], null, timestamp, numVersions, attributes);
-      }
-      if (famAndQf.length == 2) {
-        return getVerTs(tableName, row, famAndQf[0], famAndQf[1], timestamp, numVersions,
-          attributes);
-      }
-      throw new IllegalArgumentException("Invalid familyAndQualifier provided.");
-    }
-
-    /**
-     * Note: this internal interface is slightly different from public APIs in regard to handling
-     * of the qualifier. Here we differ from the public Java API in that null != byte[0]. Rather,
-     * we respect qual == null as a request for the entire column family. The caller (
-     * {@link #getVerTs(ByteBuffer, ByteBuffer, ByteBuffer, long, int, Map)}) interface IS
-     * consistent in that the column is parse like normal.
-     */
-    protected List<TCell> getVerTs(ByteBuffer tableName, ByteBuffer row, byte[] family,
-        byte[] qualifier, long timestamp, int numVersions, Map<ByteBuffer, ByteBuffer> attributes)
-        throws IOError {
-      
-      Table table = null;
-      try {
-        table = getTable(tableName);
-        Get get = new Get(getBytes(row));
-        addAttributes(get, attributes);
-        if (null == qualifier) {
-          get.addFamily(family);
-        } else {
-          get.addColumn(family, qualifier);
-        }
-        get.setTimeRange(0, timestamp);
-        get.setMaxVersions(numVersions);
-        Result result = table.get(get);
-        return ThriftUtilities.cellFromHBase(result.rawCells());
-      } catch (IOException e) {
-        LOG.warn(e.getMessage(), e);
-        throw getIOError(e);
-      } finally{
-        closeTable(table);
-      }
-    }
-
-    @Override
-    public List<TRowResult> getRow(ByteBuffer tableName, ByteBuffer row,
-        Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
-      return getRowWithColumnsTs(tableName, row, null,
-                                 HConstants.LATEST_TIMESTAMP,
-                                 attributes);
-    }
-
-    @Override
-    public List<TRowResult> getRowWithColumns(ByteBuffer tableName,
-                                              ByteBuffer row,
-        List<ByteBuffer> columns,
-        Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
-      return getRowWithColumnsTs(tableName, row, columns,
-                                 HConstants.LATEST_TIMESTAMP,
-                                 attributes);
-    }
-
-    @Override
-    public List<TRowResult> getRowTs(ByteBuffer tableName, ByteBuffer row,
-        long timestamp, Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
-      return getRowWithColumnsTs(tableName, row, null,
-                                 timestamp, attributes);
-    }
-
-    @Override
-    public List<TRowResult> getRowWithColumnsTs(
-        ByteBuffer tableName, ByteBuffer row, List<ByteBuffer> columns,
-        long timestamp, Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
-      
-      Table table = null;
-      try {
-        table = getTable(tableName);
-        if (columns == null) {
-          Get get = new Get(getBytes(row));
-          addAttributes(get, attributes);
-          get.setTimeRange(0, timestamp);
-          Result result = table.get(get);
-          return ThriftUtilities.rowResultFromHBase(result);
-        }
-        Get get = new Get(getBytes(row));
-        addAttributes(get, attributes);
-        for(ByteBuffer column : columns) {
-          byte [][] famAndQf = KeyValue.parseColumn(getBytes(column));
-          if (famAndQf.length == 1) {
-              get.addFamily(famAndQf[0]);
-          } else {
-              get.addColumn(famAndQf[0], famAndQf[1]);
-          }
-        }
-        get.setTimeRange(0, timestamp);
-        Result result = table.get(get);
-        return ThriftUtilities.rowResultFromHBase(result);
-      } catch (IOException e) {
-        LOG.warn(e.getMessage(), e);
-        throw getIOError(e);
-      } finally{
-        closeTable(table);
-      }
-    }
-
-    @Override
-    public List<TRowResult> getRows(ByteBuffer tableName,
-                                    List<ByteBuffer> rows,
-        Map<ByteBuffer, ByteBuffer> attributes)
-        throws IOError {
-      return getRowsWithColumnsTs(tableName, rows, null,
-                                  HConstants.LATEST_TIMESTAMP,
-                                  attributes);
-    }
-
-    @Override
-    public List<TRowResult> getRowsWithColumns(ByteBuffer tableName,
-                                               List<ByteBuffer> rows,
-        List<ByteBuffer> columns,
-        Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
-      return getRowsWithColumnsTs(tableName, rows, columns,
-                                  HConstants.LATEST_TIMESTAMP,
-                                  attributes);
-    }
-
-    @Override
-    public List<TRowResult> getRowsTs(ByteBuffer tableName,
-                                      List<ByteBuffer> rows,
-        long timestamp,
-        Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
-      return getRowsWithColumnsTs(tableName, rows, null,
-                                  timestamp, attributes);
-    }
-
-    @Override
-    public List<TRowResult> getRowsWithColumnsTs(ByteBuffer tableName,
-                                                 List<ByteBuffer> rows,
-        List<ByteBuffer> columns, long timestamp,
-        Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
-      
-      Table table= null;
-      try {
-        List<Get> gets = new ArrayList<Get>(rows.size());
-        table = getTable(tableName);
-        if (metrics != null) {
-          metrics.incNumRowKeysInBatchGet(rows.size());
-        }
-        for (ByteBuffer row : rows) {
-          Get get = new Get(getBytes(row));
-          addAttributes(get, attributes);
-          if (columns != null) {
-
-            for(ByteBuffer column : columns) {
-              byte [][] famAndQf = KeyValue.parseColumn(getBytes(column));
-              if (famAndQf.length == 1) {
-                get.addFamily(famAndQf[0]);
-              } else {
-                get.addColumn(famAndQf[0], famAndQf[1]);
-              }
-            }
-          }
-          get.setTimeRange(0, timestamp);
-          gets.add(get);
-        }
-        Result[] result = table.get(gets);
-        return ThriftUtilities.rowResultFromHBase(result);
-      } catch (IOException e) {
-        LOG.warn(e.getMessage(), e);
-        throw getIOError(e);
-      } finally{
-        closeTable(table);
-      }
-    }
-
-    @Override
-    public void deleteAll(
-        ByteBuffer tableName, ByteBuffer row, ByteBuffer column,
-        Map<ByteBuffer, ByteBuffer> attributes)
-        throws IOError {
-      deleteAllTs(tableName, row, column, HConstants.LATEST_TIMESTAMP,
-                  attributes);
-    }
-
-    @Override
-    public void deleteAllTs(ByteBuffer tableName,
-                            ByteBuffer row,
-                            ByteBuffer column,
-        long timestamp, Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
-      Table table = null;
-      try {
-        table = getTable(tableName);
-        Delete delete  = new Delete(getBytes(row));
-        addAttributes(delete, attributes);
-        byte [][] famAndQf = KeyValue.parseColumn(getBytes(column));
-        if (famAndQf.length == 1) {
-          delete.deleteFamily(famAndQf[0], timestamp);
-        } else {
-          delete.deleteColumns(famAndQf[0], famAndQf[1], timestamp);
-        }
-        table.delete(delete);
-
-      } catch (IOException e) {
-        LOG.warn(e.getMessage(), e);
-        throw getIOError(e);
-      } finally {
-        closeTable(table);
-      }
-    }
-
-    @Override
-    public void deleteAllRow(
-        ByteBuffer tableName, ByteBuffer row,
-        Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
-      deleteAllRowTs(tableName, row, HConstants.LATEST_TIMESTAMP, attributes);
-    }
-
-    @Override
-    public void deleteAllRowTs(
-        ByteBuffer tableName, ByteBuffer row, long timestamp,
-        Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
-      Table table = null;
-      try {
-        table = getTable(tableName);
-        Delete delete  = new Delete(getBytes(row), timestamp);
-        addAttributes(delete, attributes);
-        table.delete(delete);
-      } catch (IOException e) {
-        LOG.warn(e.getMessage(), e);
-        throw getIOError(e);
-      } finally {
-        closeTable(table);
-      }
-    }
-
-    @Override
-    public void createTable(ByteBuffer in_tableName,
-        List<ColumnDescriptor> columnFamilies) throws IOError,
-        IllegalArgument, AlreadyExists {
-      TableName tableName = getTableName(in_tableName);
-      try {
-        if (getAdmin().tableExists(tableName)) {
-          throw new AlreadyExists("table name already in use");
-        }
-        HTableDescriptor desc = new HTableDescriptor(tableName);
-        for (ColumnDescriptor col : columnFamilies) {
-          HColumnDescriptor colDesc = ThriftUtilities.colDescFromThrift(col);
-          desc.addFamily(colDesc);
-        }
-        getAdmin().createTable(desc);
-      } catch (IOException e) {
-        LOG.warn(e.getMessage(), e);
-        throw getIOError(e);
-      } catch (IllegalArgumentException e) {
-        LOG.warn(e.getMessage(), e);
-        throw new IllegalArgument(Throwables.getStackTraceAsString(e));
-      }
-    }
-
-    private static TableName getTableName(ByteBuffer buffer) {
-      return TableName.valueOf(getBytes(buffer));
-    }
-
-    @Override
-    public void deleteTable(ByteBuffer in_tableName) throws IOError {
-      TableName tableName = getTableName(in_tableName);
-      if (LOG.isDebugEnabled()) {
-        LOG.debug("deleteTable: table=" + tableName);
-      }
-      try {
-        if (!getAdmin().tableExists(tableName)) {
-          throw new IOException("table does not exist");
-        }
-        getAdmin().deleteTable(tableName);
-      } catch (IOException e) {
-        LOG.warn(e.getMessage(), e);
-        throw getIOError(e);
-      }
-    }
-
-    @Override
-    public void mutateRow(ByteBuffer tableName, ByteBuffer row,
-        List<Mutation> mutations, Map<ByteBuffer, ByteBuffer> attributes)
-        throws IOError, IllegalArgument {
-      mutateRowTs(tableName, row, mutations, HConstants.LATEST_TIMESTAMP,
-                  attributes);
-    }
-
-    @Override
-    public void mutateRowTs(ByteBuffer tableName, ByteBuffer row,
-        List<Mutation> mutations, long timestamp,
-        Map<ByteBuffer, ByteBuffer> attributes)
-        throws IOError, IllegalArgument {
-      Table table = null;
-      try {
-        table = getTable(tableName);
-        Put put = new Put(getBytes(row), timestamp);
-        addAttributes(put, attributes);
-
-        Delete delete = new Delete(getBytes(row));
-        addAttributes(delete, attributes);
-        if (metrics != null) {
-          metrics.incNumRowKeysInBatchMutate(mutations.size());
-        }
-
-        // I apologize for all this mess :)
-        for (Mutation m : mutations) {
-          byte[][] famAndQf = KeyValue.parseColumn(getBytes(m.column));
-          if (m.isDelete) {
-            if (famAndQf.length == 1) {
-              delete.deleteFamily(famAndQf[0], timestamp);
-            } else {
-              delete.deleteColumns(famAndQf[0], famAndQf[1], timestamp);
-            }
-            delete.setDurability(m.writeToWAL ? Durability.SYNC_WAL
-                : Durability.SKIP_WAL);
-          } else {
-            if(famAndQf.length == 1) {
-              LOG.warn("No column qualifier specified. Delete is the only mutation supported "
-                  + "over the whole column family.");
-            } else {
-              put.addImmutable(famAndQf[0], famAndQf[1],
-                  m.value != null ? getBytes(m.value)
-                      : HConstants.EMPTY_BYTE_ARRAY);
-            }
-            put.setDurability(m.writeToWAL ? Durability.SYNC_WAL : Durability.SKIP_WAL);
-          }
-        }
-        if (!delete.isEmpty())
-          table.delete(delete);
-        if (!put.isEmpty())
-          table.put(put);
-      } catch (IOException e) {
-        LOG.warn(e.getMessage(), e);
-        throw getIOError(e);
-      } catch (IllegalArgumentException e) {
-        LOG.warn(e.getMessage(), e);
-        throw new IllegalArgument(Throwables.getStackTraceAsString(e));
-      } finally{
-        closeTable(table);
-      }
-    }
-
-    @Override
-    public void mutateRows(ByteBuffer tableName, List<BatchMutation> rowBatches,
-        Map<ByteBuffer, ByteBuffer> attributes)
-        throws IOError, IllegalArgument, TException {
-      mutateRowsTs(tableName, rowBatches, HConstants.LATEST_TIMESTAMP, attributes);
-    }
-
-    @Override
-    public void mutateRowsTs(
-        ByteBuffer tableName, List<BatchMutation> rowBatches, long timestamp,
-        Map<ByteBuffer, ByteBuffer> attributes)
-        throws IOError, IllegalArgument, TException {
-      List<Put> puts = new ArrayList<Put>();
-      List<Delete> deletes = new ArrayList<Delete>();
-
-      for (BatchMutation batch : rowBatches) {
-        byte[] row = getBytes(batch.row);
-        List<Mutation> mutations = batch.mutations;
-        Delete delete = new Delete(row);
-        addAttributes(delete, attributes);
-        Put put = new Put(row, timestamp);
-        addAttributes(put, attributes);
-        for (Mutation m : mutations) {
-          byte[][] famAndQf = KeyValue.parseColumn(getBytes(m.column));
-          if (m.isDelete) {
-            // no qualifier, family only.
-            if (famAndQf.length == 1) {
-              delete.deleteFamily(famAndQf[0], timestamp);
-            } else {
-              delete.deleteColumns(famAndQf[0], famAndQf[1], timestamp);
-            }
-            delete.setDurability(m.writeToWAL ? Durability.SYNC_WAL
-                : Durability.SKIP_WAL);
-          } else {
-            if (famAndQf.length == 1) {
-              LOG.warn("No column qualifier specified. Delete is the only mutation supported "
-                  + "over the whole column family.");
-            }
-            if (famAndQf.length == 2) {
-              put.addImmutable(famAndQf[0], famAndQf[1],
-                  m.value != null ? getBytes(m.value)
-                      : HConstants.EMPTY_BYTE_ARRAY);
-            } else {
-              throw new IllegalArgumentException("Invalid famAndQf provided.");
-            }
-            put.setDurability(m.writeToWAL ? Durability.SYNC_WAL : Durability.SKIP_WAL);
-          }
-        }
-        if (!delete.isEmpty())
-          deletes.add(delete);
-        if (!put.isEmpty())
-          puts.add(put);
-      }
-
-      Table table = null;
-      try {
-        table = getTable(tableName);
-        if (!puts.isEmpty())
-          table.put(puts);
-        if (!deletes.isEmpty())
-          table.delete(deletes);
-
-      } catch (IOException e) {
-        LOG.warn(e.getMessage(), e);
-        throw getIOError(e);
-      } catch (IllegalArgumentException e) {
-        LOG.warn(e.getMessage(), e);
-        throw new IllegalArgument(Throwables.getStackTraceAsString(e));
-      } finally{
-        closeTable(table);
-      }
-    }
-
-    @Override
-    public long atomicIncrement(
-        ByteBuffer tableName, ByteBuffer row, ByteBuffer column, long amount)
-            throws IOError, IllegalArgument, TException {
-      byte [][] famAndQf = KeyValue.parseColumn(getBytes(column));
-      if(famAndQf.length == 1) {
-        return atomicIncrement(tableName, row, famAndQf[0], HConstants.EMPTY_BYTE_ARRAY, amount);
-      }
-      return atomicIncrement(tableName, row, famAndQf[0], famAndQf[1], amount);
-    }
-
-    protected long atomicIncrement(ByteBuffer tableName, ByteBuffer row,
-        byte [] family, byte [] qualifier, long amount)
-        throws IOError, IllegalArgument, TException {
-      Table table = null;
-      try {
-        table = getTable(tableName);
-        return table.incrementColumnValue(
-            getBytes(row), family, qualifier, amount);
-      } catch (IOException e) {
-        LOG.warn(e.getMessage(), e);
-        throw getIOError(e);
-      } finally {
-        closeTable(table);
-      }
-    }
-
-    @Override
-    public void scannerClose(int id) throws IOError, IllegalArgument {
-      LOG.debug("scannerClose: id=" + id);
-      ResultScannerWrapper resultScannerWrapper = getScanner(id);
-      if (resultScannerWrapper == null) {
-        String message = "scanner ID is invalid";
-        LOG.warn(message);
-        throw new IllegalArgument("scanner ID is invalid");
-      }
-      resultScannerWrapper.getScanner().close();
-      removeScanner(id);
-    }
-
-    @Override
-    public List<TRowResult> scannerGetList(int id,int nbRows)
-        throws IllegalArgument, IOError {
-      LOG.debug("scannerGetList: id=" + id);
-      ResultScannerWrapper resultScannerWrapper = getScanner(id);
-      if (null == resultScannerWrapper) {
-        String message = "scanner ID is invalid";
-        LOG.warn(message);
-        throw new IllegalArgument("scanner ID is invalid");
-      }
-
-      Result [] results = null;
-      try {
-        results = resultScannerWrapper.getScanner().next(nbRows);
-        if (null == results) {
-          return new ArrayList<TRowResult>();
-        }
-      } catch (IOException e) {
-        LOG.warn(e.getMessage(), e);
-        throw getIOError(e);
-      }
-      return ThriftUtilities.rowResultFromHBase(results, resultScannerWrapper.isColumnSorted());
-    }
-
-    @Override
-    public List<TRowResult> scannerGet(int id) throws IllegalArgument, IOError {
-      return scannerGetList(id,1);
-    }
-
-    @Override
-    public int scannerOpenWithScan(ByteBuffer tableName, TScan tScan,
-        Map<ByteBuffer, ByteBuffer> attributes)
-        throws IOError {
-      
-      Table table = null;
-      try {
-        table = getTable(tableName);
-        Scan scan = new Scan();
-        addAttributes(scan, attributes);
-        if (tScan.isSetStartRow()) {
-          scan.setStartRow(tScan.getStartRow());
-        }
-        if (tScan.isSetStopRow()) {
-          scan.setStopRow(tScan.getStopRow());
-        }
-        if (tScan.isSetTimestamp()) {
-          scan.setTimeRange(0, tScan.getTimestamp());
-        }
-        if (tScan.isSetCaching()) {
-          scan.setCaching(tScan.getCaching());
-        }
-        if (tScan.isSetBatchSize()) {
-          scan.setBatch(tScan.getBatchSize());
-        }
-        if (tScan.isSetColumns() && tScan.getColumns().size() != 0) {
-          for(ByteBuffer column : tScan.getColumns()) {
-            byte [][] famQf = KeyValue.parseColumn(getBytes(column));
-            if(famQf.length == 1) {
-              scan.addFamily(famQf[0]);
-            } else {
-              scan.addColumn(famQf[0], famQf[1]);
-            }
-          }
-        }
-        if (tScan.isSetFilterString()) {
-          ParseFilter parseFilter = new ParseFilter();
-          scan.setFilter(
-              parseFilter.parseFilterString(tScan.getFilterString()));
-        }
-        if (tScan.isSetReversed()) {
-          scan.setReversed(tScan.isReversed());
-        }
-        if (tScan.isSetCacheBlocks()) {
-          scan.setCacheBlocks(tScan.isCacheBlocks());
-        }
-        return addScanner(table.getScanner(scan), tScan.sortColumns);
-      } catch (IOException e) {
-        LOG.warn(e.getMessage(), e);
-        throw getIOError(e);
-      } finally{
-        closeTable(table);
-      }
-    }
-
-    @Override
-    public int scannerOpen(ByteBuffer tableName, ByteBuffer startRow,
-        List<ByteBuffer> columns,
-        Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
-      
-      Table table = null;
-      try {
-        table = getTable(tableName);
-        Scan scan = new Scan(getBytes(startRow));
-        addAttributes(scan, attributes);
-        if(columns != null && columns.size() != 0) {
-          for(ByteBuffer column : columns) {
-            byte [][] famQf = KeyValue.parseColumn(getBytes(column));
-            if(famQf.length == 1) {
-              scan.addFamily(famQf[0]);
-            } else {
-              scan.addColumn(famQf[0], famQf[1]);
-            }
-          }
-        }
-        return addScanner(table.getScanner(scan), false);
-      } catch (IOException e) {
-        LOG.warn(e.getMessage(), e);
-        throw getIOError(e);
-      } finally{
-        closeTable(table);
-      }
-    }
-
-    @Override
-    public int scannerOpenWithStop(ByteBuffer tableName, ByteBuffer startRow,
-        ByteBuffer stopRow, List<ByteBuffer> columns,
-        Map<ByteBuffer, ByteBuffer> attributes)
-        throws IOError, TException {
-      
-      Table table = null;
-      try {
-        table = getTable(tableName);
-        Scan scan = new Scan(getBytes(startRow), getBytes(stopRow));
-        addAttributes(scan, attributes);
-        if(columns != null && columns.size() != 0) {
-          for(ByteBuffer column : columns) {
-            byte [][] famQf = KeyValue.parseColumn(getBytes(column));
-            if(famQf.length == 1) {
-              scan.addFamily(famQf[0]);
-            } else {
-              scan.addColumn(famQf[0], famQf[1]);
-            }
-          }
-        }
-        return addScanner(table.getScanner(scan), false);
-      } catch (IOException e) {
-        LOG.warn(e.getMessage(), e);
-        throw getIOError(e);
-      } finally{
-        closeTable(table);
-      }
-    }
-
-    @Override
-    public int scannerOpenWithPrefix(ByteBuffer tableName,
-                                     ByteBuffer startAndPrefix,
-                                     List<ByteBuffer> columns,
-        Map<ByteBuffer, ByteBuffer> attributes)
-        throws IOError, TException {
-      
-      Table table = null;
-      try {
-        table = getTable(tableName);
-        Scan scan = new Scan(getBytes(startAndPrefix));
-        addAttributes(scan, attributes);
-        Filter f = new WhileMatchFilter(
-            new PrefixFilter(getBytes(startAndPrefix)));
-        scan.setFilter(f);
-        if (columns != null && columns.size() != 0) {
-          for(ByteBuffer column : columns) {
-            byte [][] famQf = KeyValue.parseColumn(getBytes(column));
-            if(famQf.length == 1) {
-              scan.addFamily(famQf[0]);
-            } else {
-              scan.addColumn(famQf[0], famQf[1]);
-            }
-          }
-        }
-        return addScanner(table.getScanner(scan), false);
-      } catch (IOException e) {
-        LOG.warn(e.getMessage(), e);
-        throw getIOError(e);
-      } finally{
-        closeTable(table);
-      }
-    }
-
-    @Override
-    public int scannerOpenTs(ByteBuffer tableName, ByteBuffer startRow,
-        List<ByteBuffer> columns, long timestamp,
-        Map<ByteBuffer, ByteBuffer> attributes) throws IOError, TException {
-      
-      Table table = null;
-      try {
-        table = getTable(tableName);
-        Scan scan = new Scan(getBytes(startRow));
-        addAttributes(scan, attributes);
-        scan.setTimeRange(0, timestamp);
-        if (columns != null && columns.size() != 0) {
-          for (ByteBuffer column : columns) {
-            byte [][] famQf = KeyValue.parseColumn(getBytes(column));
-            if(famQf.length == 1) {
-              scan.addFamily(famQf[0]);
-            } else {
-              scan.addColumn(famQf[0], famQf[1]);
-            }
-          }
-        }
-        return addScanner(table.getScanner(scan), false);
-      } catch (IOException e) {
-        LOG.warn(e.getMessage(), e);
-        throw getIOError(e);
-      } finally{
-        closeTable(table);
-      }
-    }
-
-    @Override
-    public int scannerOpenWithStopTs(ByteBuffer tableName, ByteBuffer startRow,
-        ByteBuffer stopRow, List<ByteBuffer> columns, long timestamp,
-        Map<ByteBuffer, ByteBuffer> attributes)
-        throws IOError, TException {
-      
-      Table table = null;
-      try {
-        table = getTable(tableName);
-        Scan scan = new Scan(getBytes(startRow), getBytes(stopRow));
-        addAttributes(scan, attributes);
-        scan.setTimeRange(0, timestamp);
-        if (columns != null && columns.size() != 0) {
-          for (ByteBuffer column : columns) {
-            byte [][] famQf = KeyValue.parseColumn(getBytes(column));
-            if(famQf.length == 1) {
-              scan.addFamily(famQf[0]);
-            } else {
-              scan.addColumn(famQf[0], famQf[1]);
-            }
-          }
-        }
-        scan.setTimeRange(0, timestamp);
-        return addScanner(table.getScanner(scan), false);
-      } catch (IOException e) {
-        LOG.warn(e.getMessage(), e);
-        throw getIOError(e);
-      } finally{
-        closeTable(table);
-      }
-    }
-
-    @Override
-    public Map<ByteBuffer, ColumnDescriptor> getColumnDescriptors(
-        ByteBuffer tableName) throws IOError, TException {
-      
-      Table table = null;
-      try {
-        TreeMap<ByteBuffer, ColumnDescriptor> columns =
-          new TreeMap<ByteBuffer, ColumnDescriptor>();
-
-        table = getTable(tableName);
-        HTableDescriptor desc = table.getTableDescriptor();
-
-        for (HColumnDescriptor e : desc.getFamilies()) {
-          ColumnDescriptor col = ThriftUtilities.colDescFromHbase(e);
-          columns.put(col.name, col);
-        }
-        return columns;
-      } catch (IOException e) {
-        LOG.warn(e.getMessage(), e);
-        throw getIOError(e);
-      } finally {
-        closeTable(table);
-      }
-    }
-
-    @Deprecated
-    @Override
-    public List<TCell> getRowOrBefore(ByteBuffer tableName, ByteBuffer row,
-        ByteBuffer family) throws IOError {
-      try {
-        Result result = getRowOrBefore(getBytes(tableName), getBytes(row), getBytes(family));
-        return ThriftUtilities.cellFromHBase(result.rawCells());
-      } catch (IOException e) {
-        LOG.warn(e.getMessage(), e);
-        throw getIOError(e);
-      }
-    }
-
-    @Override
-    public TRegionInfo getRegionInfo(ByteBuffer searchRow) throws IOError {
-      try {
-        byte[] row = getBytes(searchRow);
-        Result startRowResult =
-            getRowOrBefore(TableName.META_TABLE_NAME.getName(), row, HConstants.CATALOG_FAMILY);
-
-        if (startRowResult == null) {
-          throw new IOException("Cannot find row in "+ TableName.META_TABLE_NAME+", row="
-                                + Bytes.toStringBinary(row));
-        }
-
-        // find region start and end keys
-        HRegionInfo regionInfo = HRegionInfo.getHRegionInfo(startRowResult);
-        if (regionInfo == null) {
-          throw new IOException("HRegionInfo REGIONINFO was null or " +
-                                " empty in Meta for row="
-                                + Bytes.toStringBinary(row));
-        }
-        TRegionInfo region = new TRegionInfo();
-        region.setStartKey(regionInfo.getStartKey());
-        region.setEndKey(regionInfo.getEndKey());
-        region.id = regionInfo.getRegionId();
-        region.setName(regionInfo.getRegionName());
-        region.version = regionInfo.getVersion();
-
-        // find region assignment to server
-        ServerName serverName = HRegionInfo.getServerName(startRowResult);
-        if (serverName != null) {
-          region.setServerName(Bytes.toBytes(serverName.getHostname()));
-          region.port = serverName.getPort();
-        }
-        return region;
-      } catch (IOException e) {
-        LOG.warn(e.getMessage(), e);
-        throw getIOError(e);
-      }
-    }
-
-    private void closeTable(Table table) throws IOError
-    {
-      try{
-        if(table != null){
-          table.close();
-        }
-      } catch (IOException e){
-        LOG.error(e.getMessage(), e);
-        throw new IOError(Throwables.getStackTraceAsString(e));
-      }
-    }
-    
-    private Result getRowOrBefore(byte[] tableName, byte[] row, byte[] family) throws IOException {
-      Scan scan = new Scan(row);
-      scan.setReversed(true);
-      scan.addFamily(family);
-      scan.setStartRow(row);
-      Table table = getTable(tableName);      
-      try (ResultScanner scanner = table.getScanner(scan)) {
-        return scanner.next();
-      } finally{
-        if(table != null){
-          table.close();
-        }
-      }
-    }
-
-    private void initMetrics(ThriftMetrics metrics) {
-      this.metrics = metrics;
-    }
-
-    @Override
-    public void increment(TIncrement tincrement) throws IOError, TException {
-
-      if (tincrement.getRow().length == 0 || tincrement.getTable().length == 0) {
-        throw new TException("Must supply a table and a row key; can't increment");
-      }
-
-      if (conf.getBoolean(COALESCE_INC_KEY, false)) {
-        this.coalescer.queueIncrement(tincrement);
-        return;
-      }
-
-      Table table = null;
-      try {
-        table = getTable(tincrement.getTable());
-        Increment inc = ThriftUtilities.incrementFromThrift(tincrement);
-        table.increment(inc);
-      } catch (IOException e) {
-        LOG.warn(e.getMessage(), e);
-        throw getIOError(e);
-      } finally{
-        closeTable(table);
-      }
-    }
-
-    @Override
-    public void incrementRows(List<TIncrement> tincrements) throws IOError, TException {
-      if (conf.getBoolean(COALESCE_INC_KEY, false)) {
-        this.coalescer.queueIncrements(tincrements);
-        return;
-      }
-      for (TIncrement tinc : tincrements) {
-        increment(tinc);
-      }
-    }
-
-    @Override
-    public List<TCell> append(TAppend tappend) throws IOError, TException {
-      if (tappend.getRow().length == 0 || tappend.getTable().length == 0) {
-        throw new TException("Must supply a table and a row key; can't append");
-      }
-
-      Table table = null;
-      try {
-        table = getTable(tappend.getTable());
-        Append append = ThriftUtilities.appendFromThrift(tappend);
-        Result result = table.append(append);
-        return ThriftUtilities.cellFromHBase(result.rawCells());
-      } catch (IOException e) {
-        LOG.warn(e.getMessage(), e);
-        throw getIOError(e);
-      } finally{
-          closeTable(table);
-      }
-    }
-
-    @Override
-    public boolean checkAndPut(ByteBuffer tableName, ByteBuffer row, ByteBuffer column,
-        ByteBuffer value, Mutation mput, Map<ByteBuffer, ByteBuffer> attributes) throws IOError,
-        IllegalArgument, TException {
-      Put put;
-      try {
-        put = new Put(getBytes(row), HConstants.LATEST_TIMESTAMP);
-        addAttributes(put, attributes);
-
-        byte[][] famAndQf = KeyValue.parseColumn(getBytes(mput.column));
-
-        put.addImmutable(famAndQf[0], famAndQf[1], mput.value != null ? getBytes(mput.value)
-            : HConstants.EMPTY_BYTE_ARRAY);
-
-        put.setDurability(mput.writeToWAL ? Durability.SYNC_WAL : Durability.SKIP_WAL);
-      } catch (IllegalArgumentException e) {
-        LOG.warn(e.getMessage(), e);
-        throw new IllegalArgument(Throwables.getStackTraceAsString(e));
-      }
-
-      Table table = null;
-      try {
-        table = getTable(tableName);
-        byte[][] famAndQf = KeyValue.parseColumn(getBytes(column));
-        return table.checkAndPut(getBytes(row), famAndQf[0], famAndQf[1],
-          value != null ? getBytes(value) : HConstants.EMPTY_BYTE_ARRAY, put);
-      } catch (IOException e) {
-        LOG.warn(e.getMessage(), e);
-        throw getIOError(e);
-      } catch (IllegalArgumentException e) {
-        LOG.warn(e.getMessage(), e);
-        throw new IllegalArgument(Throwables.getStackTraceAsString(e));
-      } finally {
-          closeTable(table);
-      }
-    }
-  }
-
-
-  private static IOError getIOError(Throwable throwable) {
-    IOError error = new IOErrorWithCause(throwable);
-    error.setMessage(Throwables.getStackTraceAsString(throwable));
-    return error;
-  }
-
-  /**
-   * Adds all the attributes into the Operation object
-   */
-  private static void addAttributes(OperationWithAttributes op,
-    Map<ByteBuffer, ByteBuffer> attributes) {
-    if (attributes == null || attributes.size() == 0) {
-      return;
-    }
-    for (Map.Entry<ByteBuffer, ByteBuffer> entry : attributes.entrySet()) {
-      String name = Bytes.toStringBinary(getBytes(entry.getKey()));
-      byte[] value =  getBytes(entry.getValue());
-      op.setAttribute(name, value);
-    }
-  }
-
-  public static void registerFilters(Configuration conf) {
-    String[] filters = conf.getStrings("hbase.thrift.filters");
-    if(filters != null) {
-      for(String filterClass: filters) {
-        String[] filterPart = filterClass.split(":");
-        if(filterPart.length != 2) {
-          LOG.warn("Invalid filter specification " + filterClass + " - skipping");
-        } else {
-          ParseFilter.registerFilter(filterPart[0], filterPart[1]);
-        }
-      }
-    }
-  }
-
-  public static class IOErrorWithCause extends IOError {
-    private static final long serialVersionUID = 3545290006843153883L;
-
-    private Throwable cause;
-    public IOErrorWithCause(Throwable cause) {
-      this.cause = cause;
-    }
-
-    @Override
-    public synchronized Throwable getCause() {
-      return cause;
-    }
-
-    @Override
-    public boolean equals(Object other) {
-      if (super.equals(other) &&
-          other instanceof IOErrorWithCause) {
-        Throwable otherCause = ((IOErrorWithCause) other).getCause();
-        if (this.getCause() != null) {
-          return otherCause != null && this.getCause().equals(otherCause);
-        } else {
-          return otherCause == null;
-        }
-      }
-      return false;
-    }
-
-    @Override
-    public int hashCode() {
-      int result = super.hashCode();
-      result = 31 * result + (cause != null ? cause.hashCode() : 0);
-      return result;
-    }
-  }
-}
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java
index c3375a3..8cf5de9 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java
@@ -26,11 +26,11 @@ import java.util.List;
 import java.util.Locale;
 import java.util.TreeMap;
 
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Append;
 import org.apache.hadoop.hbase.client.Increment;
 import org.apache.hadoop.hbase.client.Result;
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/AlreadyExists.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/AlreadyExists.java
index d648818..3522336 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/AlreadyExists.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/AlreadyExists.java
@@ -11,7 +11,7 @@ package org.apache.hadoop.hbase.thrift.generated;
  * An AlreadyExists exceptions signals that a table with the specified
  * name already exists
  */
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.13.0)", date = "2020-04-16")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.13.0)", date = "2021-02-09")
 public class AlreadyExists extends org.apache.thrift.TException implements org.apache.thrift.TBase<AlreadyExists, AlreadyExists._Fields>, java.io.Serializable, Cloneable, Comparable<AlreadyExists> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("AlreadyExists");
 
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/BatchMutation.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/BatchMutation.java
index aea93f6..e87c40a 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/BatchMutation.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/BatchMutation.java
@@ -10,7 +10,7 @@ package org.apache.hadoop.hbase.thrift.generated;
 /**
  * A BatchMutation object is used to apply a number of Mutations to a single row.
  */
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.13.0)", date = "2020-04-16")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.13.0)", date = "2021-02-09")
 public class BatchMutation implements org.apache.thrift.TBase<BatchMutation, BatchMutation._Fields>, java.io.Serializable, Cloneable, Comparable<BatchMutation> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("BatchMutation");
 
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/ColumnDescriptor.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/ColumnDescriptor.java
index fdff24c..cfcbe67 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/ColumnDescriptor.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/ColumnDescriptor.java
@@ -12,7 +12,7 @@ package org.apache.hadoop.hbase.thrift.generated;
  * such as the number of versions, compression settings, etc. It is
  * used as input when creating a table or adding a column.
  */
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.13.0)", date = "2020-04-16")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.13.0)", date = "2021-02-09")
 public class ColumnDescriptor implements org.apache.thrift.TBase<ColumnDescriptor, ColumnDescriptor._Fields>, java.io.Serializable, Cloneable, Comparable<ColumnDescriptor> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ColumnDescriptor");
 
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/Hbase.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/Hbase.java
index c7207f2..67e0deb 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/Hbase.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/Hbase.java
@@ -7,7 +7,7 @@
 package org.apache.hadoop.hbase.thrift.generated;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"})
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.13.0)", date = "2020-04-16")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.13.0)", date = "2021-02-09")
 public class Hbase {
 
   public interface Iface {
@@ -575,19 +575,6 @@ public class Hbase {
     public void scannerClose(int id) throws IOError, IllegalArgument, org.apache.thrift.TException;
 
     /**
-     * Get the row just before the specified one.
-     * 
-     * @return value for specified row/column
-     * 
-     * @param tableName name of table
-     * 
-     * @param row row key
-     * 
-     * @param family column name
-     */
-    public java.util.List<TCell> getRowOrBefore(java.nio.ByteBuffer tableName, java.nio.ByteBuffer row, java.nio.ByteBuffer family) throws IOError, org.apache.thrift.TException;
-
-    /**
      * Get the regininfo for the specified row. It scans
      * the metatable to find region's start and end keys.
      * 
@@ -628,6 +615,13 @@ public class Hbase {
      */
     public boolean checkAndPut(java.nio.ByteBuffer tableName, java.nio.ByteBuffer row, java.nio.ByteBuffer column, java.nio.ByteBuffer value, Mutation mput, java.util.Map<java.nio.ByteBuffer,java.nio.ByteBuffer> attributes) throws IOError, IllegalArgument, org.apache.thrift.TException;
 
+    /**
+     * Get the type of this thrift server.
+     * 
+     * @return the type of this thrift server
+     */
+    public TThriftServerType getThriftServerType() throws org.apache.thrift.TException;
+
   }
 
   public interface AsyncIface {
@@ -714,14 +708,14 @@ public class Hbase {
 
     public void scannerClose(int id, org.apache.thrift.async.AsyncMethodCallback<Void> resultHandler) throws org.apache.thrift.TException;
 
-    public void getRowOrBefore(java.nio.ByteBuffer tableName, java.nio.ByteBuffer row, java.nio.ByteBuffer family, org.apache.thrift.async.AsyncMethodCallback<java.util.List<TCell>> resultHandler) throws org.apache.thrift.TException;
-
     public void getRegionInfo(java.nio.ByteBuffer row, org.apache.thrift.async.AsyncMethodCallback<TRegionInfo> resultHandler) throws org.apache.thrift.TException;
 
     public void append(TAppend append, org.apache.thrift.async.AsyncMethodCallback<java.util.List<TCell>> resultHandler) throws org.apache.thrift.TException;
 
     public void checkAndPut(java.nio.ByteBuffer tableName, java.nio.ByteBuffer row, java.nio.ByteBuffer column, java.nio.ByteBuffer value, Mutation mput, java.util.Map<java.nio.ByteBuffer,java.nio.ByteBuffer> attributes, org.apache.thrift.async.AsyncMethodCallback<java.lang.Boolean> resultHandler) throws org.apache.thrift.TException;
 
+    public void getThriftServerType(org.apache.thrift.async.AsyncMethodCallback<TThriftServerType> resultHandler) throws org.apache.thrift.TException;
+
   }
 
   public static class Client extends org.apache.thrift.TServiceClient implements Iface {
@@ -1874,34 +1868,6 @@ public class Hbase {
       return;
     }
 
-    public java.util.List<TCell> getRowOrBefore(java.nio.ByteBuffer tableName, java.nio.ByteBuffer row, java.nio.ByteBuffer family) throws IOError, org.apache.thrift.TException
-    {
-      send_getRowOrBefore(tableName, row, family);
-      return recv_getRowOrBefore();
-    }
-
-    public void send_getRowOrBefore(java.nio.ByteBuffer tableName, java.nio.ByteBuffer row, java.nio.ByteBuffer family) throws org.apache.thrift.TException
-    {
-      getRowOrBefore_args args = new getRowOrBefore_args();
-      args.setTableName(tableName);
-      args.setRow(row);
-      args.setFamily(family);
-      sendBase("getRowOrBefore", args);
-    }
-
-    public java.util.List<TCell> recv_getRowOrBefore() throws IOError, org.apache.thrift.TException
-    {
-      getRowOrBefore_result result = new getRowOrBefore_result();
-      receiveBase(result, "getRowOrBefore");
-      if (result.isSetSuccess()) {
-        return result.success;
-      }
-      if (result.io != null) {
-        throw result.io;
-      }
-      throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "getRowOrBefore failed: unknown result");
-    }
-
     public TRegionInfo getRegionInfo(java.nio.ByteBuffer row) throws IOError, org.apache.thrift.TException
     {
       send_getRegionInfo(row);
@@ -1988,6 +1954,28 @@ public class Hbase {
       throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "checkAndPut failed: unknown result");
     }
 
+    public TThriftServerType getThriftServerType() throws org.apache.thrift.TException
+    {
+      send_getThriftServerType();
+      return recv_getThriftServerType();
+    }
+
+    public void send_getThriftServerType() throws org.apache.thrift.TException
+    {
+      getThriftServerType_args args = new getThriftServerType_args();
+      sendBase("getThriftServerType", args);
+    }
+
+    public TThriftServerType recv_getThriftServerType() throws org.apache.thrift.TException
+    {
+      getThriftServerType_result result = new getThriftServerType_result();
+      receiveBase(result, "getThriftServerType");
+      if (result.isSetSuccess()) {
+        return result.success;
+      }
+      throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "getThriftServerType failed: unknown result");
+    }
+
   }
   public static class AsyncClient extends org.apache.thrift.async.TAsyncClient implements AsyncIface {
     public static class Factory implements org.apache.thrift.async.TAsyncClientFactory<AsyncClient> {
@@ -3573,44 +3561,6 @@ public class Hbase {
       }
     }
 
-    public void getRowOrBefore(java.nio.ByteBuffer tableName, java.nio.ByteBuffer row, java.nio.ByteBuffer family, org.apache.thrift.async.AsyncMethodCallback<java.util.List<TCell>> resultHandler) throws org.apache.thrift.TException {
-      checkReady();
-      getRowOrBefore_call method_call = new getRowOrBefore_call(tableName, row, family, resultHandler, this, ___protocolFactory, ___transport);
-      this.___currentMethod = method_call;
-      ___manager.call(method_call);
-    }
-
-    public static class getRowOrBefore_call extends org.apache.thrift.async.TAsyncMethodCall<java.util.List<TCell>> {
-      private java.nio.ByteBuffer tableName;
-      private java.nio.ByteBuffer row;
-      private java.nio.ByteBuffer family;
-      public getRowOrBefore_call(java.nio.ByteBuffer tableName, java.nio.ByteBuffer row, java.nio.ByteBuffer family, org.apache.thrift.async.AsyncMethodCallback<java.util.List<TCell>> resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
-        super(client, protocolFactory, transport, resultHandler, false);
-        this.tableName = tableName;
-        this.row = row;
-        this.family = family;
-      }
-
-      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
-        prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("getRowOrBefore", org.apache.thrift.protocol.TMessageType.CALL, 0));
-        getRowOrBefore_args args = new getRowOrBefore_args();
-        args.setTableName(tableName);
-        args.setRow(row);
-        args.setFamily(family);
-        args.write(prot);
-        prot.writeMessageEnd();
-      }
-
-      public java.util.List<TCell> getResult() throws IOError, org.apache.thrift.TException {
-        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
-          throw new java.lang.IllegalStateException("Method call not finished!");
-        }
-        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
-        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
-        return (new Client(prot)).recv_getRowOrBefore();
-      }
-    }
-
     public void getRegionInfo(java.nio.ByteBuffer row, org.apache.thrift.async.AsyncMethodCallback<TRegionInfo> resultHandler) throws org.apache.thrift.TException {
       checkReady();
       getRegionInfo_call method_call = new getRegionInfo_call(row, resultHandler, this, ___protocolFactory, ___transport);
@@ -3722,6 +3672,35 @@ public class Hbase {
       }
     }
 
+    public void getThriftServerType(org.apache.thrift.async.AsyncMethodCallback<TThriftServerType> resultHandler) throws org.apache.thrift.TException {
+      checkReady();
+      getThriftServerType_call method_call = new getThriftServerType_call(resultHandler, this, ___protocolFactory, ___transport);
+      this.___currentMethod = method_call;
+      ___manager.call(method_call);
+    }
+
+    public static class getThriftServerType_call extends org.apache.thrift.async.TAsyncMethodCall<TThriftServerType> {
+      public getThriftServerType_call(org.apache.thrift.async.AsyncMethodCallback<TThriftServerType> resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
+        super(client, protocolFactory, transport, resultHandler, false);
+      }
+
+      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
+        prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("getThriftServerType", org.apache.thrift.protocol.TMessageType.CALL, 0));
+        getThriftServerType_args args = new getThriftServerType_args();
+        args.write(prot);
+        prot.writeMessageEnd();
+      }
+
+      public TThriftServerType getResult() throws org.apache.thrift.TException {
+        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
+          throw new java.lang.IllegalStateException("Method call not finished!");
+        }
+        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
+        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
+        return (new Client(prot)).recv_getThriftServerType();
+      }
+    }
+
   }
 
   public static class Processor<I extends Iface> extends org.apache.thrift.TBaseProcessor<I> implements org.apache.thrift.TProcessor {
@@ -3776,10 +3755,10 @@ public class Hbase {
       processMap.put("scannerGet", new scannerGet());
       processMap.put("scannerGetList", new scannerGetList());
       processMap.put("scannerClose", new scannerClose());
-      processMap.put("getRowOrBefore", new getRowOrBefore());
       processMap.put("getRegionInfo", new getRegionInfo());
       processMap.put("append", new append());
       processMap.put("checkAndPut", new checkAndPut());
+      processMap.put("getThriftServerType", new getThriftServerType());
       return processMap;
     }
 
@@ -5000,35 +4979,6 @@ public class Hbase {
       }
     }
 
-    public static class getRowOrBefore<I extends Iface> extends org.apache.thrift.ProcessFunction<I, getRowOrBefore_args> {
-      public getRowOrBefore() {
-        super("getRowOrBefore");
-      }
-
-      public getRowOrBefore_args getEmptyArgsInstance() {
-        return new getRowOrBefore_args();
-      }
-
-      protected boolean isOneway() {
-        return false;
-      }
-
-      @Override
-      protected boolean rethrowUnhandledExceptions() {
-        return false;
-      }
-
-      public getRowOrBefore_result getResult(I iface, getRowOrBefore_args args) throws org.apache.thrift.TException {
-        getRowOrBefore_result result = new getRowOrBefore_result();
-        try {
-          result.success = iface.getRowOrBefore(args.tableName, args.row, args.family);
-        } catch (IOError io) {
-          result.io = io;
-        }
-        return result;
-      }
-    }
-
     public static class getRegionInfo<I extends Iface> extends org.apache.thrift.ProcessFunction<I, getRegionInfo_args> {
       public getRegionInfo() {
         super("getRegionInfo");
@@ -5119,6 +5069,31 @@ public class Hbase {
       }
     }
 
+    public static class getThriftServerType<I extends Iface> extends org.apache.thrift.ProcessFunction<I, getThriftServerType_args> {
+      public getThriftServerType() {
+        super("getThriftServerType");
+      }
+
+      public getThriftServerType_args getEmptyArgsInstance() {
+        return new getThriftServerType_args();
+      }
+
+      protected boolean isOneway() {
+        return false;
+      }
+
+      @Override
+      protected boolean rethrowUnhandledExceptions() {
+        return false;
+      }
+
+      public getThriftServerType_result getResult(I iface, getThriftServerType_args args) throws org.apache.thrift.TException {
+        getThriftServerType_result result = new getThriftServerType_result();
+        result.success = iface.getThriftServerType();
+        return result;
+      }
+    }
+
   }
 
   public static class AsyncProcessor<I extends AsyncIface> extends org.apache.thrift.TBaseAsyncProcessor<I> {
@@ -5173,10 +5148,10 @@ public class Hbase {
       processMap.put("scannerGet", new scannerGet());
       processMap.put("scannerGetList", new scannerGetList());
       processMap.put("scannerClose", new scannerClose());
-      processMap.put("getRowOrBefore", new getRowOrBefore());
       processMap.put("getRegionInfo", new getRegionInfo());
       processMap.put("append", new append());
       processMap.put("checkAndPut", new checkAndPut());
+      processMap.put("getThriftServerType", new getThriftServerType());
       return processMap;
     }
 
@@ -7876,20 +7851,20 @@ public class Hbase {
       }
     }
 
-    public static class getRowOrBefore<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, getRowOrBefore_args, java.util.List<TCell>> {
-      public getRowOrBefore() {
-        super("getRowOrBefore");
+    public static class getRegionInfo<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, getRegionInfo_args, TRegionInfo> {
+      public getRegionInfo() {
+        super("getRegionInfo");
       }
 
-      public getRowOrBefore_args getEmptyArgsInstance() {
-        return new getRowOrBefore_args();
+      public getRegionInfo_args getEmptyArgsInstance() {
+        return new getRegionInfo_args();
       }
 
-      public org.apache.thrift.async.AsyncMethodCallback<java.util.List<TCell>> getResultHandler(final org.apache.thrift.server.AbstractNonblockingServer.AsyncFrameBuffer fb, final int seqid) {
+      public org.apache.thrift.async.AsyncMethodCallback<TRegionInfo> getResultHandler(final org.apache.thrift.server.AbstractNonblockingServer.AsyncFrameBuffer fb, final int seqid) {
         final org.apache.thrift.AsyncProcessFunction fcall = this;
-        return new org.apache.thrift.async.AsyncMethodCallback<java.util.List<TCell>>() { 
-          public void onComplete(java.util.List<TCell> o) {
-            getRowOrBefore_result result = new getRowOrBefore_result();
+        return new org.apache.thrift.async.AsyncMethodCallback<TRegionInfo>() { 
+          public void onComplete(TRegionInfo o) {
+            getRegionInfo_result result = new getRegionInfo_result();
             result.success = o;
             try {
               fcall.sendResponse(fb, result, org.apache.thrift.protocol.TMessageType.REPLY,seqid);
@@ -7904,7 +7879,7 @@ public class Hbase {
           public void onError(java.lang.Exception e) {
             byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
             org.apache.thrift.TSerializable msg;
-            getRowOrBefore_result result = new getRowOrBefore_result();
+            getRegionInfo_result result = new getRegionInfo_result();
             if (e instanceof IOError) {
               result.io = (IOError) e;
               result.setIoIsSet(true);
@@ -7936,25 +7911,25 @@ public class Hbase {
         return false;
       }
 
-      public void start(I iface, getRowOrBefore_args args, org.apache.thrift.async.AsyncMethodCallback<java.util.List<TCell>> resultHandler) throws org.apache.thrift.TException {
-        iface.getRowOrBefore(args.tableName, args.row, args.family,resultHandler);
+      public void start(I iface, getRegionInfo_args args, org.apache.thrift.async.AsyncMethodCallback<TRegionInfo> resultHandler) throws org.apache.thrift.TException {
+        iface.getRegionInfo(args.row,resultHandler);
       }
     }
 
-    public static class getRegionInfo<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, getRegionInfo_args, TRegionInfo> {
-      public getRegionInfo() {
-        super("getRegionInfo");
+    public static class append<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, append_args, java.util.List<TCell>> {
+      public append() {
+        super("append");
       }
 
-      public getRegionInfo_args getEmptyArgsInstance() {
-        return new getRegionInfo_args();
+      public append_args getEmptyArgsInstance() {
+        return new append_args();
       }
 
-      public org.apache.thrift.async.AsyncMethodCallback<TRegionInfo> getResultHandler(final org.apache.thrift.server.AbstractNonblockingServer.AsyncFrameBuffer fb, final int seqid) {
+      public org.apache.thrift.async.AsyncMethodCallback<java.util.List<TCell>> getResultHandler(final org.apache.thrift.server.AbstractNonblockingServer.AsyncFrameBuffer fb, final int seqid) {
         final org.apache.thrift.AsyncProcessFunction fcall = this;
-        return new org.apache.thrift.async.AsyncMethodCallback<TRegionInfo>() { 
-          public void onComplete(TRegionInfo o) {
-            getRegionInfo_result result = new getRegionInfo_result();
+        return new org.apache.thrift.async.AsyncMethodCallback<java.util.List<TCell>>() { 
+          public void onComplete(java.util.List<TCell> o) {
+            append_result result = new append_result();
             result.success = o;
             try {
               fcall.sendResponse(fb, result, org.apache.thrift.protocol.TMessageType.REPLY,seqid);
@@ -7969,7 +7944,7 @@ public class Hbase {
           public void onError(java.lang.Exception e) {
             byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
             org.apache.thrift.TSerializable msg;
-            getRegionInfo_result result = new getRegionInfo_result();
+            append_result result = new append_result();
             if (e instanceof IOError) {
               result.io = (IOError) e;
               result.setIoIsSet(true);
@@ -8001,26 +7976,27 @@ public class Hbase {
         return false;
       }
 
-      public void start(I iface, getRegionInfo_args args, org.apache.thrift.async.AsyncMethodCallback<TRegionInfo> resultHandler) throws org.apache.thrift.TException {
-        iface.getRegionInfo(args.row,resultHandler);
+      public void start(I iface, append_args args, org.apache.thrift.async.AsyncMethodCallback<java.util.List<TCell>> resultHandler) throws org.apache.thrift.TException {
+        iface.append(args.append,resultHandler);
       }
     }
 
-    public static class append<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, append_args, java.util.List<TCell>> {
-      public append() {
-        super("append");
+    public static class checkAndPut<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, checkAndPut_args, java.lang.Boolean> {
+      public checkAndPut() {
+        super("checkAndPut");
       }
 
-      public append_args getEmptyArgsInstance() {
-        return new append_args();
+      public checkAndPut_args getEmptyArgsInstance() {
+        return new checkAndPut_args();
       }
 
-      public org.apache.thrift.async.AsyncMethodCallback<java.util.List<TCell>> getResultHandler(final org.apache.thrift.server.AbstractNonblockingServer.AsyncFrameBuffer fb, final int seqid) {
+      public org.apache.thrift.async.AsyncMethodCallback<java.lang.Boolean> getResultHandler(final org.apache.thrift.server.AbstractNonblockingServer.AsyncFrameBuffer fb, final int seqid) {
         final org.apache.thrift.AsyncProcessFunction fcall = this;
-        return new org.apache.thrift.async.AsyncMethodCallback<java.util.List<TCell>>() { 
-          public void onComplete(java.util.List<TCell> o) {
-            append_result result = new append_result();
+        return new org.apache.thrift.async.AsyncMethodCallback<java.lang.Boolean>() { 
+          public void onComplete(java.lang.Boolean o) {
+            checkAndPut_result result = new checkAndPut_result();
             result.success = o;
+            result.setSuccessIsSet(true);
             try {
               fcall.sendResponse(fb, result, org.apache.thrift.protocol.TMessageType.REPLY,seqid);
             } catch (org.apache.thrift.transport.TTransportException e) {
@@ -8034,11 +8010,15 @@ public class Hbase {
           public void onError(java.lang.Exception e) {
             byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
             org.apache.thrift.TSerializable msg;
-            append_result result = new append_result();
+            checkAndPut_result result = new checkAndPut_result();
             if (e instanceof IOError) {
               result.io = (IOError) e;
               result.setIoIsSet(true);
               msg = result;
+            } else if (e instanceof IllegalArgument) {
+              result.ia = (IllegalArgument) e;
+              result.setIaIsSet(true);
+              msg = result;
             } else if (e instanceof org.apache.thrift.transport.TTransportException) {
               _LOGGER.error("TTransportException inside handler", e);
               fb.close();
@@ -8066,27 +8046,26 @@ public class Hbase {
         return false;
       }
 
-      public void start(I iface, append_args args, org.apache.thrift.async.AsyncMethodCallback<java.util.List<TCell>> resultHandler) throws org.apache.thrift.TException {
-        iface.append(args.append,resultHandler);
+      public void start(I iface, checkAndPut_args args, org.apache.thrift.async.AsyncMethodCallback<java.lang.Boolean> resultHandler) throws org.apache.thrift.TException {
+        iface.checkAndPut(args.tableName, args.row, args.column, args.value, args.mput, args.attributes,resultHandler);
       }
     }
 
-    public static class checkAndPut<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, checkAndPut_args, java.lang.Boolean> {
-      public checkAndPut() {
-        super("checkAndPut");
+    public static class getThriftServerType<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, getThriftServerType_args, TThriftServerType> {
+      public getThriftServerType() {
+        super("getThriftServerType");
       }
 
-      public checkAndPut_args getEmptyArgsInstance() {
-        return new checkAndPut_args();
+      public getThriftServerType_args getEmptyArgsInstance() {
+        return new getThriftServerType_args();
       }
 
-      public org.apache.thrift.async.AsyncMethodCallback<java.lang.Boolean> getResultHandler(final org.apache.thrift.server.AbstractNonblockingServer.AsyncFrameBuffer fb, final int seqid) {
+      public org.apache.thrift.async.AsyncMethodCallback<TThriftServerType> getResultHandler(final org.apache.thrift.server.AbstractNonblockingServer.AsyncFrameBuffer fb, final int seqid) {
         final org.apache.thrift.AsyncProcessFunction fcall = this;
-        return new org.apache.thrift.async.AsyncMethodCallback<java.lang.Boolean>() { 
-          public void onComplete(java.lang.Boolean o) {
-            checkAndPut_result result = new checkAndPut_result();
+        return new org.apache.thrift.async.AsyncMethodCallback<TThriftServerType>() { 
+          public void onComplete(TThriftServerType o) {
+            getThriftServerType_result result = new getThriftServerType_result();
             result.success = o;
-            result.setSuccessIsSet(true);
             try {
               fcall.sendResponse(fb, result, org.apache.thrift.protocol.TMessageType.REPLY,seqid);
             } catch (org.apache.thrift.transport.TTransportException e) {
@@ -8100,16 +8079,8 @@ public class Hbase {
           public void onError(java.lang.Exception e) {
             byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
             org.apache.thrift.TSerializable msg;
-            checkAndPut_result result = new checkAndPut_result();
-            if (e instanceof IOError) {
-              result.io = (IOError) e;
-              result.setIoIsSet(true);
-              msg = result;
-            } else if (e instanceof IllegalArgument) {
-              result.ia = (IllegalArgument) e;
-              result.setIaIsSet(true);
-              msg = result;
-            } else if (e instanceof org.apache.thrift.transport.TTransportException) {
+            getThriftServerType_result result = new getThriftServerType_result();
+            if (e instanceof org.apache.thrift.transport.TTransportException) {
               _LOGGER.error("TTransportException inside handler", e);
               fb.close();
               return;
@@ -8136,8 +8107,8 @@ public class Hbase {
         return false;
       }
 
-      public void start(I iface, checkAndPut_args args, org.apache.thrift.async.AsyncMethodCallback<java.lang.Boolean> resultHandler) throws org.apache.thrift.TException {
-        iface.checkAndPut(args.tableName, args.row, args.column, args.value, args.mput, args.attributes,resultHandler);
+      public void start(I iface, getThriftServerType_args args, org.apache.thrift.async.AsyncMethodCallback<TThriftServerType> resultHandler) throws org.apache.thrift.TException {
+        iface.getThriftServerType(resultHandler);
       }
     }
 
@@ -56522,43 +56493,25 @@ public class Hbase {
     }
   }
 
-  public static class getRowOrBefore_args implements org.apache.thrift.TBase<getRowOrBefore_args, getRowOrBefore_args._Fields>, java.io.Serializable, Cloneable, Comparable<getRowOrBefore_args>   {
-    private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("getRowOrBefore_args");
+  public static class getRegionInfo_args implements org.apache.thrift.TBase<getRegionInfo_args, getRegionInfo_args._Fields>, java.io.Serializable, Cloneable, Comparable<getRegionInfo_args>   {
+    private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("getRegionInfo_args");
 
-    private static final org.apache.thrift.protocol.TField TABLE_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("tableName", org.apache.thrift.protocol.TType.STRING, (short)1);
-    private static final org.apache.thrift.protocol.TField ROW_FIELD_DESC = new org.apache.thrift.protocol.TField("row", org.apache.thrift.protocol.TType.STRING, (short)2);
-    private static final org.apache.thrift.protocol.TField FAMILY_FIELD_DESC = new org.apache.thrift.protocol.TField("family", org.apache.thrift.protocol.TType.STRING, (short)3);
+    private static final org.apache.thrift.protocol.TField ROW_FIELD_DESC = new org.apache.thrift.protocol.TField("row", org.apache.thrift.protocol.TType.STRING, (short)1);
 
-    private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new getRowOrBefore_argsStandardSchemeFactory();
-    private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new getRowOrBefore_argsTupleSchemeFactory();
+    private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new getRegionInfo_argsStandardSchemeFactory();
+    private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new getRegionInfo_argsTupleSchemeFactory();
 
     /**
-     * name of table
-     */
-    public @org.apache.thrift.annotation.Nullable java.nio.ByteBuffer tableName; // required
-    /**
      * row key
      */
     public @org.apache.thrift.annotation.Nullable java.nio.ByteBuffer row; // required
-    /**
-     * column name
-     */
-    public @org.apache.thrift.annotation.Nullable java.nio.ByteBuffer family; // required
 
     /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
     public enum _Fields implements org.apache.thrift.TFieldIdEnum {
       /**
-       * name of table
-       */
-      TABLE_NAME((short)1, "tableName"),
-      /**
        * row key
        */
-      ROW((short)2, "row"),
-      /**
-       * column name
-       */
-      FAMILY((short)3, "family");
+      ROW((short)1, "row");
 
       private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>();
 
@@ -56574,12 +56527,8 @@ public class Hbase {
       @org.apache.thrift.annotation.Nullable
       public static _Fields findByThriftId(int fieldId) {
         switch(fieldId) {
-          case 1: // TABLE_NAME
-            return TABLE_NAME;
-          case 2: // ROW
+          case 1: // ROW
             return ROW;
-          case 3: // FAMILY
-            return FAMILY;
           default:
             return null;
         }
@@ -56624,94 +56573,38 @@ public class Hbase {
     public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
     static {
       java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
-      tmpMap.put(_Fields.TABLE_NAME, new org.apache.thrift.meta_data.FieldMetaData("tableName", org.apache.thrift.TFieldRequirementType.DEFAULT, 
-          new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING          , "Text")));
       tmpMap.put(_Fields.ROW, new org.apache.thrift.meta_data.FieldMetaData("row", org.apache.thrift.TFieldRequirementType.DEFAULT, 
           new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING          , "Text")));
-      tmpMap.put(_Fields.FAMILY, new org.apache.thrift.meta_data.FieldMetaData("family", org.apache.thrift.TFieldRequirementType.DEFAULT, 
-          new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING          , "Text")));
       metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
-      org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(getRowOrBefore_args.class, metaDataMap);
+      org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(getRegionInfo_args.class, metaDataMap);
     }
 
-    public getRowOrBefore_args() {
+    public getRegionInfo_args() {
     }
 
-    public getRowOrBefore_args(
-      java.nio.ByteBuffer tableName,
-      java.nio.ByteBuffer row,
-      java.nio.ByteBuffer family)
+    public getRegionInfo_args(
+      java.nio.ByteBuffer row)
     {
       this();
-      this.tableName = org.apache.thrift.TBaseHelper.copyBinary(tableName);
       this.row = org.apache.thrift.TBaseHelper.copyBinary(row);
-      this.family = org.apache.thrift.TBaseHelper.copyBinary(family);
     }
 
     /**
      * Performs a deep copy on <i>other</i>.
      */
-    public getRowOrBefore_args(getRowOrBefore_args other) {
-      if (other.isSetTableName()) {
-        this.tableName = org.apache.thrift.TBaseHelper.copyBinary(other.tableName);
-      }
+    public getRegionInfo_args(getRegionInfo_args other) {
       if (other.isSetRow()) {
         this.row = org.apache.thrift.TBaseHelper.copyBinary(other.row);
       }
-      if (other.isSetFamily()) {
-        this.family = org.apache.thrift.TBaseHelper.copyBinary(other.family);
-      }
     }
 
-    public getRowOrBefore_args deepCopy() {
-      return new getRowOrBefore_args(this);
+    public getRegionInfo_args deepCopy() {
+      return new getRegionInfo_args(this);
     }
 
     @Override
     public void clear() {
-      this.tableName = null;
       this.row = null;
-      this.family = null;
-    }
-
-    /**
-     * name of table
-     */
-    public byte[] getTableName() {
-      setTableName(org.apache.thrift.TBaseHelper.rightSize(tableName));
-      return tableName == null ? null : tableName.array();
-    }
-
-    public java.nio.ByteBuffer bufferForTableName() {
-      return org.apache.thrift.TBaseHelper.copyBinary(tableName);
-    }
-
-    /**
-     * name of table
-     */
-    public getRowOrBefore_args setTableName(byte[] tableName) {
-      this.tableName = tableName == null ? (java.nio.ByteBuffer)null     : java.nio.ByteBuffer.wrap(tableName.clone());
-      return this;
-    }
-
-    public getRowOrBefore_args setTableName(@org.apache.thrift.annotation.Nullable java.nio.ByteBuffer tableName) {
-      this.tableName = org.apache.thrift.TBaseHelper.copyBinary(tableName);
-      return this;
-    }
-
-    public void unsetTableName() {
-      this.tableName = null;
-    }
-
-    /** Returns true if field tableName is set (has been assigned a value) and false otherwise */
-    public boolean isSetTableName() {
-      return this.tableName != null;
-    }
-
-    public void setTableNameIsSet(boolean value) {
-      if (!value) {
-        this.tableName = null;
-      }
     }
 
     /**
@@ -56729,12 +56622,12 @@ public class Hbase {
     /**
      * row key
      */
-    public getRowOrBefore_args setRow(byte[] row) {
+    public getRegionInfo_args setRow(byte[] row) {
       this.row = row == null ? (java.nio.ByteBuffer)null     : java.nio.ByteBuffer.wrap(row.clone());
       return this;
     }
 
-    public getRowOrBefore_args setRow(@org.apache.thrift.annotation.Nullable java.nio.ByteBuffer row) {
+    public getRegionInfo_args setRow(@org.apache.thrift.annotation.Nullable java.nio.ByteBuffer row) {
       this.row = org.apache.thrift.TBaseHelper.copyBinary(row);
       return this;
     }
@@ -56754,60 +56647,8 @@ public class Hbase {
       }
     }
 
-    /**
-     * column name
-     */
-    public byte[] getFamily() {
-      setFamily(org.apache.thrift.TBaseHelper.rightSize(family));
-      return family == null ? null : family.array();
-    }
-
-    public java.nio.ByteBuffer bufferForFamily() {
-      return org.apache.thrift.TBaseHelper.copyBinary(family);
-    }
-
-    /**
-     * column name
-     */
-    public getRowOrBefore_args setFamily(byte[] family) {
-      this.family = family == null ? (java.nio.ByteBuffer)null     : java.nio.ByteBuffer.wrap(family.clone());
-      return this;
-    }
-
-    public getRowOrBefore_args setFamily(@org.apache.thrift.annotation.Nullable java.nio.ByteBuffer family) {
-      this.family = org.apache.thrift.TBaseHelper.copyBinary(family);
-      return this;
-    }
-
-    public void unsetFamily() {
-      this.family = null;
-    }
-
-    /** Returns true if field family is set (has been assigned a value) and false otherwise */
-    public boolean isSetFamily() {
-      return this.family != null;
-    }
-
-    public void setFamilyIsSet(boolean value) {
-      if (!value) {
-        this.family = null;
-      }
-    }
-
     public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) {
       switch (field) {
-      case TABLE_NAME:
-        if (value == null) {
-          unsetTableName();
-        } else {
-          if (value instanceof byte[]) {
-            setTableName((byte[])value);
-          } else {
-            setTableName((java.nio.ByteBuffer)value);
-          }
-        }
-        break;
-
       case ROW:
         if (value == null) {
           unsetRow();
@@ -56820,33 +56661,15 @@ public class Hbase {
         }
         break;
 
-      case FAMILY:
-        if (value == null) {
-          unsetFamily();
-        } else {
-          if (value instanceof byte[]) {
-            setFamily((byte[])value);
-          } else {
-            setFamily((java.nio.ByteBuffer)value);
-          }
-        }
-        break;
-
       }
     }
 
     @org.apache.thrift.annotation.Nullable
     public java.lang.Object getFieldValue(_Fields field) {
       switch (field) {
-      case TABLE_NAME:
-        return getTableName();
-
       case ROW:
         return getRow();
 
-      case FAMILY:
-        return getFamily();
-
       }
       throw new java.lang.IllegalStateException();
     }
@@ -56858,12 +56681,8 @@ public class Hbase {
       }
 
       switch (field) {
-      case TABLE_NAME:
-        return isSetTableName();
       case ROW:
         return isSetRow();
-      case FAMILY:
-        return isSetFamily();
       }
       throw new java.lang.IllegalStateException();
     }
@@ -56872,26 +56691,17 @@ public class Hbase {
     public boolean equals(java.lang.Object that) {
       if (that == null)
         return false;
-      if (that instanceof getRowOrBefore_args)
-        return this.equals((getRowOrBefore_args)that);
+      if (that instanceof getRegionInfo_args)
+        return this.equals((getRegionInfo_args)that);
       return false;
     }
 
-    public boolean equals(getRowOrBefore_args that) {
+    public boolean equals(getRegionInfo_args that) {
       if (that == null)
         return false;
       if (this == that)
         return true;
 
-      boolean this_present_tableName = true && this.isSetTableName();
-      boolean that_present_tableName = true && that.isSetTableName();
-      if (this_present_tableName || that_present_tableName) {
-        if (!(this_present_tableName && that_present_tableName))
-          return false;
-        if (!this.tableName.equals(that.tableName))
-          return false;
-      }
-
       boolean this_present_row = true && this.isSetRow();
       boolean that_present_row = true && that.isSetRow();
       if (this_present_row || that_present_row) {
@@ -56901,15 +56711,6 @@ public class Hbase {
           return false;
       }
 
-      boolean this_present_family = true && this.isSetFamily();
-      boolean that_present_family = true && that.isSetFamily();
-      if (this_present_family || that_present_family) {
-        if (!(this_present_family && that_present_family))
-          return false;
-        if (!this.family.equals(that.family))
-          return false;
-      }
-
       return true;
     }
 
@@ -56917,39 +56718,21 @@ public class Hbase {
     public int hashCode() {
       int hashCode = 1;
 
-      hashCode = hashCode * 8191 + ((isSetTableName()) ? 131071 : 524287);
-      if (isSetTableName())
-        hashCode = hashCode * 8191 + tableName.hashCode();
-
       hashCode = hashCode * 8191 + ((isSetRow()) ? 131071 : 524287);
       if (isSetRow())
         hashCode = hashCode * 8191 + row.hashCode();
 
-      hashCode = hashCode * 8191 + ((isSetFamily()) ? 131071 : 524287);
-      if (isSetFamily())
-        hashCode = hashCode * 8191 + family.hashCode();
-
       return hashCode;
     }
 
     @Override
-    public int compareTo(getRowOrBefore_args other) {
+    public int compareTo(getRegionInfo_args other) {
       if (!getClass().equals(other.getClass())) {
         return getClass().getName().compareTo(other.getClass().getName());
       }
 
       int lastComparison = 0;
 
-      lastComparison = java.lang.Boolean.valueOf(isSetTableName()).compareTo(other.isSetTableName());
-      if (lastComparison != 0) {
-        return lastComparison;
-      }
-      if (isSetTableName()) {
-        lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.tableName, other.tableName);
-        if (lastComparison != 0) {
-          return lastComparison;
-        }
-      }
       lastComparison = java.lang.Boolean.valueOf(isSetRow()).compareTo(other.isSetRow());
       if (lastComparison != 0) {
         return lastComparison;
@@ -56960,16 +56743,6 @@ public class Hbase {
           return lastComparison;
         }
       }
-      lastComparison = java.lang.Boolean.valueOf(isSetFamily()).compareTo(other.isSetFamily());
-      if (lastComparison != 0) {
-        return lastComparison;
-      }
-      if (isSetFamily()) {
-        lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.family, other.family);
-        if (lastComparison != 0) {
-          return lastComparison;
-        }
-      }
       return 0;
     }
 
@@ -56988,17 +56761,9 @@ public class Hbase {
 
     @Override
     public java.lang.String toString() {
-      java.lang.StringBuilder sb = new java.lang.StringBuilder("getRowOrBefore_args(");
+      java.lang.StringBuilder sb = new java.lang.StringBuilder("getRegionInfo_args(");
       boolean first = true;
 
-      sb.append("tableName:");
-      if (this.tableName == null) {
-        sb.append("null");
-      } else {
-        org.apache.thrift.TBaseHelper.toString(this.tableName, sb);
-      }
-      first = false;
-      if (!first) sb.append(", ");
       sb.append("row:");
       if (this.row == null) {
         sb.append("null");
@@ -57006,14 +56771,6 @@ public class Hbase {
         org.apache.thrift.TBaseHelper.toString(this.row, sb);
       }
       first = false;
-      if (!first) sb.append(", ");
-      sb.append("family:");
-      if (this.family == null) {
-        sb.append("null");
-      } else {
-        org.apache.thrift.TBaseHelper.toString(this.family, sb);
-      }
-      first = false;
       sb.append(")");
       return sb.toString();
     }
@@ -57039,15 +56796,15 @@ public class Hbase {
       }
     }
 
-    private static class getRowOrBefore_argsStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
-      public getRowOrBefore_argsStandardScheme getScheme() {
-        return new getRowOrBefore_argsStandardScheme();
+    private static class getRegionInfo_argsStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
+      public getRegionInfo_argsStandardScheme getScheme() {
+        return new getRegionInfo_argsStandardScheme();
       }
     }
 
-    private static class getRowOrBefore_argsStandardScheme extends org.apache.thrift.scheme.StandardScheme<getRowOrBefore_args> {
+    private static class getRegionInfo_argsStandardScheme extends org.apache.thrift.scheme.StandardScheme<getRegionInfo_args> {
 
-      public void read(org.apache.thrift.protocol.TProtocol iprot, getRowOrBefore_args struct) throws org.apache.thrift.TException {
+      public void read(org.apache.thrift.protocol.TProtocol iprot, getRegionInfo_args struct) throws org.apache.thrift.TException {
         org.apache.thrift.protocol.TField schemeField;
         iprot.readStructBegin();
         while (true)
@@ -57057,15 +56814,7 @@ public class Hbase {
             break;
           }
           switch (schemeField.id) {
-            case 1: // TABLE_NAME
-              if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
-                struct.tableName = iprot.readBinary();
-                struct.setTableNameIsSet(true);
-              } else { 
-                org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
-              }
-              break;
-            case 2: // ROW
+            case 1: // ROW
               if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
                 struct.row = iprot.readBinary();
                 struct.setRowIsSet(true);
@@ -57073,14 +56822,6 @@ public class Hbase {
                 org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
               }
               break;
-            case 3: // FAMILY
-              if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
-                struct.family = iprot.readBinary();
-                struct.setFamilyIsSet(true);
-              } else { 
-                org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
-              }
-              break;
             default:
               org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
           }
@@ -57092,80 +56833,50 @@ public class Hbase {
         struct.validate();
       }
 
-      public void write(org.apache.thrift.protocol.TProtocol oprot, getRowOrBefore_args struct) throws org.apache.thrift.TException {
+      public void write(org.apache.thrift.protocol.TProtocol oprot, getRegionInfo_args struct) throws org.apache.thrift.TException {
         struct.validate();
 
         oprot.writeStructBegin(STRUCT_DESC);
-        if (struct.tableName != null) {
-          oprot.writeFieldBegin(TABLE_NAME_FIELD_DESC);
-          oprot.writeBinary(struct.tableName);
-          oprot.writeFieldEnd();
-        }
         if (struct.row != null) {
           oprot.writeFieldBegin(ROW_FIELD_DESC);
           oprot.writeBinary(struct.row);
           oprot.writeFieldEnd();
         }
-        if (struct.family != null) {
-          oprot.writeFieldBegin(FAMILY_FIELD_DESC);
-          oprot.writeBinary(struct.family);
-          oprot.writeFieldEnd();
-        }
         oprot.writeFieldStop();
         oprot.writeStructEnd();
       }
 
     }
 
-    private static class getRowOrBefore_argsTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
-      public getRowOrBefore_argsTupleScheme getScheme() {
-        return new getRowOrBefore_argsTupleScheme();
+    private static class getRegionInfo_argsTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
+      public getRegionInfo_argsTupleScheme getScheme() {
+        return new getRegionInfo_argsTupleScheme();
       }
     }
 
-    private static class getRowOrBefore_argsTupleScheme extends org.apache.thrift.scheme.TupleScheme<getRowOrBefore_args> {
+    private static class getRegionInfo_argsTupleScheme extends org.apache.thrift.scheme.TupleScheme<getRegionInfo_args> {
 
       @Override
-      public void write(org.apache.thrift.protocol.TProtocol prot, getRowOrBefore_args struct) throws org.apache.thrift.TException {
+      public void write(org.apache.thrift.protocol.TProtocol prot, getRegionInfo_args struct) throws org.apache.thrift.TException {
         org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
         java.util.BitSet optionals = new java.util.BitSet();
-        if (struct.isSetTableName()) {
-          optionals.set(0);
-        }
         if (struct.isSetRow()) {
-          optionals.set(1);
-        }
-        if (struct.isSetFamily()) {
-          optionals.set(2);
-        }
-        oprot.writeBitSet(optionals, 3);
-        if (struct.isSetTableName()) {
-          oprot.writeBinary(struct.tableName);
+          optionals.set(0);
         }
+        oprot.writeBitSet(optionals, 1);
         if (struct.isSetRow()) {
           oprot.writeBinary(struct.row);
         }
-        if (struct.isSetFamily()) {
-          oprot.writeBinary(struct.family);
-        }
       }
 
       @Override
-      public void read(org.apache.thrift.protocol.TProtocol prot, getRowOrBefore_args struct) throws org.apache.thrift.TException {
+      public void read(org.apache.thrift.protocol.TProtocol prot, getRegionInfo_args struct) throws org.apache.thrift.TException {
         org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
-        java.util.BitSet incoming = iprot.readBitSet(3);
+        java.util.BitSet incoming = iprot.readBitSet(1);
         if (incoming.get(0)) {
-          struct.tableName = iprot.readBinary();
-          struct.setTableNameIsSet(true);
-        }
-        if (incoming.get(1)) {
           struct.row = iprot.readBinary();
           struct.setRowIsSet(true);
         }
-        if (incoming.get(2)) {
-          struct.family = iprot.readBinary();
-          struct.setFamilyIsSet(true);
-        }
       }
     }
 
@@ -57174,16 +56885,16 @@ public class Hbase {
     }
   }
 
-  public static class getRowOrBefore_result implements org.apache.thrift.TBase<getRowOrBefore_result, getRowOrBefore_result._Fields>, java.io.Serializable, Cloneable, Comparable<getRowOrBefore_result>   {
-    private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("getRowOrBefore_result");
+  public static class getRegionInfo_result implements org.apache.thrift.TBase<getRegionInfo_result, getRegionInfo_result._Fields>, java.io.Serializable, Cloneable, Comparable<getRegionInfo_result>   {
+    private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("getRegionInfo_result");
 
-    private static final org.apache.thrift.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift.protocol.TField("success", org.apache.thrift.protocol.TType.LIST, (short)0);
+    private static final org.apache.thrift.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift.protocol.TField("success", org.apache.thrift.protocol.TType.STRUCT, (short)0);
     private static final org.apache.thrift.protocol.TField IO_FIELD_DESC = new org.apache.thrift.protocol.TField("io", org.apache.thrift.protocol.TType.STRUCT, (short)1);
 
-    private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new getRowOrBefore_resultStandardSchemeFactory();
-    private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new getRowOrBefore_resultTupleSchemeFactory();
+    private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new getRegionInfo_resultStandardSchemeFactory();
+    private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new getRegionInfo_resultTupleSchemeFactory();
 
-    public @org.apache.thrift.annotation.Nullable java.util.List<TCell> success; // required
+    public @org.apache.thrift.annotation.Nullable TRegionInfo success; // required
     public @org.apache.thrift.annotation.Nullable IOError io; // required
 
     /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
@@ -57254,19 +56965,18 @@ public class Hbase {
     static {
       java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
       tmpMap.put(_Fields.SUCCESS, new org.apache.thrift.meta_data.FieldMetaData("success", org.apache.thrift.TFieldRequirementType.DEFAULT, 
-          new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, 
-              new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, TCell.class))));
+          new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, TRegionInfo.class)));
       tmpMap.put(_Fields.IO, new org.apache.thrift.meta_data.FieldMetaData("io", org.apache.thrift.TFieldRequirementType.DEFAULT, 
           new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, IOError.class)));
       metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
-      org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(getRowOrBefore_result.class, metaDataMap);
+      org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(getRegionInfo_result.class, metaDataMap);
     }
 
-    public getRowOrBefore_result() {
+    public getRegionInfo_result() {
     }
 
-    public getRowOrBefore_result(
-      java.util.List<TCell> success,
+    public getRegionInfo_result(
+      TRegionInfo success,
       IOError io)
     {
       this();
@@ -57277,21 +56987,17 @@ public class Hbase {
     /**
      * Performs a deep copy on <i>other</i>.
      */
-    public getRowOrBefore_result(getRowOrBefore_result other) {
+    public getRegionInfo_result(getRegionInfo_result other) {
       if (other.isSetSuccess()) {
-        java.util.List<TCell> __this__success = new java.util.ArrayList<TCell>(other.success.size());
-        for (TCell other_element : other.success) {
-          __this__success.add(new TCell(other_element));
-        }
-        this.success = __this__success;
+        this.success = new TRegionInfo(other.success);
       }
       if (other.isSetIo()) {
         this.io = new IOError(other.io);
       }
     }
 
-    public getRowOrBefore_result deepCopy() {
-      return new getRowOrBefore_result(this);
+    public getRegionInfo_result deepCopy() {
+      return new getRegionInfo_result(this);
     }
 
     @Override
@@ -57300,28 +57006,12 @@ public class Hbase {
       this.io = null;
     }
 
-    public int getSuccessSize() {
-      return (this.success == null) ? 0 : this.success.size();
-    }
-
-    @org.apache.thrift.annotation.Nullable
-    public java.util.Iterator<TCell> getSuccessIterator() {
-      return (this.success == null) ? null : this.success.iterator();
-    }
-
-    public void addToSuccess(TCell elem) {
-      if (this.success == null) {
-        this.success = new java.util.ArrayList<TCell>();
-      }
-      this.success.add(elem);
-    }
-
     @org.apache.thrift.annotation.Nullable
-    public java.util.List<TCell> getSuccess() {
+    public TRegionInfo getSuccess() {
       return this.success;
     }
 
-    public getRowOrBefore_result setSuccess(@org.apache.thrift.annotation.Nullable java.util.List<TCell> success) {
+    public getRegionInfo_result setSuccess(@org.apache.thrift.annotation.Nullable TRegionInfo success) {
       this.success = success;
       return this;
     }
@@ -57346,7 +57036,7 @@ public class Hbase {
       return this.io;
     }
 
-    public getRowOrBefore_result setIo(@org.apache.thrift.annotation.Nullable IOError io) {
+    public getRegionInfo_result setIo(@org.apache.thrift.annotation.Nullable IOError io) {
       this.io = io;
       return this;
     }
@@ -57372,7 +57062,7 @@ public class Hbase {
         if (value == null) {
           unsetSuccess();
         } else {
-          setSuccess((java.util.List<TCell>)value);
+          setSuccess((TRegionInfo)value);
         }
         break;
 
@@ -57419,12 +57109,12 @@ public class Hbase {
     public boolean equals(java.lang.Object that) {
       if (that == null)
         return false;
-      if (that instanceof getRowOrBefore_result)
-        return this.equals((getRowOrBefore_result)that);
+      if (that instanceof getRegionInfo_result)
+        return this.equals((getRegionInfo_result)that);
       return false;
     }
 
-    public boolean equals(getRowOrBefore_result that) {
+    public boolean equals(getRegionInfo_result that) {
       if (that == null)
         return false;
       if (this == that)
@@ -57467,7 +57157,7 @@ public class Hbase {
     }
 
     @Override
-    public int compareTo(getRowOrBefore_result other) {
+    public int compareTo(getRegionInfo_result other) {
       if (!getClass().equals(other.getClass())) {
         return getClass().getName().compareTo(other.getClass().getName());
       }
@@ -57512,7 +57202,7 @@ public class Hbase {
 
     @Override
     public java.lang.String toString() {
-      java.lang.StringBuilder sb = new java.lang.StringBuilder("getRowOrBefore_result(");
+      java.lang.StringBuilder sb = new java.lang.StringBuilder("getRegionInfo_result(");
       boolean first = true;
 
       sb.append("success:");
@@ -57537,6 +57227,9 @@ public class Hbase {
     public void validate() throws org.apache.thrift.TException {
       // check for required fields
       // check for sub-struct validity
+      if (success != null) {
+        success.validate();
+      }
     }
 
     private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
@@ -57555,15 +57248,15 @@ public class Hbase {
       }
     }
 
-    private static class getRowOrBefore_resultStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
-      public getRowOrBefore_resultStandardScheme getScheme() {
-        return new getRowOrBefore_resultStandardScheme();
+    private static class getRegionInfo_resultStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
+      public getRegionInfo_resultStandardScheme getScheme() {
+        return new getRegionInfo_resultStandardScheme();
       }
     }
 
-    private static class getRowOrBefore_resultStandardScheme extends org.apache.thrift.scheme.StandardScheme<getRowOrBefore_result> {
+    private static class getRegionInfo_resultStandardScheme extends org.apache.thrift.scheme.StandardScheme<getRegionInfo_result> {
 
-      public void read(org.apache.thrift.protocol.TProtocol iprot, getRowOrBefore_result struct) throws org.apache.thrift.TException {
+      public void read(org.apache.thrift.protocol.TProtocol iprot, getRegionInfo_result struct) throws org.apache.thrift.TException {
         org.apache.thrift.protocol.TField schemeField;
         iprot.readStructBegin();
         while (true)
@@ -57574,19 +57267,9 @@ public class Hbase {
           }
           switch (schemeField.id) {
             case 0: // SUCCESS
-              if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
-                {
-                  org.apache.thrift.protocol.TList _list582 = iprot.readListBegin();
-                  struct.success = new java.util.ArrayList<TCell>(_list582.size);
-                  @org.apache.thrift.annotation.Nullable TCell _elem583;
-                  for (int _i584 = 0; _i584 < _list582.size; ++_i584)
-                  {
-                    _elem583 = new TCell();
-                    _elem583.read(iprot);
-                    struct.success.add(_elem583);
-                  }
-                  iprot.readListEnd();
-                }
+              if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
+                struct.success = new TRegionInfo();
+                struct.success.read(iprot);
                 struct.setSuccessIsSet(true);
               } else { 
                 org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
@@ -57612,20 +57295,13 @@ public class Hbase {
         struct.validate();
       }
 
-      public void write(org.apache.thrift.protocol.TProtocol oprot, getRowOrBefore_result struct) throws org.apache.thrift.TException {
+      public void write(org.apache.thrift.protocol.TProtocol oprot, getRegionInfo_result struct) throws org.apache.thrift.TException {
         struct.validate();
 
         oprot.writeStructBegin(STRUCT_DESC);
         if (struct.success != null) {
           oprot.writeFieldBegin(SUCCESS_FIELD_DESC);
-          {
-            oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.success.size()));
-            for (TCell _iter585 : struct.success)
-            {
-              _iter585.write(oprot);
-            }
-            oprot.writeListEnd();
-          }
+          struct.success.write(oprot);
           oprot.writeFieldEnd();
         }
         if (struct.io != null) {
@@ -57639,16 +57315,16 @@ public class Hbase {
 
     }
 
-    private static class getRowOrBefore_resultTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
-      public getRowOrBefore_resultTupleScheme getScheme() {
-        return new getRowOrBefore_resultTupleScheme();
+    private static class getRegionInfo_resultTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
+      public getRegionInfo_resultTupleScheme getScheme() {
+        return new getRegionInfo_resultTupleScheme();
       }
     }
 
-    private static class getRowOrBefore_resultTupleScheme extends org.apache.thrift.scheme.TupleScheme<getRowOrBefore_result> {
+    private static class getRegionInfo_resultTupleScheme extends org.apache.thrift.scheme.TupleScheme<getRegionInfo_result> {
 
       @Override
-      public void write(org.apache.thrift.protocol.TProtocol prot, getRowOrBefore_result struct) throws org.apache.thrift.TException {
+      public void write(org.apache.thrift.protocol.TProtocol prot, getRegionInfo_result struct) throws org.apache.thrift.TException {
         org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
         java.util.BitSet optionals = new java.util.BitSet();
         if (struct.isSetSuccess()) {
@@ -57659,13 +57335,7 @@ public class Hbase {
         }
         oprot.writeBitSet(optionals, 2);
         if (struct.isSetSuccess()) {
-          {
-            oprot.writeI32(struct.success.size());
-            for (TCell _iter586 : struct.success)
-            {
-              _iter586.write(oprot);
-            }
-          }
+          struct.success.write(oprot);
         }
         if (struct.isSetIo()) {
           struct.io.write(oprot);
@@ -57673,21 +57343,12 @@ public class Hbase {
       }
 
       @Override
-      public void read(org.apache.thrift.protocol.TProtocol prot, getRowOrBefore_result struct) throws org.apache.thrift.TException {
+      public void read(org.apache.thrift.protocol.TProtocol prot, getRegionInfo_result struct) throws org.apache.thrift.TException {
         org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
         java.util.BitSet incoming = iprot.readBitSet(2);
         if (incoming.get(0)) {
-          {
-            org.apache.thrift.protocol.TList _list587 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
-            struct.success = new java.util.ArrayList<TCell>(_list587.size);
-            @org.apache.thrift.annotation.Nullable TCell _elem588;
-            for (int _i589 = 0; _i589 < _list587.size; ++_i589)
-            {
-              _elem588 = new TCell();
-              _elem588.read(iprot);
-              struct.success.add(_elem588);
-            }
-          }
+          struct.success = new TRegionInfo();
+          struct.success.read(iprot);
           struct.setSuccessIsSet(true);
         }
         if (incoming.get(1)) {
@@ -57703,25 +57364,25 @@ public class Hbase {
     }
   }
 
-  public static class getRegionInfo_args implements org.apache.thrift.TBase<getRegionInfo_args, getRegionInfo_args._Fields>, java.io.Serializable, Cloneable, Comparable<getRegionInfo_args>   {
-    private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("getRegionInfo_args");
+  public static class append_args implements org.apache.thrift.TBase<append_args, append_args._Fields>, java.io.Serializable, Cloneable, Comparable<append_args>   {
+    private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("append_args");
 
-    private static final org.apache.thrift.protocol.TField ROW_FIELD_DESC = new org.apache.thrift.protocol.TField("row", org.apache.thrift.protocol.TType.STRING, (short)1);
+    private static final org.apache.thrift.protocol.TField APPEND_FIELD_DESC = new org.apache.thrift.protocol.TField("append", org.apache.thrift.protocol.TType.STRUCT, (short)1);
 
-    private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new getRegionInfo_argsStandardSchemeFactory();
-    private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new getRegionInfo_argsTupleSchemeFactory();
+    private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new append_argsStandardSchemeFactory();
+    private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new append_argsTupleSchemeFactory();
 
     /**
-     * row key
+     * The single append operation to apply
      */
-    public @org.apache.thrift.annotation.Nullable java.nio.ByteBuffer row; // required
+    public @org.apache.thrift.annotation.Nullable TAppend append; // required
 
     /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
     public enum _Fields implements org.apache.thrift.TFieldIdEnum {
       /**
-       * row key
+       * The single append operation to apply
        */
-      ROW((short)1, "row");
+      APPEND((short)1, "append");
 
       private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>();
 
@@ -57737,8 +57398,8 @@ public class Hbase {
       @org.apache.thrift.annotation.Nullable
       public static _Fields findByThriftId(int fieldId) {
         switch(fieldId) {
-          case 1: // ROW
-            return ROW;
+          case 1: // APPEND
+            return APPEND;
           default:
             return null;
         }
@@ -57783,91 +57444,78 @@ public class Hbase {
     public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
     static {
       java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
-      tmpMap.put(_Fields.ROW, new org.apache.thrift.meta_data.FieldMetaData("row", org.apache.thrift.TFieldRequirementType.DEFAULT, 
-          new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING          , "Text")));
+      tmpMap.put(_Fields.APPEND, new org.apache.thrift.meta_data.FieldMetaData("append", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+          new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, TAppend.class)));
       metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
-      org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(getRegionInfo_args.class, metaDataMap);
+      org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(append_args.class, metaDataMap);
     }
 
-    public getRegionInfo_args() {
+    public append_args() {
     }
 
-    public getRegionInfo_args(
-      java.nio.ByteBuffer row)
+    public append_args(
+      TAppend append)
     {
       this();
-      this.row = org.apache.thrift.TBaseHelper.copyBinary(row);
+      this.append = append;
     }
 
     /**
      * Performs a deep copy on <i>other</i>.
      */
-    public getRegionInfo_args(getRegionInfo_args other) {
-      if (other.isSetRow()) {
-        this.row = org.apache.thrift.TBaseHelper.copyBinary(other.row);
+    public append_args(append_args other) {
+      if (other.isSetAppend()) {
+        this.append = new TAppend(other.append);
       }
     }
 
-    public getRegionInfo_args deepCopy() {
-      return new getRegionInfo_args(this);
+    public append_args deepCopy() {
+      return new append_args(this);
     }
 
     @Override
     public void clear() {
-      this.row = null;
+      this.append = null;
     }
 
     /**
-     * row key
+     * The single append operation to apply
      */
-    public byte[] getRow() {
-      setRow(org.apache.thrift.TBaseHelper.rightSize(row));
-      return row == null ? null : row.array();
-    }
-
-    public java.nio.ByteBuffer bufferForRow() {
-      return org.apache.thrift.TBaseHelper.copyBinary(row);
+    @org.apache.thrift.annotation.Nullable
+    public TAppend getAppend() {
+      return this.append;
     }
 
     /**
-     * row key
+     * The single append operation to apply
      */
-    public getRegionInfo_args setRow(byte[] row) {
-      this.row = row == null ? (java.nio.ByteBuffer)null     : java.nio.ByteBuffer.wrap(row.clone());
-      return this;
-    }
-
-    public getRegionInfo_args setRow(@org.apache.thrift.annotation.Nullable java.nio.ByteBuffer row) {
-      this.row = org.apache.thrift.TBaseHelper.copyBinary(row);
+    public append_args setAppend(@org.apache.thrift.annotation.Nullable TAppend append) {
+      this.append = append;
       return this;
     }
 
-    public void unsetRow() {
-      this.row = null;
+    public void unsetAppend() {
+      this.append = null;
     }
 
-    /** Returns true if field row is set (has been assigned a value) and false otherwise */
-    public boolean isSetRow() {
-      return this.row != null;
+    /** Returns true if field append is set (has been assigned a value) and false otherwise */
+    public boolean isSetAppend() {
+      return this.append != null;
     }
 
-    public void setRowIsSet(boolean value) {
+    public void setAppendIsSet(boolean value) {
       if (!value) {
-        this.row = null;
+        this.append = null;
       }
     }
 
     public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) {
       switch (field) {
-      case ROW:
+      case APPEND:
         if (value == null) {
-          unsetRow();
+          unsetAppend();
         } else {
-          if (value instanceof byte[]) {
-            setRow((byte[])value);
-          } else {
-            setRow((java.nio.ByteBuffer)value);
-          }
+          setAppend((TAppend)value);
         }
         break;
 
@@ -57877,8 +57525,8 @@ public class Hbase {
     @org.apache.thrift.annotation.Nullable
     public java.lang.Object getFieldValue(_Fields field) {
       switch (field) {
-      case ROW:
-        return getRow();
+      case APPEND:
+        return getAppend();
 
       }
       throw new java.lang.IllegalStateException();
@@ -57891,8 +57539,8 @@ public class Hbase {
       }
 
       switch (field) {
-      case ROW:
-        return isSetRow();
+      case APPEND:
+        return isSetAppend();
       }
       throw new java.lang.IllegalStateException();
     }
@@ -57901,23 +57549,23 @@ public class Hbase {
     public boolean equals(java.lang.Object that) {
       if (that == null)
         return false;
-      if (that instanceof getRegionInfo_args)
-        return this.equals((getRegionInfo_args)that);
+      if (that instanceof append_args)
+        return this.equals((append_args)that);
       return false;
     }
 
-    public boolean equals(getRegionInfo_args that) {
+    public boolean equals(append_args that) {
       if (that == null)
         return false;
       if (this == that)
         return true;
 
-      boolean this_present_row = true && this.isSetRow();
-      boolean that_present_row = true && that.isSetRow();
-      if (this_present_row || that_present_row) {
-        if (!(this_present_row && that_present_row))
+      boolean this_present_append = true && this.isSetAppend();
+      boolean that_present_append = true && that.isSetAppend();
+      if (this_present_append || that_present_append) {
+        if (!(this_present_append && that_present_append))
           return false;
-        if (!this.row.equals(that.row))
+        if (!this.append.equals(that.append))
           return false;
       }
 
@@ -57928,27 +57576,27 @@ public class Hbase {
     public int hashCode() {
       int hashCode = 1;
 
-      hashCode = hashCode * 8191 + ((isSetRow()) ? 131071 : 524287);
-      if (isSetRow())
-        hashCode = hashCode * 8191 + row.hashCode();
+      hashCode = hashCode * 8191 + ((isSetAppend()) ? 131071 : 524287);
+      if (isSetAppend())
+        hashCode = hashCode * 8191 + append.hashCode();
 
       return hashCode;
     }
 
     @Override
-    public int compareTo(getRegionInfo_args other) {
+    public int compareTo(append_args other) {
       if (!getClass().equals(other.getClass())) {
         return getClass().getName().compareTo(other.getClass().getName());
       }
 
       int lastComparison = 0;
 
-      lastComparison = java.lang.Boolean.valueOf(isSetRow()).compareTo(other.isSetRow());
+      lastComparison = java.lang.Boolean.valueOf(isSetAppend()).compareTo(other.isSetAppend());
       if (lastComparison != 0) {
         return lastComparison;
       }
-      if (isSetRow()) {
-        lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.row, other.row);
+      if (isSetAppend()) {
+        lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.append, other.append);
         if (lastComparison != 0) {
           return lastComparison;
         }
@@ -57971,14 +57619,14 @@ public class Hbase {
 
     @Override
     public java.lang.String toString() {
-      java.lang.StringBuilder sb = new java.lang.StringBuilder("getRegionInfo_args(");
+      java.lang.StringBuilder sb = new java.lang.StringBuilder("append_args(");
       boolean first = true;
 
-      sb.append("row:");
-      if (this.row == null) {
+      sb.append("append:");
+      if (this.append == null) {
         sb.append("null");
       } else {
-        org.apache.thrift.TBaseHelper.toString(this.row, sb);
+        sb.append(this.append);
       }
       first = false;
       sb.append(")");
@@ -57988,6 +57636,9 @@ public class Hbase {
     public void validate() throws org.apache.thrift.TException {
       // check for required fields
       // check for sub-struct validity
+      if (append != null) {
+        append.validate();
+      }
     }
 
     private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
@@ -58006,15 +57657,15 @@ public class Hbase {
       }
     }
 
-    private static class getRegionInfo_argsStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
-      public getRegionInfo_argsStandardScheme getScheme() {
-        return new getRegionInfo_argsStandardScheme();
+    private static class append_argsStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
+      public append_argsStandardScheme getScheme() {
+        return new append_argsStandardScheme();
       }
     }
 
-    private static class getRegionInfo_argsStandardScheme extends org.apache.thrift.scheme.StandardScheme<getRegionInfo_args> {
+    private static class append_argsStandardScheme extends org.apache.thrift.scheme.StandardScheme<append_args> {
 
-      public void read(org.apache.thrift.protocol.TProtocol iprot, getRegionInfo_args struct) throws org.apache.thrift.TException {
+      public void read(org.apache.thrift.protocol.TProtocol iprot, append_args struct) throws org.apache.thrift.TException {
         org.apache.thrift.protocol.TField schemeField;
         iprot.readStructBegin();
         while (true)
@@ -58024,10 +57675,11 @@ public class Hbase {
             break;
           }
           switch (schemeField.id) {
-            case 1: // ROW
-              if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
-                struct.row = iprot.readBinary();
-                struct.setRowIsSet(true);
+            case 1: // APPEND
+              if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
+                struct.append = new TAppend();
+                struct.append.read(iprot);
+                struct.setAppendIsSet(true);
               } else { 
                 org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
               }
@@ -58043,13 +57695,13 @@ public class Hbase {
         struct.validate();
       }
 
-      public void write(org.apache.thrift.protocol.TProtocol oprot, getRegionInfo_args struct) throws org.apache.thrift.TException {
+      public void write(org.apache.thrift.protocol.TProtocol oprot, append_args struct) throws org.apache.thrift.TException {
         struct.validate();
 
         oprot.writeStructBegin(STRUCT_DESC);
-        if (struct.row != null) {
-          oprot.writeFieldBegin(ROW_FIELD_DESC);
-          oprot.writeBinary(struct.row);
+        if (struct.append != null) {
+          oprot.writeFieldBegin(APPEND_FIELD_DESC);
+          struct.append.write(oprot);
           oprot.writeFieldEnd();
         }
         oprot.writeFieldStop();
@@ -58058,34 +57710,35 @@ public class Hbase {
 
     }
 
-    private static class getRegionInfo_argsTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
-      public getRegionInfo_argsTupleScheme getScheme() {
-        return new getRegionInfo_argsTupleScheme();
+    private static class append_argsTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
+      public append_argsTupleScheme getScheme() {
+        return new append_argsTupleScheme();
       }
     }
 
-    private static class getRegionInfo_argsTupleScheme extends org.apache.thrift.scheme.TupleScheme<getRegionInfo_args> {
+    private static class append_argsTupleScheme extends org.apache.thrift.scheme.TupleScheme<append_args> {
 
       @Override
-      public void write(org.apache.thrift.protocol.TProtocol prot, getRegionInfo_args struct) throws org.apache.thrift.TException {
+      public void write(org.apache.thrift.protocol.TProtocol prot, append_args struct) throws org.apache.thrift.TException {
         org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
         java.util.BitSet optionals = new java.util.BitSet();
-        if (struct.isSetRow()) {
+        if (struct.isSetAppend()) {
           optionals.set(0);
         }
         oprot.writeBitSet(optionals, 1);
-        if (struct.isSetRow()) {
-          oprot.writeBinary(struct.row);
+        if (struct.isSetAppend()) {
+          struct.append.write(oprot);
         }
       }
 
       @Override
-      public void read(org.apache.thrift.protocol.TProtocol prot, getRegionInfo_args struct) throws org.apache.thrift.TException {
+      public void read(org.apache.thrift.protocol.TProtocol prot, append_args struct) throws org.apache.thrift.TException {
         org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
         java.util.BitSet incoming = iprot.readBitSet(1);
         if (incoming.get(0)) {
-          struct.row = iprot.readBinary();
-          struct.setRowIsSet(true);
+          struct.append = new TAppend();
+          struct.append.read(iprot);
+          struct.setAppendIsSet(true);
         }
       }
     }
@@ -58095,16 +57748,16 @@ public class Hbase {
     }
   }
 
-  public static class getRegionInfo_result implements org.apache.thrift.TBase<getRegionInfo_result, getRegionInfo_result._Fields>, java.io.Serializable, Cloneable, Comparable<getRegionInfo_result>   {
-    private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("getRegionInfo_result");
+  public static class append_result implements org.apache.thrift.TBase<append_result, append_result._Fields>, java.io.Serializable, Cloneable, Comparable<append_result>   {
+    private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("append_result");
 
-    private static final org.apache.thrift.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift.protocol.TField("success", org.apache.thrift.protocol.TType.STRUCT, (short)0);
+    private static final org.apache.thrift.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift.protocol.TField("success", org.apache.thrift.protocol.TType.LIST, (short)0);
     private static final org.apache.thrift.protocol.TField IO_FIELD_DESC = new org.apache.thrift.protocol.TField("io", org.apache.thrift.protocol.TType.STRUCT, (short)1);
 
-    private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new getRegionInfo_resultStandardSchemeFactory();
-    private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new getRegionInfo_resultTupleSchemeFactory();
+    private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new append_resultStandardSchemeFactory();
+    private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new append_resultTupleSchemeFactory();
 
-    public @org.apache.thrift.annotation.Nullable TRegionInfo success; // required
+    public @org.apache.thrift.annotation.Nullable java.util.List<TCell> success; // required
     public @org.apache.thrift.annotation.Nullable IOError io; // required
 
     /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
@@ -58175,18 +57828,19 @@ public class Hbase {
     static {
       java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
       tmpMap.put(_Fields.SUCCESS, new org.apache.thrift.meta_data.FieldMetaData("success", org.apache.thrift.TFieldRequirementType.DEFAULT, 
-          new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, TRegionInfo.class)));
+          new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, 
+              new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, TCell.class))));
       tmpMap.put(_Fields.IO, new org.apache.thrift.meta_data.FieldMetaData("io", org.apache.thrift.TFieldRequirementType.DEFAULT, 
           new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, IOError.class)));
       metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
-      org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(getRegionInfo_result.class, metaDataMap);
+      org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(append_result.class, metaDataMap);
     }
 
-    public getRegionInfo_result() {
+    public append_result() {
     }
 
-    public getRegionInfo_result(
-      TRegionInfo success,
+    public append_result(
+      java.util.List<TCell> success,
       IOError io)
     {
       this();
@@ -58197,17 +57851,21 @@ public class Hbase {
     /**
      * Performs a deep copy on <i>other</i>.
      */
-    public getRegionInfo_result(getRegionInfo_result other) {
+    public append_result(append_result other) {
       if (other.isSetSuccess()) {
-        this.success = new TRegionInfo(other.success);
+        java.util.List<TCell> __this__success = new java.util.ArrayList<TCell>(other.success.size());
+        for (TCell other_element : other.success) {
+          __this__success.add(new TCell(other_element));
+        }
+        this.success = __this__success;
       }
       if (other.isSetIo()) {
         this.io = new IOError(other.io);
       }
     }
 
-    public getRegionInfo_result deepCopy() {
-      return new getRegionInfo_result(this);
+    public append_result deepCopy() {
+      return new append_result(this);
     }
 
     @Override
@@ -58216,12 +57874,28 @@ public class Hbase {
       this.io = null;
     }
 
+    public int getSuccessSize() {
+      return (this.success == null) ? 0 : this.success.size();
+    }
+
     @org.apache.thrift.annotation.Nullable
-    public TRegionInfo getSuccess() {
+    public java.util.Iterator<TCell> getSuccessIterator() {
+      return (this.success == null) ? null : this.success.iterator();
+    }
+
+    public void addToSuccess(TCell elem) {
+      if (this.success == null) {
+        this.success = new java.util.ArrayList<TCell>();
+      }
+      this.success.add(elem);
+    }
+
+    @org.apache.thrift.annotation.Nullable
+    public java.util.List<TCell> getSuccess() {
       return this.success;
     }
 
-    public getRegionInfo_result setSuccess(@org.apache.thrift.annotation.Nullable TRegionInfo success) {
+    public append_result setSuccess(@org.apache.thrift.annotation.Nullable java.util.List<TCell> success) {
       this.success = success;
       return this;
     }
@@ -58246,7 +57920,7 @@ public class Hbase {
       return this.io;
     }
 
-    public getRegionInfo_result setIo(@org.apache.thrift.annotation.Nullable IOError io) {
+    public append_result setIo(@org.apache.thrift.annotation.Nullable IOError io) {
       this.io = io;
       return this;
     }
@@ -58272,7 +57946,7 @@ public class Hbase {
         if (value == null) {
           unsetSuccess();
         } else {
-          setSuccess((TRegionInfo)value);
+          setSuccess((java.util.List<TCell>)value);
         }
         break;
 
@@ -58319,12 +57993,12 @@ public class Hbase {
     public boolean equals(java.lang.Object that) {
       if (that == null)
         return false;
-      if (that instanceof getRegionInfo_result)
-        return this.equals((getRegionInfo_result)that);
+      if (that instanceof append_result)
+        return this.equals((append_result)that);
       return false;
     }
 
-    public boolean equals(getRegionInfo_result that) {
+    public boolean equals(append_result that) {
       if (that == null)
         return false;
       if (this == that)
@@ -58367,7 +58041,7 @@ public class Hbase {
     }
 
     @Override
-    public int compareTo(getRegionInfo_result other) {
+    public int compareTo(append_result other) {
       if (!getClass().equals(other.getClass())) {
         return getClass().getName().compareTo(other.getClass().getName());
       }
@@ -58412,7 +58086,7 @@ public class Hbase {
 
     @Override
     public java.lang.String toString() {
-      java.lang.StringBuilder sb = new java.lang.StringBuilder("getRegionInfo_result(");
+      java.lang.StringBuilder sb = new java.lang.StringBuilder("append_result(");
       boolean first = true;
 
       sb.append("success:");
@@ -58437,9 +58111,6 @@ public class Hbase {
     public void validate() throws org.apache.thrift.TException {
       // check for required fields
       // check for sub-struct validity
-      if (success != null) {
-        success.validate();
-      }
     }
 
     private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
@@ -58458,15 +58129,15 @@ public class Hbase {
       }
     }
 
-    private static class getRegionInfo_resultStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
-      public getRegionInfo_resultStandardScheme getScheme() {
-        return new getRegionInfo_resultStandardScheme();
+    private static class append_resultStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
+      public append_resultStandardScheme getScheme() {
+        return new append_resultStandardScheme();
       }
     }
 
-    private static class getRegionInfo_resultStandardScheme extends org.apache.thrift.scheme.StandardScheme<getRegionInfo_result> {
+    private static class append_resultStandardScheme extends org.apache.thrift.scheme.StandardScheme<append_result> {
 
-      public void read(org.apache.thrift.protocol.TProtocol iprot, getRegionInfo_result struct) throws org.apache.thrift.TException {
+      public void read(org.apache.thrift.protocol.TProtocol iprot, append_result struct) throws org.apache.thrift.TException {
         org.apache.thrift.protocol.TField schemeField;
         iprot.readStructBegin();
         while (true)
@@ -58477,9 +58148,19 @@ public class Hbase {
           }
           switch (schemeField.id) {
             case 0: // SUCCESS
-              if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
-                struct.success = new TRegionInfo();
-                struct.success.read(iprot);
+              if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
+                {
+                  org.apache.thrift.protocol.TList _list582 = iprot.readListBegin();
+                  struct.success = new java.util.ArrayList<TCell>(_list582.size);
+                  @org.apache.thrift.annotation.Nullable TCell _elem583;
+                  for (int _i584 = 0; _i584 < _list582.size; ++_i584)
+                  {
+                    _elem583 = new TCell();
+                    _elem583.read(iprot);
+                    struct.success.add(_elem583);
+                  }
+                  iprot.readListEnd();
+                }
                 struct.setSuccessIsSet(true);
               } else { 
                 org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
@@ -58505,13 +58186,20 @@ public class Hbase {
         struct.validate();
       }
 
-      public void write(org.apache.thrift.protocol.TProtocol oprot, getRegionInfo_result struct) throws org.apache.thrift.TException {
+      public void write(org.apache.thrift.protocol.TProtocol oprot, append_result struct) throws org.apache.thrift.TException {
         struct.validate();
 
         oprot.writeStructBegin(STRUCT_DESC);
         if (struct.success != null) {
           oprot.writeFieldBegin(SUCCESS_FIELD_DESC);
-          struct.success.write(oprot);
+          {
+            oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.success.size()));
+            for (TCell _iter585 : struct.success)
+            {
+              _iter585.write(oprot);
+            }
+            oprot.writeListEnd();
+          }
           oprot.writeFieldEnd();
         }
         if (struct.io != null) {
@@ -58525,16 +58213,16 @@ public class Hbase {
 
     }
 
-    private static class getRegionInfo_resultTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
-      public getRegionInfo_resultTupleScheme getScheme() {
-        return new getRegionInfo_resultTupleScheme();
+    private static class append_resultTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
+      public append_resultTupleScheme getScheme() {
+        return new append_resultTupleScheme();
       }
     }
 
-    private static class getRegionInfo_resultTupleScheme extends org.apache.thrift.scheme.TupleScheme<getRegionInfo_result> {
+    private static class append_resultTupleScheme extends org.apache.thrift.scheme.TupleScheme<append_result> {
 
       @Override
-      public void write(org.apache.thrift.protocol.TProtocol prot, getRegionInfo_result struct) throws org.apache.thrift.TException {
+      public void write(org.apache.thrift.protocol.TProtocol prot, append_result struct) throws org.apache.thrift.TException {
         org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
         java.util.BitSet optionals = new java.util.BitSet();
         if (struct.isSetSuccess()) {
@@ -58545,7 +58233,13 @@ public class Hbase {
         }
         oprot.writeBitSet(optionals, 2);
         if (struct.isSetSuccess()) {
-          struct.success.write(oprot);
+          {
+            oprot.writeI32(struct.success.size());
+            for (TCell _iter586 : struct.success)
+            {
+              _iter586.write(oprot);
+            }
+          }
         }
         if (struct.isSetIo()) {
           struct.io.write(oprot);
@@ -58553,12 +58247,21 @@ public class Hbase {
       }
 
       @Override
-      public void read(org.apache.thrift.protocol.TProtocol prot, getRegionInfo_result struct) throws org.apache.thrift.TException {
+      public void read(org.apache.thrift.protocol.TProtocol prot, append_result struct) throws org.apache.thrift.TException {
         org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
         java.util.BitSet incoming = iprot.readBitSet(2);
         if (incoming.get(0)) {
-          struct.success = new TRegionInfo();
-          struct.success.read(iprot);
+          {
+            org.apache.thrift.protocol.TList _list587 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
+            struct.success = new java.util.ArrayList<TCell>(_list587.size);
+            @org.apache.thrift.annotation.Nullable TCell _elem588;
+            for (int _i589 = 0; _i589 < _list587.size; ++_i589)
+            {
+              _elem588 = new TCell();
+              _elem588.read(iprot);
+              struct.success.add(_elem588);
+            }
+          }
           struct.setSuccessIsSet(true);
         }
         if (incoming.get(1)) {
@@ -58574,25 +58277,74 @@ public class Hbase {
     }
   }
 
-  public static class append_args implements org.apache.thrift.TBase<append_args, append_args._Fields>, java.io.Serializable, Cloneable, Comparable<append_args>   {
-    private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("append_args");
+  public static class checkAndPut_args implements org.apache.thrift.TBase<checkAndPut_args, checkAndPut_args._Fields>, java.io.Serializable, Cloneable, Comparable<checkAndPut_args>   {
+    private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("checkAndPut_args");
 
-    private static final org.apache.thrift.protocol.TField APPEND_FIELD_DESC = new org.apache.thrift.protocol.TField("append", org.apache.thrift.protocol.TType.STRUCT, (short)1);
+    private static final org.apache.thrift.protocol.TField TABLE_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("tableName", org.apache.thrift.protocol.TType.STRING, (short)1);
+    private static final org.apache.thrift.protocol.TField ROW_FIELD_DESC = new org.apache.thrift.protocol.TField("row", org.apache.thrift.protocol.TType.STRING, (short)2);
+    private static final org.apache.thrift.protocol.TField COLUMN_FIELD_DESC = new org.apache.thrift.protocol.TField("column", org.apache.thrift.protocol.TType.STRING, (short)3);
+    private static final org.apache.thrift.protocol.TField VALUE_FIELD_DESC = new org.apache.thrift.protocol.TField("value", org.apache.thrift.protocol.TType.STRING, (short)5);
+    private static final org.apache.thrift.protocol.TField MPUT_FIELD_DESC = new org.apache.thrift.protocol.TField("mput", org.apache.thrift.protocol.TType.STRUCT, (short)6);
+    private static final org.apache.thrift.protocol.TField ATTRIBUTES_FIELD_DESC = new org.apache.thrift.protocol.TField("attributes", org.apache.thrift.protocol.TType.MAP, (short)7);
 
-    private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new append_argsStandardSchemeFactory();
-    private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new append_argsTupleSchemeFactory();
+    private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new checkAndPut_argsStandardSchemeFactory();
+    private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new checkAndPut_argsTupleSchemeFactory();
 
     /**
-     * The single append operation to apply
+     * name of table
      */
-    public @org.apache.thrift.annotation.Nullable TAppend append; // required
+    public @org.apache.thrift.annotation.Nullable java.nio.ByteBuffer tableName; // required
+    /**
+     * row key
+     */
+    public @org.apache.thrift.annotation.Nullable java.nio.ByteBuffer row; // required
+    /**
+     * column name
+     */
+    public @org.apache.thrift.annotation.Nullable java.nio.ByteBuffer column; // required
+    /**
+     * the expected value for the column parameter, if not
+     * provided the check is for the non-existence of the
+     * column in question
+     */
+    public @org.apache.thrift.annotation.Nullable java.nio.ByteBuffer value; // required
+    /**
+     * mutation for the put
+     */
+    public @org.apache.thrift.annotation.Nullable Mutation mput; // required
+    /**
+     * Mutation attributes
+     */
+    public @org.apache.thrift.annotation.Nullable java.util.Map<java.nio.ByteBuffer,java.nio.ByteBuffer> attributes; // required
 
     /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
     public enum _Fields implements org.apache.thrift.TFieldIdEnum {
       /**
-       * The single append operation to apply
+       * name of table
        */
-      APPEND((short)1, "append");
+      TABLE_NAME((short)1, "tableName"),
+      /**
+       * row key
+       */
+      ROW((short)2, "row"),
+      /**
+       * column name
+       */
+      COLUMN((short)3, "column"),
+      /**
+       * the expected value for the column parameter, if not
+       * provided the check is for the non-existence of the
+       * column in question
+       */
+      VALUE((short)5, "value"),
+      /**
+       * mutation for the put
+       */
+      MPUT((short)6, "mput"),
+      /**
+       * Mutation attributes
+       */
+      ATTRIBUTES((short)7, "attributes");
 
       private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>();
 
@@ -58608,8 +58360,18 @@ public class Hbase {
       @org.apache.thrift.annotation.Nullable
       public static _Fields findByThriftId(int fieldId) {
         switch(fieldId) {
-          case 1: // APPEND
-            return APPEND;
+          case 1: // TABLE_NAME
+            return TABLE_NAME;
+          case 2: // ROW
+            return ROW;
+          case 3: // COLUMN
+            return COLUMN;
+          case 5: // VALUE
+            return VALUE;
+          case 6: // MPUT
+            return MPUT;
+          case 7: // ATTRIBUTES
+            return ATTRIBUTES;
           default:
             return null;
         }
@@ -58654,517 +58416,394 @@ public class Hbase {
     public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
     static {
       java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
-      tmpMap.put(_Fields.APPEND, new org.apache.thrift.meta_data.FieldMetaData("append", org.apache.thrift.TFieldRequirementType.DEFAULT, 
-          new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, TAppend.class)));
+      tmpMap.put(_Fields.TABLE_NAME, new org.apache.thrift.meta_data.FieldMetaData("tableName", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+          new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING          , "Text")));
+      tmpMap.put(_Fields.ROW, new org.apache.thrift.meta_data.FieldMetaData("row", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+          new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING          , "Text")));
+      tmpMap.put(_Fields.COLUMN, new org.apache.thrift.meta_data.FieldMetaData("column", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+          new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING          , "Text")));
+      tmpMap.put(_Fields.VALUE, new org.apache.thrift.meta_data.FieldMetaData("value", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+          new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING          , "Text")));
+      tmpMap.put(_Fields.MPUT, new org.apache.thrift.meta_data.FieldMetaData("mput", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+          new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, Mutation.class)));
+      tmpMap.put(_Fields.ATTRIBUTES, new org.apache.thrift.meta_data.FieldMetaData("attributes", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+          new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP, 
+              new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING              , "Text"), 
+              new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING              , "Text"))));
       metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
-      org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(append_args.class, metaDataMap);
+      org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(checkAndPut_args.class, metaDataMap);
     }
 
-    public append_args() {
+    public checkAndPut_args() {
     }
 
-    public append_args(
-      TAppend append)
+    public checkAndPut_args(
+      java.nio.ByteBuffer tableName,
+      java.nio.ByteBuffer row,
+      java.nio.ByteBuffer column,
+      java.nio.ByteBuffer value,
+      Mutation mput,
+      java.util.Map<java.nio.ByteBuffer,java.nio.ByteBuffer> attributes)
     {
       this();
-      this.append = append;
+      this.tableName = org.apache.thrift.TBaseHelper.copyBinary(tableName);
+      this.row = org.apache.thrift.TBaseHelper.copyBinary(row);
+      this.column = org.apache.thrift.TBaseHelper.copyBinary(column);
+      this.value = org.apache.thrift.TBaseHelper.copyBinary(value);
+      this.mput = mput;
+      this.attributes = attributes;
     }
 
     /**
      * Performs a deep copy on <i>other</i>.
      */
-    public append_args(append_args other) {
-      if (other.isSetAppend()) {
-        this.append = new TAppend(other.append);
+    public checkAndPut_args(checkAndPut_args other) {
+      if (other.isSetTableName()) {
+        this.tableName = org.apache.thrift.TBaseHelper.copyBinary(other.tableName);
+      }
+      if (other.isSetRow()) {
+        this.row = org.apache.thrift.TBaseHelper.copyBinary(other.row);
+      }
+      if (other.isSetColumn()) {
+        this.column = org.apache.thrift.TBaseHelper.copyBinary(other.column);
+      }
+      if (other.isSetValue()) {
+        this.value = org.apache.thrift.TBaseHelper.copyBinary(other.value);
+      }
+      if (other.isSetMput()) {
+        this.mput = new Mutation(other.mput);
+      }
+      if (other.isSetAttributes()) {
+        java.util.Map<java.nio.ByteBuffer,java.nio.ByteBuffer> __this__attributes = new java.util.HashMap<java.nio.ByteBuffer,java.nio.ByteBuffer>(other.attributes.size());
+        for (java.util.Map.Entry<java.nio.ByteBuffer, java.nio.ByteBuffer> other_element : other.attributes.entrySet()) {
+
+          java.nio.ByteBuffer other_element_key = other_element.getKey();
+          java.nio.ByteBuffer other_element_value = other_element.getValue();
+
+          java.nio.ByteBuffer __this__attributes_copy_key = org.apache.thrift.TBaseHelper.copyBinary(other_element_key);
+
+          java.nio.ByteBuffer __this__attributes_copy_value = org.apache.thrift.TBaseHelper.copyBinary(other_element_value);
+
+          __this__attributes.put(__this__attributes_copy_key, __this__attributes_copy_value);
+        }
+        this.attributes = __this__attributes;
       }
     }
 
-    public append_args deepCopy() {
-      return new append_args(this);
+    public checkAndPut_args deepCopy() {
+      return new checkAndPut_args(this);
     }
 
     @Override
     public void clear() {
-      this.append = null;
+      this.tableName = null;
+      this.row = null;
+      this.column = null;
+      this.value = null;
+      this.mput = null;
+      this.attributes = null;
     }
 
     /**
-     * The single append operation to apply
+     * name of table
      */
-    @org.apache.thrift.annotation.Nullable
-    public TAppend getAppend() {
-      return this.append;
+    public byte[] getTableName() {
+      setTableName(org.apache.thrift.TBaseHelper.rightSize(tableName));
+      return tableName == null ? null : tableName.array();
+    }
+
+    public java.nio.ByteBuffer bufferForTableName() {
+      return org.apache.thrift.TBaseHelper.copyBinary(tableName);
     }
 
     /**
-     * The single append operation to apply
+     * name of table
      */
-    public append_args setAppend(@org.apache.thrift.annotation.Nullable TAppend append) {
-      this.append = append;
+    public checkAndPut_args setTableName(byte[] tableName) {
+      this.tableName = tableName == null ? (java.nio.ByteBuffer)null     : java.nio.ByteBuffer.wrap(tableName.clone());
       return this;
     }
 
-    public void unsetAppend() {
-      this.append = null;
-    }
-
-    /** Returns true if field append is set (has been assigned a value) and false otherwise */
-    public boolean isSetAppend() {
-      return this.append != null;
-    }
-
-    public void setAppendIsSet(boolean value) {
-      if (!value) {
-        this.append = null;
-      }
-    }
-
-    public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) {
-      switch (field) {
-      case APPEND:
-        if (value == null) {
-          unsetAppend();
-        } else {
-          setAppend((TAppend)value);
-        }
-        break;
-
-      }
-    }
-
-    @org.apache.thrift.annotation.Nullable
-    public java.lang.Object getFieldValue(_Fields field) {
-      switch (field) {
-      case APPEND:
-        return getAppend();
-
-      }
-      throw new java.lang.IllegalStateException();
+    public checkAndPut_args setTableName(@org.apache.thrift.annotation.Nullable java.nio.ByteBuffer tableName) {
+      this.tableName = org.apache.thrift.TBaseHelper.copyBinary(tableName);
+      return this;
     }
 
-    /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
-    public boolean isSet(_Fields field) {
-      if (field == null) {
-        throw new java.lang.IllegalArgumentException();
-      }
-
-      switch (field) {
-      case APPEND:
-        return isSetAppend();
-      }
-      throw new java.lang.IllegalStateException();
+    public void unsetTableName() {
+      this.tableName = null;
     }
 
-    @Override
-    public boolean equals(java.lang.Object that) {
-      if (that == null)
-        return false;
-      if (that instanceof append_args)
-        return this.equals((append_args)that);
-      return false;
+    /** Returns true if field tableName is set (has been assigned a value) and false otherwise */
+    public boolean isSetTableName() {
+      return this.tableName != null;
     }
 
-    public boolean equals(append_args that) {
-      if (that == null)
-        return false;
-      if (this == that)
-        return true;
-
-      boolean this_present_append = true && this.isSetAppend();
-      boolean that_present_append = true && that.isSetAppend();
-      if (this_present_append || that_present_append) {
-        if (!(this_present_append && that_present_append))
-          return false;
-        if (!this.append.equals(that.append))
-          return false;
+    public void setTableNameIsSet(boolean value) {
+      if (!value) {
+        this.tableName = null;
       }
-
-      return true;
-    }
-
-    @Override
-    public int hashCode() {
-      int hashCode = 1;
-
-      hashCode = hashCode * 8191 + ((isSetAppend()) ? 131071 : 524287);
-      if (isSetAppend())
-        hashCode = hashCode * 8191 + append.hashCode();
-
-      return hashCode;
     }
 
-    @Override
-    public int compareTo(append_args other) {
-      if (!getClass().equals(other.getClass())) {
-        return getClass().getName().compareTo(other.getClass().getName());
-      }
-
-      int lastComparison = 0;
-
-      lastComparison = java.lang.Boolean.valueOf(isSetAppend()).compareTo(other.isSetAppend());
-      if (lastComparison != 0) {
-        return lastComparison;
-      }
-      if (isSetAppend()) {
-        lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.append, other.append);
-        if (lastComparison != 0) {
-          return lastComparison;
-        }
-      }
-      return 0;
+    /**
+     * row key
+     */
+    public byte[] getRow() {
+      setRow(org.apache.thrift.TBaseHelper.rightSize(row));
+      return row == null ? null : row.array();
     }
 
-    @org.apache.thrift.annotation.Nullable
-    public _Fields fieldForId(int fieldId) {
-      return _Fields.findByThriftId(fieldId);
+    public java.nio.ByteBuffer bufferForRow() {
+      return org.apache.thrift.TBaseHelper.copyBinary(row);
     }
 
-    public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
-      scheme(iprot).read(iprot, this);
+    /**
+     * row key
+     */
+    public checkAndPut_args setRow(byte[] row) {
+      this.row = row == null ? (java.nio.ByteBuffer)null     : java.nio.ByteBuffer.wrap(row.clone());
+      return this;
     }
 
-    public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
-      scheme(oprot).write(oprot, this);
+    public checkAndPut_args setRow(@org.apache.thrift.annotation.Nullable java.nio.ByteBuffer row) {
+      this.row = org.apache.thrift.TBaseHelper.copyBinary(row);
+      return this;
     }
 
-    @Override
-    public java.lang.String toString() {
-      java.lang.StringBuilder sb = new java.lang.StringBuilder("append_args(");
-      boolean first = true;
-
-      sb.append("append:");
-      if (this.append == null) {
-        sb.append("null");
-      } else {
-        sb.append(this.append);
-      }
-      first = false;
-      sb.append(")");
-      return sb.toString();
+    public void unsetRow() {
+      this.row = null;
     }
 
-    public void validate() throws org.apache.thrift.TException {
-      // check for required fields
-      // check for sub-struct validity
-      if (append != null) {
-        append.validate();
-      }
+    /** Returns true if field row is set (has been assigned a value) and false otherwise */
+    public boolean isSetRow() {
+      return this.row != null;
     }
 
-    private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
-      try {
-        write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
-      } catch (org.apache.thrift.TException te) {
-        throw new java.io.IOException(te);
+    public void setRowIsSet(boolean value) {
+      if (!value) {
+        this.row = null;
       }
     }
 
-    private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException {
-      try {
-        read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
-      } catch (org.apache.thrift.TException te) {
-        throw new java.io.IOException(te);
-      }
+    /**
+     * column name
+     */
+    public byte[] getColumn() {
+      setColumn(org.apache.thrift.TBaseHelper.rightSize(column));
+      return column == null ? null : column.array();
     }
 
-    private static class append_argsStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
-      public append_argsStandardScheme getScheme() {
-        return new append_argsStandardScheme();
-      }
+    public java.nio.ByteBuffer bufferForColumn() {
+      return org.apache.thrift.TBaseHelper.copyBinary(column);
     }
 
-    private static class append_argsStandardScheme extends org.apache.thrift.scheme.StandardScheme<append_args> {
-
-      public void read(org.apache.thrift.protocol.TProtocol iprot, append_args struct) throws org.apache.thrift.TException {
-        org.apache.thrift.protocol.TField schemeField;
-        iprot.readStructBegin();
-        while (true)
-        {
-          schemeField = iprot.readFieldBegin();
-          if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
-            break;
-          }
-          switch (schemeField.id) {
-            case 1: // APPEND
-              if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
-                struct.append = new TAppend();
-                struct.append.read(iprot);
-                struct.setAppendIsSet(true);
-              } else { 
-                org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
-              }
-              break;
-            default:
-              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
-          }
-          iprot.readFieldEnd();
-        }
-        iprot.readStructEnd();
-
-        // check for required fields of primitive type, which can't be checked in the validate method
-        struct.validate();
-      }
-
-      public void write(org.apache.thrift.protocol.TProtocol oprot, append_args struct) throws org.apache.thrift.TException {
-        struct.validate();
-
-        oprot.writeStructBegin(STRUCT_DESC);
-        if (struct.append != null) {
-          oprot.writeFieldBegin(APPEND_FIELD_DESC);
-          struct.append.write(oprot);
-          oprot.writeFieldEnd();
-        }
-        oprot.writeFieldStop();
-        oprot.writeStructEnd();
-      }
-
+    /**
+     * column name
+     */
+    public checkAndPut_args setColumn(byte[] column) {
+      this.column = column == null ? (java.nio.ByteBuffer)null     : java.nio.ByteBuffer.wrap(column.clone());
+      return this;
     }
 
-    private static class append_argsTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
-      public append_argsTupleScheme getScheme() {
-        return new append_argsTupleScheme();
-      }
+    public checkAndPut_args setColumn(@org.apache.thrift.annotation.Nullable java.nio.ByteBuffer column) {
+      this.column = org.apache.thrift.TBaseHelper.copyBinary(column);
+      return this;
     }
 
-    private static class append_argsTupleScheme extends org.apache.thrift.scheme.TupleScheme<append_args> {
-
-      @Override
-      public void write(org.apache.thrift.protocol.TProtocol prot, append_args struct) throws org.apache.thrift.TException {
-        org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
-        java.util.BitSet optionals = new java.util.BitSet();
-        if (struct.isSetAppend()) {
-          optionals.set(0);
-        }
-        oprot.writeBitSet(optionals, 1);
-        if (struct.isSetAppend()) {
-          struct.append.write(oprot);
-        }
-      }
-
-      @Override
-      public void read(org.apache.thrift.protocol.TProtocol prot, append_args struct) throws org.apache.thrift.TException {
-        org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
-        java.util.BitSet incoming = iprot.readBitSet(1);
-        if (incoming.get(0)) {
-          struct.append = new TAppend();
-          struct.append.read(iprot);
-          struct.setAppendIsSet(true);
-        }
-      }
+    public void unsetColumn() {
+      this.column = null;
     }
 
-    private static <S extends org.apache.thrift.scheme.IScheme> S scheme(org.apache.thrift.protocol.TProtocol proto) {
-      return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY : TUPLE_SCHEME_FACTORY).getScheme();
+    /** Returns true if field column is set (has been assigned a value) and false otherwise */
+    public boolean isSetColumn() {
+      return this.column != null;
     }
-  }
-
-  public static class append_result implements org.apache.thrift.TBase<append_result, append_result._Fields>, java.io.Serializable, Cloneable, Comparable<append_result>   {
-    private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("append_result");
-
-    private static final org.apache.thrift.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift.protocol.TField("success", org.apache.thrift.protocol.TType.LIST, (short)0);
-    private static final org.apache.thrift.protocol.TField IO_FIELD_DESC = new org.apache.thrift.protocol.TField("io", org.apache.thrift.protocol.TType.STRUCT, (short)1);
-
-    private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new append_resultStandardSchemeFactory();
-    private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new append_resultTupleSchemeFactory();
-
-    public @org.apache.thrift.annotation.Nullable java.util.List<TCell> success; // required
-    public @org.apache.thrift.annotation.Nullable IOError io; // required
 
-    /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
-    public enum _Fields implements org.apache.thrift.TFieldIdEnum {
-      SUCCESS((short)0, "success"),
-      IO((short)1, "io");
-
-      private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>();
-
-      static {
-        for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) {
-          byName.put(field.getFieldName(), field);
-        }
-      }
-
-      /**
-       * Find the _Fields constant that matches fieldId, or null if its not found.
-       */
-      @org.apache.thrift.annotation.Nullable
-      public static _Fields findByThriftId(int fieldId) {
-        switch(fieldId) {
-          case 0: // SUCCESS
-            return SUCCESS;
-          case 1: // IO
-            return IO;
-          default:
-            return null;
-        }
-      }
-
-      /**
-       * Find the _Fields constant that matches fieldId, throwing an exception
-       * if it is not found.
-       */
-      public static _Fields findByThriftIdOrThrow(int fieldId) {
-        _Fields fields = findByThriftId(fieldId);
-        if (fields == null) throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!");
-        return fields;
-      }
-
-      /**
-       * Find the _Fields constant that matches name, or null if its not found.
-       */
-      @org.apache.thrift.annotation.Nullable
-      public static _Fields findByName(java.lang.String name) {
-        return byName.get(name);
-      }
-
-      private final short _thriftId;
-      private final java.lang.String _fieldName;
-
-      _Fields(short thriftId, java.lang.String fieldName) {
-        _thriftId = thriftId;
-        _fieldName = fieldName;
-      }
-
-      public short getThriftFieldId() {
-        return _thriftId;
-      }
-
-      public java.lang.String getFieldName() {
-        return _fieldName;
+    public void setColumnIsSet(boolean value) {
+      if (!value) {
+        this.column = null;
       }
     }
 
-    // isset id assignments
-    public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
-    static {
-      java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
-      tmpMap.put(_Fields.SUCCESS, new org.apache.thrift.meta_data.FieldMetaData("success", org.apache.thrift.TFieldRequirementType.DEFAULT, 
-          new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, 
-              new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, TCell.class))));
-      tmpMap.put(_Fields.IO, new org.apache.thrift.meta_data.FieldMetaData("io", org.apache.thrift.TFieldRequirementType.DEFAULT, 
-          new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, IOError.class)));
-      metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
-      org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(append_result.class, metaDataMap);
-    }
-
-    public append_result() {
+    /**
+     * the expected value for the column parameter, if not
+     * provided the check is for the non-existence of the
+     * column in question
+     */
+    public byte[] getValue() {
+      setValue(org.apache.thrift.TBaseHelper.rightSize(value));
+      return value == null ? null : value.array();
     }
 
-    public append_result(
-      java.util.List<TCell> success,
-      IOError io)
-    {
-      this();
-      this.success = success;
-      this.io = io;
+    public java.nio.ByteBuffer bufferForValue() {
+      return org.apache.thrift.TBaseHelper.copyBinary(value);
     }
 
     /**
-     * Performs a deep copy on <i>other</i>.
+     * the expected value for the column parameter, if not
+     * provided the check is for the non-existence of the
+     * column in question
      */
-    public append_result(append_result other) {
-      if (other.isSetSuccess()) {
-        java.util.List<TCell> __this__success = new java.util.ArrayList<TCell>(other.success.size());
-        for (TCell other_element : other.success) {
-          __this__success.add(new TCell(other_element));
-        }
-        this.success = __this__success;
-      }
-      if (other.isSetIo()) {
-        this.io = new IOError(other.io);
-      }
-    }
-
-    public append_result deepCopy() {
-      return new append_result(this);
+    public checkAndPut_args setValue(byte[] value) {
+      this.value = value == null ? (java.nio.ByteBuffer)null     : java.nio.ByteBuffer.wrap(value.clone());
+      return this;
     }
 
-    @Override
-    public void clear() {
-      this.success = null;
-      this.io = null;
+    public checkAndPut_args setValue(@org.apache.thrift.annotation.Nullable java.nio.ByteBuffer value) {
+      this.value = org.apache.thrift.TBaseHelper.copyBinary(value);
+      return this;
     }
 
-    public int getSuccessSize() {
-      return (this.success == null) ? 0 : this.success.size();
+    public void unsetValue() {
+      this.value = null;
     }
 
-    @org.apache.thrift.annotation.Nullable
-    public java.util.Iterator<TCell> getSuccessIterator() {
-      return (this.success == null) ? null : this.success.iterator();
+    /** Returns true if field value is set (has been assigned a value) and false otherwise */
+    public boolean isSetValue() {
+      return this.value != null;
     }
 
-    public void addToSuccess(TCell elem) {
-      if (this.success == null) {
-        this.success = new java.util.ArrayList<TCell>();
+    public void setValueIsSet(boolean value) {
+      if (!value) {
+        this.value = null;
       }
-      this.success.add(elem);
     }
 
+    /**
+     * mutation for the put
+     */
     @org.apache.thrift.annotation.Nullable
-    public java.util.List<TCell> getSuccess() {
-      return this.success;
+    public Mutation getMput() {
+      return this.mput;
     }
 
-    public append_result setSuccess(@org.apache.thrift.annotation.Nullable java.util.List<TCell> success) {
-      this.success = success;
+    /**
+     * mutation for the put
+     */
+    public checkAndPut_args setMput(@org.apache.thrift.annotation.Nullable Mutation mput) {
+      this.mput = mput;
       return this;
     }
 
-    public void unsetSuccess() {
-      this.success = null;
+    public void unsetMput() {
+      this.mput = null;
     }
 
-    /** Returns true if field success is set (has been assigned a value) and false otherwise */
-    public boolean isSetSuccess() {
-      return this.success != null;
+    /** Returns true if field mput is set (has been assigned a value) and false otherwise */
+    public boolean isSetMput() {
+      return this.mput != null;
     }
 
-    public void setSuccessIsSet(boolean value) {
+    public void setMputIsSet(boolean value) {
       if (!value) {
-        this.success = null;
+        this.mput = null;
+      }
+    }
+
+    public int getAttributesSize() {
+      return (this.attributes == null) ? 0 : this.attributes.size();
+    }
+
+    public void putToAttributes(java.nio.ByteBuffer key, java.nio.ByteBuffer val) {
+      if (this.attributes == null) {
+        this.attributes = new java.util.HashMap<java.nio.ByteBuffer,java.nio.ByteBuffer>();
       }
+      this.attributes.put(key, val);
     }
 
+    /**
+     * Mutation attributes
+     */
     @org.apache.thrift.annotation.Nullable
-    public IOError getIo() {
-      return this.io;
+    public java.util.Map<java.nio.ByteBuffer,java.nio.ByteBuffer> getAttributes() {
+      return this.attributes;
     }
 
-    public append_result setIo(@org.apache.thrift.annotation.Nullable IOError io) {
-      this.io = io;
+    /**
+     * Mutation attributes
+     */
+    public checkAndPut_args setAttributes(@org.apache.thrift.annotation.Nullable java.util.Map<java.nio.ByteBuffer,java.nio.ByteBuffer> attributes) {
+      this.attributes = attributes;
       return this;
     }
 
-    public void unsetIo() {
-      this.io = null;
+    public void unsetAttributes() {
+      this.attributes = null;
     }
 
-    /** Returns true if field io is set (has been assigned a value) and false otherwise */
-    public boolean isSetIo() {
-      return this.io != null;
+    /** Returns true if field attributes is set (has been assigned a value) and false otherwise */
+    public boolean isSetAttributes() {
+      return this.attributes != null;
     }
 
-    public void setIoIsSet(boolean value) {
+    public void setAttributesIsSet(boolean value) {
       if (!value) {
-        this.io = null;
+        this.attributes = null;
       }
     }
 
     public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) {
       switch (field) {
-      case SUCCESS:
+      case TABLE_NAME:
         if (value == null) {
-          unsetSuccess();
+          unsetTableName();
         } else {
-          setSuccess((java.util.List<TCell>)value);
+          if (value instanceof byte[]) {
+            setTableName((byte[])value);
+          } else {
+            setTableName((java.nio.ByteBuffer)value);
+          }
         }
         break;
 
-      case IO:
+      case ROW:
         if (value == null) {
-          unsetIo();
+          unsetRow();
         } else {
-          setIo((IOError)value);
+          if (value instanceof byte[]) {
+            setRow((byte[])value);
+          } else {
+            setRow((java.nio.ByteBuffer)value);
+          }
+        }
+        break;
+
+      case COLUMN:
+        if (value == null) {
+          unsetColumn();
+        } else {
+          if (value instanceof byte[]) {
+            setColumn((byte[])value);
+          } else {
+            setColumn((java.nio.ByteBuffer)value);
+          }
+        }
+        break;
+
+      case VALUE:
+        if (value == null) {
+          unsetValue();
+        } else {
+          if (value instanceof byte[]) {
+            setValue((byte[])value);
+          } else {
+            setValue((java.nio.ByteBuffer)value);
+          }
+        }
+        break;
+
+      case MPUT:
+        if (value == null) {
+          unsetMput();
+        } else {
+          setMput((Mutation)value);
+        }
+        break;
+
+      case ATTRIBUTES:
+        if (value == null) {
+          unsetAttributes();
+        } else {
+          setAttributes((java.util.Map<java.nio.ByteBuffer,java.nio.ByteBuffer>)value);
         }
         break;
 
@@ -59174,11 +58813,23 @@ public class Hbase {
     @org.apache.thrift.annotation.Nullable
     public java.lang.Object getFieldValue(_Fields field) {
       switch (field) {
-      case SUCCESS:
-        return getSuccess();
+      case TABLE_NAME:
+        return getTableName();
 
-      case IO:
-        return getIo();
+      case ROW:
+        return getRow();
+
+      case COLUMN:
+        return getColumn();
+
+      case VALUE:
+        return getValue();
+
+      case MPUT:
+        return getMput();
+
+      case ATTRIBUTES:
+        return getAttributes();
 
       }
       throw new java.lang.IllegalStateException();
@@ -59191,10 +58842,18 @@ public class Hbase {
       }
 
       switch (field) {
-      case SUCCESS:
-        return isSetSuccess();
-      case IO:
-        return isSetIo();
+      case TABLE_NAME:
+        return isSetTableName();
+      case ROW:
+        return isSetRow();
+      case COLUMN:
+        return isSetColumn();
+      case VALUE:
+        return isSetValue();
+      case MPUT:
+        return isSetMput();
+      case ATTRIBUTES:
+        return isSetAttributes();
       }
       throw new java.lang.IllegalStateException();
     }
@@ -59203,32 +58862,68 @@ public class Hbase {
     public boolean equals(java.lang.Object that) {
       if (that == null)
         return false;
-      if (that instanceof append_result)
-        return this.equals((append_result)that);
+      if (that instanceof checkAndPut_args)
+        return this.equals((checkAndPut_args)that);
       return false;
     }
 
-    public boolean equals(append_result that) {
+    public boolean equals(checkAndPut_args that) {
       if (that == null)
         return false;
       if (this == that)
         return true;
 
-      boolean this_present_success = true && this.isSetSuccess();
-      boolean that_present_success = true && that.isSetSuccess();
-      if (this_present_success || that_present_success) {
-        if (!(this_present_success && that_present_success))
+      boolean this_present_tableName = true && this.isSetTableName();
+      boolean that_present_tableName = true && that.isSetTableName();
+      if (this_present_tableName || that_present_tableName) {
+        if (!(this_present_tableName && that_present_tableName))
           return false;
-        if (!this.success.equals(that.success))
+        if (!this.tableName.equals(that.tableName))
           return false;
       }
 
-      boolean this_present_io = true && this.isSetIo();
-      boolean that_present_io = true && that.isSetIo();
-      if (this_present_io || that_present_io) {
-        if (!(this_present_io && that_present_io))
+      boolean this_present_row = true && this.isSetRow();
+      boolean that_present_row = true && that.isSetRow();
+      if (this_present_row || that_present_row) {
+        if (!(this_present_row && that_present_row))
           return false;
-        if (!this.io.equals(that.io))
+        if (!this.row.equals(that.row))
+          return false;
+      }
+
+      boolean this_present_column = true && this.isSetColumn();
+      boolean that_present_column = true && that.isSetColumn();
+      if (this_present_column || that_present_column) {
+        if (!(this_present_column && that_present_column))
+          return false;
+        if (!this.column.equals(that.column))
+          return false;
+      }
+
+      boolean this_present_value = true && this.isSetValue();
+      boolean that_present_value = true && that.isSetValue();
+      if (this_present_value || that_present_value) {
+        if (!(this_present_value && that_present_value))
+          return false;
+        if (!this.value.equals(that.value))
+          return false;
+      }
+
+      boolean this_present_mput = true && this.isSetMput();
+      boolean that_present_mput = true && that.isSetMput();
+      if (this_present_mput || that_present_mput) {
+        if (!(this_present_mput && that_present_mput))
+          return false;
+        if (!this.mput.equals(that.mput))
+          return false;
+      }
+
+      boolean this_present_attributes = true && this.isSetAttributes();
+      boolean that_present_attributes = true && that.isSetAttributes();
+      if (this_present_attributes || that_present_attributes) {
+        if (!(this_present_attributes && that_present_attributes))
+          return false;
+        if (!this.attributes.equals(that.attributes))
           return false;
       }
 
@@ -59239,41 +58934,97 @@ public class Hbase {
     public int hashCode() {
       int hashCode = 1;
 
-      hashCode = hashCode * 8191 + ((isSetSuccess()) ? 131071 : 524287);
-      if (isSetSuccess())
-        hashCode = hashCode * 8191 + success.hashCode();
+      hashCode = hashCode * 8191 + ((isSetTableName()) ? 131071 : 524287);
+      if (isSetTableName())
+        hashCode = hashCode * 8191 + tableName.hashCode();
 
-      hashCode = hashCode * 8191 + ((isSetIo()) ? 131071 : 524287);
-      if (isSetIo())
-        hashCode = hashCode * 8191 + io.hashCode();
+      hashCode = hashCode * 8191 + ((isSetRow()) ? 131071 : 524287);
+      if (isSetRow())
+        hashCode = hashCode * 8191 + row.hashCode();
+
+      hashCode = hashCode * 8191 + ((isSetColumn()) ? 131071 : 524287);
+      if (isSetColumn())
+        hashCode = hashCode * 8191 + column.hashCode();
+
+      hashCode = hashCode * 8191 + ((isSetValue()) ? 131071 : 524287);
+      if (isSetValue())
+        hashCode = hashCode * 8191 + value.hashCode();
+
+      hashCode = hashCode * 8191 + ((isSetMput()) ? 131071 : 524287);
+      if (isSetMput())
+        hashCode = hashCode * 8191 + mput.hashCode();
+
+      hashCode = hashCode * 8191 + ((isSetAttributes()) ? 131071 : 524287);
+      if (isSetAttributes())
+        hashCode = hashCode * 8191 + attributes.hashCode();
 
       return hashCode;
     }
 
     @Override
-    public int compareTo(append_result other) {
+    public int compareTo(checkAndPut_args other) {
       if (!getClass().equals(other.getClass())) {
         return getClass().getName().compareTo(other.getClass().getName());
       }
 
       int lastComparison = 0;
 
-      lastComparison = java.lang.Boolean.valueOf(isSetSuccess()).compareTo(other.isSetSuccess());
+      lastComparison = java.lang.Boolean.valueOf(isSetTableName()).compareTo(other.isSetTableName());
       if (lastComparison != 0) {
         return lastComparison;
       }
-      if (isSetSuccess()) {
-        lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.success, other.success);
+      if (isSetTableName()) {
+        lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.tableName, other.tableName);
         if (lastComparison != 0) {
           return lastComparison;
         }
       }
-      lastComparison = java.lang.Boolean.valueOf(isSetIo()).compareTo(other.isSetIo());
+      lastComparison = java.lang.Boolean.valueOf(isSetRow()).compareTo(other.isSetRow());
       if (lastComparison != 0) {
         return lastComparison;
       }
-      if (isSetIo()) {
-        lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.io, other.io);
+      if (isSetRow()) {
+        lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.row, other.row);
+        if (lastComparison != 0) {
+          return lastComparison;
+        }
+      }
+      lastComparison = java.lang.Boolean.valueOf(isSetColumn()).compareTo(other.isSetColumn());
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+      if (isSetColumn()) {
+        lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.column, other.column);
+        if (lastComparison != 0) {
+          return lastComparison;
+        }
+      }
+      lastComparison = java.lang.Boolean.valueOf(isSetValue()).compareTo(other.isSetValue());
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+      if (isSetValue()) {
+        lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.value, other.value);
+        if (lastComparison != 0) {
+          return lastComparison;
+        }
+      }
+      lastComparison = java.lang.Boolean.valueOf(isSetMput()).compareTo(other.isSetMput());
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+      if (isSetMput()) {
+        lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.mput, other.mput);
+        if (lastComparison != 0) {
+          return lastComparison;
+        }
+      }
+      lastComparison = java.lang.Boolean.valueOf(isSetAttributes()).compareTo(other.isSetAttributes());
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+      if (isSetAttributes()) {
+        lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.attributes, other.attributes);
         if (lastComparison != 0) {
           return lastComparison;
         }
@@ -59292,26 +59043,58 @@ public class Hbase {
 
     public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
       scheme(oprot).write(oprot, this);
-      }
+    }
 
     @Override
     public java.lang.String toString() {
-      java.lang.StringBuilder sb = new java.lang.StringBuilder("append_result(");
+      java.lang.StringBuilder sb = new java.lang.StringBuilder("checkAndPut_args(");
       boolean first = true;
 
-      sb.append("success:");
-      if (this.success == null) {
+      sb.append("tableName:");
+      if (this.tableName == null) {
         sb.append("null");
       } else {
-        sb.append(this.success);
+        org.apache.thrift.TBaseHelper.toString(this.tableName, sb);
       }
       first = false;
       if (!first) sb.append(", ");
-      sb.append("io:");
-      if (this.io == null) {
+      sb.append("row:");
+      if (this.row == null) {
         sb.append("null");
       } else {
-        sb.append(this.io);
+        org.apache.thrift.TBaseHelper.toString(this.row, sb);
+      }
+      first = false;
+      if (!first) sb.append(", ");
+      sb.append("column:");
+      if (this.column == null) {
+        sb.append("null");
+      } else {
+        org.apache.thrift.TBaseHelper.toString(this.column, sb);
+      }
+      first = false;
+      if (!first) sb.append(", ");
+      sb.append("value:");
+      if (this.value == null) {
+        sb.append("null");
+      } else {
+        org.apache.thrift.TBaseHelper.toString(this.value, sb);
+      }
+      first = false;
+      if (!first) sb.append(", ");
+      sb.append("mput:");
+      if (this.mput == null) {
+        sb.append("null");
+      } else {
+        sb.append(this.mput);
+      }
+      first = false;
+      if (!first) sb.append(", ");
+      sb.append("attributes:");
+      if (this.attributes == null) {
+        sb.append("null");
+      } else {
+        sb.append(this.attributes);
       }
       first = false;
       sb.append(")");
@@ -59321,6 +59104,9 @@ public class Hbase {
     public void validate() throws org.apache.thrift.TException {
       // check for required fields
       // check for sub-struct validity
+      if (mput != null) {
+        mput.validate();
+      }
     }
 
     private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
@@ -59339,15 +59125,15 @@ public class Hbase {
       }
     }
 
-    private static class append_resultStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
-      public append_resultStandardScheme getScheme() {
-        return new append_resultStandardScheme();
+    private static class checkAndPut_argsStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
+      public checkAndPut_argsStandardScheme getScheme() {
+        return new checkAndPut_argsStandardScheme();
       }
     }
 
-    private static class append_resultStandardScheme extends org.apache.thrift.scheme.StandardScheme<append_result> {
+    private static class checkAndPut_argsStandardScheme extends org.apache.thrift.scheme.StandardScheme<checkAndPut_args> {
 
-      public void read(org.apache.thrift.protocol.TProtocol iprot, append_result struct) throws org.apache.thrift.TException {
+      public void read(org.apache.thrift.protocol.TProtocol iprot, checkAndPut_args struct) throws org.apache.thrift.TException {
         org.apache.thrift.protocol.TField schemeField;
         iprot.readStructBegin();
         while (true)
@@ -59357,30 +59143,63 @@ public class Hbase {
             break;
           }
           switch (schemeField.id) {
-            case 0: // SUCCESS
-              if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
-                {
-                  org.apache.thrift.protocol.TList _list590 = iprot.readListBegin();
-                  struct.success = new java.util.ArrayList<TCell>(_list590.size);
-                  @org.apache.thrift.annotation.Nullable TCell _elem591;
-                  for (int _i592 = 0; _i592 < _list590.size; ++_i592)
-                  {
-                    _elem591 = new TCell();
-                    _elem591.read(iprot);
-                    struct.success.add(_elem591);
-                  }
-                  iprot.readListEnd();
-                }
-                struct.setSuccessIsSet(true);
+            case 1: // TABLE_NAME
+              if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+                struct.tableName = iprot.readBinary();
+                struct.setTableNameIsSet(true);
               } else { 
                 org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
               }
               break;
-            case 1: // IO
+            case 2: // ROW
+              if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+                struct.row = iprot.readBinary();
+                struct.setRowIsSet(true);
+              } else { 
+                org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+              }
+              break;
+            case 3: // COLUMN
+              if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+                struct.column = iprot.readBinary();
+                struct.setColumnIsSet(true);
+              } else { 
+                org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+              }
+              break;
+            case 5: // VALUE
+              if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+                struct.value = iprot.readBinary();
+                struct.setValueIsSet(true);
+              } else { 
+                org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+              }
+              break;
+            case 6: // MPUT
               if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
-                struct.io = new IOError();
-                struct.io.read(iprot);
-                struct.setIoIsSet(true);
+                struct.mput = new Mutation();
+                struct.mput.read(iprot);
+                struct.setMputIsSet(true);
+              } else { 
+                org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+              }
+              break;
+            case 7: // ATTRIBUTES
+              if (schemeField.type == org.apache.thrift.protocol.TType.MAP) {
+                {
+                  org.apache.thrift.protocol.TMap _map590 = iprot.readMapBegin();
+                  struct.attributes = new java.util.HashMap<java.nio.ByteBuffer,java.nio.ByteBuffer>(2*_map590.size);
+                  @org.apache.thrift.annotation.Nullable java.nio.ByteBuffer _key591;
+                  @org.apache.thrift.annotation.Nullable java.nio.ByteBuffer _val592;
+                  for (int _i593 = 0; _i593 < _map590.size; ++_i593)
+                  {
+                    _key591 = iprot.readBinary();
+                    _val592 = iprot.readBinary();
+                    struct.attributes.put(_key591, _val592);
+                  }
+                  iprot.readMapEnd();
+                }
+                struct.setAttributesIsSet(true);
               } else { 
                 org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
               }
@@ -59396,88 +59215,151 @@ public class Hbase {
         struct.validate();
       }
 
-      public void write(org.apache.thrift.protocol.TProtocol oprot, append_result struct) throws org.apache.thrift.TException {
+      public void write(org.apache.thrift.protocol.TProtocol oprot, checkAndPut_args struct) throws org.apache.thrift.TException {
         struct.validate();
 
         oprot.writeStructBegin(STRUCT_DESC);
-        if (struct.success != null) {
-          oprot.writeFieldBegin(SUCCESS_FIELD_DESC);
+        if (struct.tableName != null) {
+          oprot.writeFieldBegin(TABLE_NAME_FIELD_DESC);
+          oprot.writeBinary(struct.tableName);
+          oprot.writeFieldEnd();
+        }
+        if (struct.row != null) {
+          oprot.writeFieldBegin(ROW_FIELD_DESC);
+          oprot.writeBinary(struct.row);
+          oprot.writeFieldEnd();
+        }
+        if (struct.column != null) {
+          oprot.writeFieldBegin(COLUMN_FIELD_DESC);
+          oprot.writeBinary(struct.column);
+          oprot.writeFieldEnd();
+        }
+        if (struct.value != null) {
+          oprot.writeFieldBegin(VALUE_FIELD_DESC);
+          oprot.writeBinary(struct.value);
+          oprot.writeFieldEnd();
+        }
+        if (struct.mput != null) {
+          oprot.writeFieldBegin(MPUT_FIELD_DESC);
+          struct.mput.write(oprot);
+          oprot.writeFieldEnd();
+        }
+        if (struct.attributes != null) {
+          oprot.writeFieldBegin(ATTRIBUTES_FIELD_DESC);
           {
-            oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.success.size()));
-            for (TCell _iter593 : struct.success)
+            oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRING, struct.attributes.size()));
+            for (java.util.Map.Entry<java.nio.ByteBuffer, java.nio.ByteBuffer> _iter594 : struct.attributes.entrySet())
             {
-              _iter593.write(oprot);
+              oprot.writeBinary(_iter594.getKey());
+              oprot.writeBinary(_iter594.getValue());
             }
-            oprot.writeListEnd();
+            oprot.writeMapEnd();
           }
           oprot.writeFieldEnd();
         }
-        if (struct.io != null) {
-          oprot.writeFieldBegin(IO_FIELD_DESC);
-          struct.io.write(oprot);
-          oprot.writeFieldEnd();
-        }
         oprot.writeFieldStop();
         oprot.writeStructEnd();
       }
 
     }
 
-    private static class append_resultTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
-      public append_resultTupleScheme getScheme() {
-        return new append_resultTupleScheme();
+    private static class checkAndPut_argsTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
+      public checkAndPut_argsTupleScheme getScheme() {
+        return new checkAndPut_argsTupleScheme();
       }
     }
 
-    private static class append_resultTupleScheme extends org.apache.thrift.scheme.TupleScheme<append_result> {
+    private static class checkAndPut_argsTupleScheme extends org.apache.thrift.scheme.TupleScheme<checkAndPut_args> {
 
       @Override
-      public void write(org.apache.thrift.protocol.TProtocol prot, append_result struct) throws org.apache.thrift.TException {
+      public void write(org.apache.thrift.protocol.TProtocol prot, checkAndPut_args struct) throws org.apache.thrift.TException {
         org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
         java.util.BitSet optionals = new java.util.BitSet();
-        if (struct.isSetSuccess()) {
+        if (struct.isSetTableName()) {
           optionals.set(0);
         }
-        if (struct.isSetIo()) {
+        if (struct.isSetRow()) {
           optionals.set(1);
         }
-        oprot.writeBitSet(optionals, 2);
-        if (struct.isSetSuccess()) {
+        if (struct.isSetColumn()) {
+          optionals.set(2);
+        }
+        if (struct.isSetValue()) {
+          optionals.set(3);
+        }
+        if (struct.isSetMput()) {
+          optionals.set(4);
+        }
+        if (struct.isSetAttributes()) {
+          optionals.set(5);
+        }
+        oprot.writeBitSet(optionals, 6);
+        if (struct.isSetTableName()) {
+          oprot.writeBinary(struct.tableName);
+        }
+        if (struct.isSetRow()) {
+          oprot.writeBinary(struct.row);
+        }
+        if (struct.isSetColumn()) {
+          oprot.writeBinary(struct.column);
+        }
+        if (struct.isSetValue()) {
+          oprot.writeBinary(struct.value);
+        }
+        if (struct.isSetMput()) {
+          struct.mput.write(oprot);
+        }
+        if (struct.isSetAttributes()) {
           {
-            oprot.writeI32(struct.success.size());
-            for (TCell _iter594 : struct.success)
+            oprot.writeI32(struct.attributes.size());
+            for (java.util.Map.Entry<java.nio.ByteBuffer, java.nio.ByteBuffer> _iter595 : struct.attributes.entrySet())
             {
-              _iter594.write(oprot);
+              oprot.writeBinary(_iter595.getKey());
+              oprot.writeBinary(_iter595.getValue());
             }
           }
         }
-        if (struct.isSetIo()) {
-          struct.io.write(oprot);
-        }
       }
 
       @Override
-      public void read(org.apache.thrift.protocol.TProtocol prot, append_result struct) throws org.apache.thrift.TException {
+      public void read(org.apache.thrift.protocol.TProtocol prot, checkAndPut_args struct) throws org.apache.thrift.TException {
         org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
-        java.util.BitSet incoming = iprot.readBitSet(2);
+        java.util.BitSet incoming = iprot.readBitSet(6);
         if (incoming.get(0)) {
+          struct.tableName = iprot.readBinary();
+          struct.setTableNameIsSet(true);
+        }
+        if (incoming.get(1)) {
+          struct.row = iprot.readBinary();
+          struct.setRowIsSet(true);
+        }
+        if (incoming.get(2)) {
+          struct.column = iprot.readBinary();
+          struct.setColumnIsSet(true);
+        }
+        if (incoming.get(3)) {
+          struct.value = iprot.readBinary();
+          struct.setValueIsSet(true);
+        }
+        if (incoming.get(4)) {
+          struct.mput = new Mutation();
+          struct.mput.read(iprot);
+          struct.setMputIsSet(true);
+        }
+        if (incoming.get(5)) {
           {
-            org.apache.thrift.protocol.TList _list595 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
-            struct.success = new java.util.ArrayList<TCell>(_list595.size);
-            @org.apache.thrift.annotation.Nullable TCell _elem596;
-            for (int _i597 = 0; _i597 < _list595.size; ++_i597)
+            org.apache.thrift.protocol.TMap _map596 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRING, iprot.readI32());
+            struct.attributes = new java.util.HashMap<java.nio.ByteBuffer,java.nio.ByteBuffer>(2*_map596.size);
+            @org.apache.thrift.annotation.Nullable java.nio.ByteBuffer _key597;
+            @org.apache.thrift.annotation.Nullable java.nio.ByteBuffer _val598;
+            for (int _i599 = 0; _i599 < _map596.size; ++_i599)
             {
-              _elem596 = new TCell();
-              _elem596.read(iprot);
-              struct.success.add(_elem596);
+              _key597 = iprot.readBinary();
+              _val598 = iprot.readBinary();
+              struct.attributes.put(_key597, _val598);
             }
           }
-          struct.setSuccessIsSet(true);
-        }
-        if (incoming.get(1)) {
-          struct.io = new IOError();
-          struct.io.read(iprot);
-          struct.setIoIsSet(true);
+          struct.setAttributesIsSet(true);
         }
       }
     }
@@ -59487,74 +59369,25 @@ public class Hbase {
     }
   }
 
-  public static class checkAndPut_args implements org.apache.thrift.TBase<checkAndPut_args, checkAndPut_args._Fields>, java.io.Serializable, Cloneable, Comparable<checkAndPut_args>   {
-    private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("checkAndPut_args");
+  public static class checkAndPut_result implements org.apache.thrift.TBase<checkAndPut_result, checkAndPut_result._Fields>, java.io.Serializable, Cloneable, Comparable<checkAndPut_result>   {
+    private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("checkAndPut_result");
 
-    private static final org.apache.thrift.protocol.TField TABLE_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("tableName", org.apache.thrift.protocol.TType.STRING, (short)1);
-    private static final org.apache.thrift.protocol.TField ROW_FIELD_DESC = new org.apache.thrift.protocol.TField("row", org.apache.thrift.protocol.TType.STRING, (short)2);
-    private static final org.apache.thrift.protocol.TField COLUMN_FIELD_DESC = new org.apache.thrift.protocol.TField("column", org.apache.thrift.protocol.TType.STRING, (short)3);
-    private static final org.apache.thrift.protocol.TField VALUE_FIELD_DESC = new org.apache.thrift.protocol.TField("value", org.apache.thrift.protocol.TType.STRING, (short)5);
-    private static final org.apache.thrift.protocol.TField MPUT_FIELD_DESC = new org.apache.thrift.protocol.TField("mput", org.apache.thrift.protocol.TType.STRUCT, (short)6);
-    private static final org.apache.thrift.protocol.TField ATTRIBUTES_FIELD_DESC = new org.apache.thrift.protocol.TField("attributes", org.apache.thrift.protocol.TType.MAP, (short)7);
+    private static final org.apache.thrift.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift.protocol.TField("success", org.apache.thrift.protocol.TType.BOOL, (short)0);
+    private static final org.apache.thrift.protocol.TField IO_FIELD_DESC = new org.apache.thrift.protocol.TField("io", org.apache.thrift.protocol.TType.STRUCT, (short)1);
+    private static final org.apache.thrift.protocol.TField IA_FIELD_DESC = new org.apache.thrift.protocol.TField("ia", org.apache.thrift.protocol.TType.STRUCT, (short)2);
 
-    private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new checkAndPut_argsStandardSchemeFactory();
-    private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new checkAndPut_argsTupleSchemeFactory();
+    private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new checkAndPut_resultStandardSchemeFactory();
+    private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new checkAndPut_resultTupleSchemeFactory();
 
-    /**
-     * name of table
-     */
-    public @org.apache.thrift.annotation.Nullable java.nio.ByteBuffer tableName; // required
-    /**
-     * row key
-     */
-    public @org.apache.thrift.annotation.Nullable java.nio.ByteBuffer row; // required
-    /**
-     * column name
-     */
-    public @org.apache.thrift.annotation.Nullable java.nio.ByteBuffer column; // required
-    /**
-     * the expected value for the column parameter, if not
-     * provided the check is for the non-existence of the
-     * column in question
-     */
-    public @org.apache.thrift.annotation.Nullable java.nio.ByteBuffer value; // required
-    /**
-     * mutation for the put
-     */
-    public @org.apache.thrift.annotation.Nullable Mutation mput; // required
-    /**
-     * Mutation attributes
-     */
-    public @org.apache.thrift.annotation.Nullable java.util.Map<java.nio.ByteBuffer,java.nio.ByteBuffer> attributes; // required
+    public boolean success; // required
+    public @org.apache.thrift.annotation.Nullable IOError io; // required
+    public @org.apache.thrift.annotation.Nullable IllegalArgument ia; // required
 
     /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
     public enum _Fields implements org.apache.thrift.TFieldIdEnum {
-      /**
-       * name of table
-       */
-      TABLE_NAME((short)1, "tableName"),
-      /**
-       * row key
-       */
-      ROW((short)2, "row"),
-      /**
-       * column name
-       */
-      COLUMN((short)3, "column"),
-      /**
-       * the expected value for the column parameter, if not
-       * provided the check is for the non-existence of the
-       * column in question
-       */
-      VALUE((short)5, "value"),
-      /**
-       * mutation for the put
-       */
-      MPUT((short)6, "mput"),
-      /**
-       * Mutation attributes
-       */
-      ATTRIBUTES((short)7, "attributes");
+      SUCCESS((short)0, "success"),
+      IO((short)1, "io"),
+      IA((short)2, "ia");
 
       private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>();
 
@@ -59570,18 +59403,12 @@ public class Hbase {
       @org.apache.thrift.annotation.Nullable
       public static _Fields findByThriftId(int fieldId) {
         switch(fieldId) {
-          case 1: // TABLE_NAME
-            return TABLE_NAME;
-          case 2: // ROW
-            return ROW;
-          case 3: // COLUMN
-            return COLUMN;
-          case 5: // VALUE
-            return VALUE;
-          case 6: // MPUT
-            return MPUT;
-          case 7: // ATTRIBUTES
-            return ATTRIBUTES;
+          case 0: // SUCCESS
+            return SUCCESS;
+          case 1: // IO
+            return IO;
+          case 2: // IA
+            return IA;
           default:
             return null;
         }
@@ -59623,397 +59450,158 @@ public class Hbase {
     }
 
     // isset id assignments
+    private static final int __SUCCESS_ISSET_ID = 0;
+    private byte __isset_bitfield = 0;
     public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
     static {
       java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
-      tmpMap.put(_Fields.TABLE_NAME, new org.apache.thrift.meta_data.FieldMetaData("tableName", org.apache.thrift.TFieldRequirementType.DEFAULT, 
-          new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING          , "Text")));
-      tmpMap.put(_Fields.ROW, new org.apache.thrift.meta_data.FieldMetaData("row", org.apache.thrift.TFieldRequirementType.DEFAULT, 
-          new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING          , "Text")));
-      tmpMap.put(_Fields.COLUMN, new org.apache.thrift.meta_data.FieldMetaData("column", org.apache.thrift.TFieldRequirementType.DEFAULT, 
-          new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING          , "Text")));
-      tmpMap.put(_Fields.VALUE, new org.apache.thrift.meta_data.FieldMetaData("value", org.apache.thrift.TFieldRequirementType.DEFAULT, 
-          new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING          , "Text")));
-      tmpMap.put(_Fields.MPUT, new org.apache.thrift.meta_data.FieldMetaData("mput", org.apache.thrift.TFieldRequirementType.DEFAULT, 
-          new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, Mutation.class)));
-      tmpMap.put(_Fields.ATTRIBUTES, new org.apache.thrift.meta_data.FieldMetaData("attributes", org.apache.thrift.TFieldRequirementType.DEFAULT, 
-          new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP, 
-              new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING              , "Text"), 
-              new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING              , "Text"))));
+      tmpMap.put(_Fields.SUCCESS, new org.apache.thrift.meta_data.FieldMetaData("success", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+          new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.BOOL)));
+      tmpMap.put(_Fields.IO, new org.apache.thrift.meta_data.FieldMetaData("io", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+          new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, IOError.class)));
+      tmpMap.put(_Fields.IA, new org.apache.thrift.meta_data.FieldMetaData("ia", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+          new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, IllegalArgument.class)));
       metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
-      org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(checkAndPut_args.class, metaDataMap);
+      org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(checkAndPut_result.class, metaDataMap);
     }
 
-    public checkAndPut_args() {
+    public checkAndPut_result() {
     }
 
-    public checkAndPut_args(
-      java.nio.ByteBuffer tableName,
-      java.nio.ByteBuffer row,
-      java.nio.ByteBuffer column,
-      java.nio.ByteBuffer value,
-      Mutation mput,
-      java.util.Map<java.nio.ByteBuffer,java.nio.ByteBuffer> attributes)
+    public checkAndPut_result(
+      boolean success,
+      IOError io,
+      IllegalArgument ia)
     {
       this();
-      this.tableName = org.apache.thrift.TBaseHelper.copyBinary(tableName);
-      this.row = org.apache.thrift.TBaseHelper.copyBinary(row);
-      this.column = org.apache.thrift.TBaseHelper.copyBinary(column);
-      this.value = org.apache.thrift.TBaseHelper.copyBinary(value);
-      this.mput = mput;
-      this.attributes = attributes;
+      this.success = success;
+      setSuccessIsSet(true);
+      this.io = io;
+      this.ia = ia;
     }
 
     /**
      * Performs a deep copy on <i>other</i>.
      */
-    public checkAndPut_args(checkAndPut_args other) {
-      if (other.isSetTableName()) {
-        this.tableName = org.apache.thrift.TBaseHelper.copyBinary(other.tableName);
-      }
-      if (other.isSetRow()) {
-        this.row = org.apache.thrift.TBaseHelper.copyBinary(other.row);
-      }
-      if (other.isSetColumn()) {
-        this.column = org.apache.thrift.TBaseHelper.copyBinary(other.column);
-      }
-      if (other.isSetValue()) {
-        this.value = org.apache.thrift.TBaseHelper.copyBinary(other.value);
-      }
-      if (other.isSetMput()) {
-        this.mput = new Mutation(other.mput);
+    public checkAndPut_result(checkAndPut_result other) {
+      __isset_bitfield = other.__isset_bitfield;
+      this.success = other.success;
+      if (other.isSetIo()) {
+        this.io = new IOError(other.io);
       }
-      if (other.isSetAttributes()) {
-        java.util.Map<java.nio.ByteBuffer,java.nio.ByteBuffer> __this__attributes = new java.util.HashMap<java.nio.ByteBuffer,java.nio.ByteBuffer>(other.attributes.size());
-        for (java.util.Map.Entry<java.nio.ByteBuffer, java.nio.ByteBuffer> other_element : other.attributes.entrySet()) {
-
-          java.nio.ByteBuffer other_element_key = other_element.getKey();
-          java.nio.ByteBuffer other_element_value = other_element.getValue();
-
-          java.nio.ByteBuffer __this__attributes_copy_key = org.apache.thrift.TBaseHelper.copyBinary(other_element_key);
-
-          java.nio.ByteBuffer __this__attributes_copy_value = org.apache.thrift.TBaseHelper.copyBinary(other_element_value);
-
-          __this__attributes.put(__this__attributes_copy_key, __this__attributes_copy_value);
-        }
-        this.attributes = __this__attributes;
+      if (other.isSetIa()) {
+        this.ia = new IllegalArgument(other.ia);
       }
     }
 
-    public checkAndPut_args deepCopy() {
-      return new checkAndPut_args(this);
+    public checkAndPut_result deepCopy() {
+      return new checkAndPut_result(this);
     }
 
     @Override
     public void clear() {
-      this.tableName = null;
-      this.row = null;
-      this.column = null;
-      this.value = null;
-      this.mput = null;
-      this.attributes = null;
-    }
-
-    /**
-     * name of table
-     */
-    public byte[] getTableName() {
-      setTableName(org.apache.thrift.TBaseHelper.rightSize(tableName));
-      return tableName == null ? null : tableName.array();
-    }
-
-    public java.nio.ByteBuffer bufferForTableName() {
-      return org.apache.thrift.TBaseHelper.copyBinary(tableName);
-    }
-
-    /**
-     * name of table
-     */
-    public checkAndPut_args setTableName(byte[] tableName) {
-      this.tableName = tableName == null ? (java.nio.ByteBuffer)null     : java.nio.ByteBuffer.wrap(tableName.clone());
-      return this;
-    }
-
-    public checkAndPut_args setTableName(@org.apache.thrift.annotation.Nullable java.nio.ByteBuffer tableName) {
-      this.tableName = org.apache.thrift.TBaseHelper.copyBinary(tableName);
-      return this;
-    }
-
-    public void unsetTableName() {
-      this.tableName = null;
-    }
-
-    /** Returns true if field tableName is set (has been assigned a value) and false otherwise */
-    public boolean isSetTableName() {
-      return this.tableName != null;
-    }
-
-    public void setTableNameIsSet(boolean value) {
-      if (!value) {
-        this.tableName = null;
-      }
-    }
-
-    /**
-     * row key
-     */
-    public byte[] getRow() {
-      setRow(org.apache.thrift.TBaseHelper.rightSize(row));
-      return row == null ? null : row.array();
-    }
-
-    public java.nio.ByteBuffer bufferForRow() {
-      return org.apache.thrift.TBaseHelper.copyBinary(row);
-    }
-
-    /**
-     * row key
-     */
-    public checkAndPut_args setRow(byte[] row) {
-      this.row = row == null ? (java.nio.ByteBuffer)null     : java.nio.ByteBuffer.wrap(row.clone());
-      return this;
-    }
-
-    public checkAndPut_args setRow(@org.apache.thrift.annotation.Nullable java.nio.ByteBuffer row) {
-      this.row = org.apache.thrift.TBaseHelper.copyBinary(row);
-      return this;
-    }
-
-    public void unsetRow() {
-      this.row = null;
-    }
-
-    /** Returns true if field row is set (has been assigned a value) and false otherwise */
-    public boolean isSetRow() {
-      return this.row != null;
-    }
-
-    public void setRowIsSet(boolean value) {
-      if (!value) {
-        this.row = null;
-      }
-    }
-
-    /**
-     * column name
-     */
-    public byte[] getColumn() {
-      setColumn(org.apache.thrift.TBaseHelper.rightSize(column));
-      return column == null ? null : column.array();
-    }
-
-    public java.nio.ByteBuffer bufferForColumn() {
-      return org.apache.thrift.TBaseHelper.copyBinary(column);
-    }
-
-    /**
-     * column name
-     */
-    public checkAndPut_args setColumn(byte[] column) {
-      this.column = column == null ? (java.nio.ByteBuffer)null     : java.nio.ByteBuffer.wrap(column.clone());
-      return this;
-    }
-
-    public checkAndPut_args setColumn(@org.apache.thrift.annotation.Nullable java.nio.ByteBuffer column) {
-      this.column = org.apache.thrift.TBaseHelper.copyBinary(column);
-      return this;
-    }
-
-    public void unsetColumn() {
-      this.column = null;
-    }
-
-    /** Returns true if field column is set (has been assigned a value) and false otherwise */
-    public boolean isSetColumn() {
-      return this.column != null;
-    }
-
-    public void setColumnIsSet(boolean value) {
-      if (!value) {
-        this.column = null;
-      }
-    }
-
-    /**
-     * the expected value for the column parameter, if not
-     * provided the check is for the non-existence of the
-     * column in question
-     */
-    public byte[] getValue() {
-      setValue(org.apache.thrift.TBaseHelper.rightSize(value));
-      return value == null ? null : value.array();
-    }
-
-    public java.nio.ByteBuffer bufferForValue() {
-      return org.apache.thrift.TBaseHelper.copyBinary(value);
+      setSuccessIsSet(false);
+      this.success = false;
+      this.io = null;
+      this.ia = null;
     }
 
-    /**
-     * the expected value for the column parameter, if not
-     * provided the check is for the non-existence of the
-     * column in question
-     */
-    public checkAndPut_args setValue(byte[] value) {
-      this.value = value == null ? (java.nio.ByteBuffer)null     : java.nio.ByteBuffer.wrap(value.clone());
-      return this;
+    public boolean isSuccess() {
+      return this.success;
     }
 
-    public checkAndPut_args setValue(@org.apache.thrift.annotation.Nullable java.nio.ByteBuffer value) {
-      this.value = org.apache.thrift.TBaseHelper.copyBinary(value);
+    public checkAndPut_result setSuccess(boolean success) {
+      this.success = success;
+      setSuccessIsSet(true);
       return this;
     }
 
-    public void unsetValue() {
-      this.value = null;
+    public void unsetSuccess() {
+      __isset_bitfield = org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __SUCCESS_ISSET_ID);
     }
 
-    /** Returns true if field value is set (has been assigned a value) and false otherwise */
-    public boolean isSetValue() {
-      return this.value != null;
+    /** Returns true if field success is set (has been assigned a value) and false otherwise */
+    public boolean isSetSuccess() {
+      return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __SUCCESS_ISSET_ID);
     }
 
-    public void setValueIsSet(boolean value) {
-      if (!value) {
-        this.value = null;
-      }
+    public void setSuccessIsSet(boolean value) {
+      __isset_bitfield = org.apache.thrift.EncodingUtils.setBit(__isset_bitfield, __SUCCESS_ISSET_ID, value);
     }
 
-    /**
-     * mutation for the put
-     */
     @org.apache.thrift.annotation.Nullable
-    public Mutation getMput() {
-      return this.mput;
+    public IOError getIo() {
+      return this.io;
     }
 
-    /**
-     * mutation for the put
-     */
-    public checkAndPut_args setMput(@org.apache.thrift.annotation.Nullable Mutation mput) {
-      this.mput = mput;
+    public checkAndPut_result setIo(@org.apache.thrift.annotation.Nullable IOError io) {
+      this.io = io;
       return this;
     }
 
-    public void unsetMput() {
-      this.mput = null;
+    public void unsetIo() {
+      this.io = null;
     }
 
-    /** Returns true if field mput is set (has been assigned a value) and false otherwise */
-    public boolean isSetMput() {
-      return this.mput != null;
+    /** Returns true if field io is set (has been assigned a value) and false otherwise */
+    public boolean isSetIo() {
+      return this.io != null;
     }
 
-    public void setMputIsSet(boolean value) {
+    public void setIoIsSet(boolean value) {
       if (!value) {
-        this.mput = null;
-      }
-    }
-
-    public int getAttributesSize() {
-      return (this.attributes == null) ? 0 : this.attributes.size();
-    }
-
-    public void putToAttributes(java.nio.ByteBuffer key, java.nio.ByteBuffer val) {
-      if (this.attributes == null) {
-        this.attributes = new java.util.HashMap<java.nio.ByteBuffer,java.nio.ByteBuffer>();
+        this.io = null;
       }
-      this.attributes.put(key, val);
     }
 
-    /**
-     * Mutation attributes
-     */
     @org.apache.thrift.annotation.Nullable
-    public java.util.Map<java.nio.ByteBuffer,java.nio.ByteBuffer> getAttributes() {
-      return this.attributes;
+    public IllegalArgument getIa() {
+      return this.ia;
     }
 
-    /**
-     * Mutation attributes
-     */
-    public checkAndPut_args setAttributes(@org.apache.thrift.annotation.Nullable java.util.Map<java.nio.ByteBuffer,java.nio.ByteBuffer> attributes) {
-      this.attributes = attributes;
+    public checkAndPut_result setIa(@org.apache.thrift.annotation.Nullable IllegalArgument ia) {
+      this.ia = ia;
       return this;
     }
 
-    public void unsetAttributes() {
-      this.attributes = null;
+    public void unsetIa() {
+      this.ia = null;
     }
 
-    /** Returns true if field attributes is set (has been assigned a value) and false otherwise */
-    public boolean isSetAttributes() {
-      return this.attributes != null;
+    /** Returns true if field ia is set (has been assigned a value) and false otherwise */
+    public boolean isSetIa() {
+      return this.ia != null;
     }
 
-    public void setAttributesIsSet(boolean value) {
+    public void setIaIsSet(boolean value) {
       if (!value) {
-        this.attributes = null;
+        this.ia = null;
       }
     }
 
     public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) {
       switch (field) {
-      case TABLE_NAME:
-        if (value == null) {
-          unsetTableName();
-        } else {
-          if (value instanceof byte[]) {
-            setTableName((byte[])value);
-          } else {
-            setTableName((java.nio.ByteBuffer)value);
-          }
-        }
-        break;
-
-      case ROW:
-        if (value == null) {
-          unsetRow();
-        } else {
-          if (value instanceof byte[]) {
-            setRow((byte[])value);
-          } else {
-            setRow((java.nio.ByteBuffer)value);
-          }
-        }
-        break;
-
-      case COLUMN:
-        if (value == null) {
-          unsetColumn();
-        } else {
-          if (value instanceof byte[]) {
-            setColumn((byte[])value);
-          } else {
-            setColumn((java.nio.ByteBuffer)value);
-          }
-        }
-        break;
-
-      case VALUE:
+      case SUCCESS:
         if (value == null) {
-          unsetValue();
+          unsetSuccess();
         } else {
-          if (value instanceof byte[]) {
-            setValue((byte[])value);
-          } else {
-            setValue((java.nio.ByteBuffer)value);
-          }
+          setSuccess((java.lang.Boolean)value);
         }
         break;
 
-      case MPUT:
+      case IO:
         if (value == null) {
-          unsetMput();
+          unsetIo();
         } else {
-          setMput((Mutation)value);
+          setIo((IOError)value);
         }
         break;
 
-      case ATTRIBUTES:
+      case IA:
         if (value == null) {
-          unsetAttributes();
+          unsetIa();
         } else {
-          setAttributes((java.util.Map<java.nio.ByteBuffer,java.nio.ByteBuffer>)value);
+          setIa((IllegalArgument)value);
         }
         break;
 
@@ -60023,23 +59611,14 @@ public class Hbase {
     @org.apache.thrift.annotation.Nullable
     public java.lang.Object getFieldValue(_Fields field) {
       switch (field) {
-      case TABLE_NAME:
-        return getTableName();
-
-      case ROW:
-        return getRow();
-
-      case COLUMN:
-        return getColumn();
-
-      case VALUE:
-        return getValue();
+      case SUCCESS:
+        return isSuccess();
 
-      case MPUT:
-        return getMput();
+      case IO:
+        return getIo();
 
-      case ATTRIBUTES:
-        return getAttributes();
+      case IA:
+        return getIa();
 
       }
       throw new java.lang.IllegalStateException();
@@ -60052,18 +59631,12 @@ public class Hbase {
       }
 
       switch (field) {
-      case TABLE_NAME:
-        return isSetTableName();
-      case ROW:
-        return isSetRow();
-      case COLUMN:
-        return isSetColumn();
-      case VALUE:
-        return isSetValue();
-      case MPUT:
-        return isSetMput();
-      case ATTRIBUTES:
-        return isSetAttributes();
+      case SUCCESS:
+        return isSetSuccess();
+      case IO:
+        return isSetIo();
+      case IA:
+        return isSetIa();
       }
       throw new java.lang.IllegalStateException();
     }
@@ -60072,68 +59645,41 @@ public class Hbase {
     public boolean equals(java.lang.Object that) {
       if (that == null)
         return false;
-      if (that instanceof checkAndPut_args)
-        return this.equals((checkAndPut_args)that);
+      if (that instanceof checkAndPut_result)
+        return this.equals((checkAndPut_result)that);
       return false;
     }
 
-    public boolean equals(checkAndPut_args that) {
+    public boolean equals(checkAndPut_result that) {
       if (that == null)
         return false;
       if (this == that)
         return true;
 
-      boolean this_present_tableName = true && this.isSetTableName();
-      boolean that_present_tableName = true && that.isSetTableName();
-      if (this_present_tableName || that_present_tableName) {
-        if (!(this_present_tableName && that_present_tableName))
-          return false;
-        if (!this.tableName.equals(that.tableName))
-          return false;
-      }
-
-      boolean this_present_row = true && this.isSetRow();
-      boolean that_present_row = true && that.isSetRow();
-      if (this_present_row || that_present_row) {
-        if (!(this_present_row && that_present_row))
-          return false;
-        if (!this.row.equals(that.row))
-          return false;
-      }
-
-      boolean this_present_column = true && this.isSetColumn();
-      boolean that_present_column = true && that.isSetColumn();
-      if (this_present_column || that_present_column) {
-        if (!(this_present_column && that_present_column))
-          return false;
-        if (!this.column.equals(that.column))
-          return false;
-      }
-
-      boolean this_present_value = true && this.isSetValue();
-      boolean that_present_value = true && that.isSetValue();
-      if (this_present_value || that_present_value) {
-        if (!(this_present_value && that_present_value))
+      boolean this_present_success = true;
+      boolean that_present_success = true;
+      if (this_present_success || that_present_success) {
+        if (!(this_present_success && that_present_success))
           return false;
-        if (!this.value.equals(that.value))
+        if (this.success != that.success)
           return false;
       }
 
-      boolean this_present_mput = true && this.isSetMput();
-      boolean that_present_mput = true && that.isSetMput();
-      if (this_present_mput || that_present_mput) {
-        if (!(this_present_mput && that_present_mput))
+      boolean this_present_io = true && this.isSetIo();
+      boolean that_present_io = true && that.isSetIo();
+      if (this_present_io || that_present_io) {
+        if (!(this_present_io && that_present_io))
           return false;
-        if (!this.mput.equals(that.mput))
+        if (!this.io.equals(that.io))
           return false;
       }
 
-      boolean this_present_attributes = true && this.isSetAttributes();
-      boolean that_present_attributes = true && that.isSetAttributes();
-      if (this_present_attributes || that_present_attributes) {
-        if (!(this_present_attributes && that_present_attributes))
+      boolean this_present_ia = true && this.isSetIa();
+      boolean that_present_ia = true && that.isSetIa();
+      if (this_present_ia || that_present_ia) {
+        if (!(this_present_ia && that_present_ia))
           return false;
-        if (!this.attributes.equals(that.attributes))
+        if (!this.ia.equals(that.ia))
           return false;
       }
 
@@ -60144,97 +59690,53 @@ public class Hbase {
     public int hashCode() {
       int hashCode = 1;
 
-      hashCode = hashCode * 8191 + ((isSetTableName()) ? 131071 : 524287);
-      if (isSetTableName())
-        hashCode = hashCode * 8191 + tableName.hashCode();
-
-      hashCode = hashCode * 8191 + ((isSetRow()) ? 131071 : 524287);
-      if (isSetRow())
-        hashCode = hashCode * 8191 + row.hashCode();
-
-      hashCode = hashCode * 8191 + ((isSetColumn()) ? 131071 : 524287);
-      if (isSetColumn())
-        hashCode = hashCode * 8191 + column.hashCode();
-
-      hashCode = hashCode * 8191 + ((isSetValue()) ? 131071 : 524287);
-      if (isSetValue())
-        hashCode = hashCode * 8191 + value.hashCode();
+      hashCode = hashCode * 8191 + ((success) ? 131071 : 524287);
 
-      hashCode = hashCode * 8191 + ((isSetMput()) ? 131071 : 524287);
-      if (isSetMput())
-        hashCode = hashCode * 8191 + mput.hashCode();
+      hashCode = hashCode * 8191 + ((isSetIo()) ? 131071 : 524287);
+      if (isSetIo())
+        hashCode = hashCode * 8191 + io.hashCode();
 
-      hashCode = hashCode * 8191 + ((isSetAttributes()) ? 131071 : 524287);
-      if (isSetAttributes())
-        hashCode = hashCode * 8191 + attributes.hashCode();
+      hashCode = hashCode * 8191 + ((isSetIa()) ? 131071 : 524287);
+      if (isSetIa())
+        hashCode = hashCode * 8191 + ia.hashCode();
 
       return hashCode;
     }
 
     @Override
-    public int compareTo(checkAndPut_args other) {
+    public int compareTo(checkAndPut_result other) {
       if (!getClass().equals(other.getClass())) {
         return getClass().getName().compareTo(other.getClass().getName());
       }
 
       int lastComparison = 0;
 
-      lastComparison = java.lang.Boolean.valueOf(isSetTableName()).compareTo(other.isSetTableName());
-      if (lastComparison != 0) {
-        return lastComparison;
-      }
-      if (isSetTableName()) {
-        lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.tableName, other.tableName);
-        if (lastComparison != 0) {
-          return lastComparison;
-        }
-      }
-      lastComparison = java.lang.Boolean.valueOf(isSetRow()).compareTo(other.isSetRow());
-      if (lastComparison != 0) {
-        return lastComparison;
-      }
-      if (isSetRow()) {
-        lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.row, other.row);
-        if (lastComparison != 0) {
-          return lastComparison;
-        }
-      }
-      lastComparison = java.lang.Boolean.valueOf(isSetColumn()).compareTo(other.isSetColumn());
-      if (lastComparison != 0) {
-        return lastComparison;
-      }
-      if (isSetColumn()) {
-        lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.column, other.column);
-        if (lastComparison != 0) {
-          return lastComparison;
-        }
-      }
-      lastComparison = java.lang.Boolean.valueOf(isSetValue()).compareTo(other.isSetValue());
+      lastComparison = java.lang.Boolean.valueOf(isSetSuccess()).compareTo(other.isSetSuccess());
       if (lastComparison != 0) {
         return lastComparison;
       }
-      if (isSetValue()) {
-        lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.value, other.value);
+      if (isSetSuccess()) {
+        lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.success, other.success);
         if (lastComparison != 0) {
           return lastComparison;
         }
       }
-      lastComparison = java.lang.Boolean.valueOf(isSetMput()).compareTo(other.isSetMput());
+      lastComparison = java.lang.Boolean.valueOf(isSetIo()).compareTo(other.isSetIo());
       if (lastComparison != 0) {
         return lastComparison;
       }
-      if (isSetMput()) {
-        lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.mput, other.mput);
+      if (isSetIo()) {
+        lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.io, other.io);
         if (lastComparison != 0) {
           return lastComparison;
         }
       }
-      lastComparison = java.lang.Boolean.valueOf(isSetAttributes()).compareTo(other.isSetAttributes());
+      lastComparison = java.lang.Boolean.valueOf(isSetIa()).compareTo(other.isSetIa());
       if (lastComparison != 0) {
         return lastComparison;
       }
-      if (isSetAttributes()) {
-        lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.attributes, other.attributes);
+      if (isSetIa()) {
+        lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.ia, other.ia);
         if (lastComparison != 0) {
           return lastComparison;
         }
@@ -60253,60 +59755,364 @@ public class Hbase {
 
     public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
       scheme(oprot).write(oprot, this);
-    }
+      }
 
     @Override
     public java.lang.String toString() {
-      java.lang.StringBuilder sb = new java.lang.StringBuilder("checkAndPut_args(");
+      java.lang.StringBuilder sb = new java.lang.StringBuilder("checkAndPut_result(");
       boolean first = true;
 
-      sb.append("tableName:");
-      if (this.tableName == null) {
-        sb.append("null");
-      } else {
-        org.apache.thrift.TBaseHelper.toString(this.tableName, sb);
-      }
+      sb.append("success:");
+      sb.append(this.success);
       first = false;
       if (!first) sb.append(", ");
-      sb.append("row:");
-      if (this.row == null) {
+      sb.append("io:");
+      if (this.io == null) {
         sb.append("null");
       } else {
-        org.apache.thrift.TBaseHelper.toString(this.row, sb);
+        sb.append(this.io);
       }
       first = false;
       if (!first) sb.append(", ");
-      sb.append("column:");
-      if (this.column == null) {
+      sb.append("ia:");
+      if (this.ia == null) {
         sb.append("null");
       } else {
-        org.apache.thrift.TBaseHelper.toString(this.column, sb);
+        sb.append(this.ia);
       }
       first = false;
-      if (!first) sb.append(", ");
-      sb.append("value:");
-      if (this.value == null) {
-        sb.append("null");
-      } else {
-        org.apache.thrift.TBaseHelper.toString(this.value, sb);
+      sb.append(")");
+      return sb.toString();
+    }
+
+    public void validate() throws org.apache.thrift.TException {
+      // check for required fields
+      // check for sub-struct validity
+    }
+
+    private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+      try {
+        write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+      } catch (org.apache.thrift.TException te) {
+        throw new java.io.IOException(te);
       }
-      first = false;
-      if (!first) sb.append(", ");
-      sb.append("mput:");
-      if (this.mput == null) {
-        sb.append("null");
-      } else {
-        sb.append(this.mput);
+    }
+
+    private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException {
+      try {
+        // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
+        __isset_bitfield = 0;
+        read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+      } catch (org.apache.thrift.TException te) {
+        throw new java.io.IOException(te);
       }
-      first = false;
-      if (!first) sb.append(", ");
-      sb.append("attributes:");
-      if (this.attributes == null) {
-        sb.append("null");
-      } else {
-        sb.append(this.attributes);
+    }
+
+    private static class checkAndPut_resultStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
+      public checkAndPut_resultStandardScheme getScheme() {
+        return new checkAndPut_resultStandardScheme();
+      }
+    }
+
+    private static class checkAndPut_resultStandardScheme extends org.apache.thrift.scheme.StandardScheme<checkAndPut_result> {
+
+      public void read(org.apache.thrift.protocol.TProtocol iprot, checkAndPut_result struct) throws org.apache.thrift.TException {
+        org.apache.thrift.protocol.TField schemeField;
+        iprot.readStructBegin();
+        while (true)
+        {
+          schemeField = iprot.readFieldBegin();
+          if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
+            break;
+          }
+          switch (schemeField.id) {
+            case 0: // SUCCESS
+              if (schemeField.type == org.apache.thrift.protocol.TType.BOOL) {
+                struct.success = iprot.readBool();
+                struct.setSuccessIsSet(true);
+              } else { 
+                org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+              }
+              break;
+            case 1: // IO
+              if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
+                struct.io = new IOError();
+                struct.io.read(iprot);
+                struct.setIoIsSet(true);
+              } else { 
+                org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+              }
+              break;
+            case 2: // IA
+              if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
+                struct.ia = new IllegalArgument();
+                struct.ia.read(iprot);
+                struct.setIaIsSet(true);
+              } else { 
+                org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+              }
+              break;
+            default:
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+          }
+          iprot.readFieldEnd();
+        }
+        iprot.readStructEnd();
+
+        // check for required fields of primitive type, which can't be checked in the validate method
+        struct.validate();
+      }
+
+      public void write(org.apache.thrift.protocol.TProtocol oprot, checkAndPut_result struct) throws org.apache.thrift.TException {
+        struct.validate();
+
+        oprot.writeStructBegin(STRUCT_DESC);
+        if (struct.isSetSuccess()) {
+          oprot.writeFieldBegin(SUCCESS_FIELD_DESC);
+          oprot.writeBool(struct.success);
+          oprot.writeFieldEnd();
+        }
+        if (struct.io != null) {
+          oprot.writeFieldBegin(IO_FIELD_DESC);
+          struct.io.write(oprot);
+          oprot.writeFieldEnd();
+        }
+        if (struct.ia != null) {
+          oprot.writeFieldBegin(IA_FIELD_DESC);
+          struct.ia.write(oprot);
+          oprot.writeFieldEnd();
+        }
+        oprot.writeFieldStop();
+        oprot.writeStructEnd();
+      }
+
+    }
+
+    private static class checkAndPut_resultTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
+      public checkAndPut_resultTupleScheme getScheme() {
+        return new checkAndPut_resultTupleScheme();
+      }
+    }
+
+    private static class checkAndPut_resultTupleScheme extends org.apache.thrift.scheme.TupleScheme<checkAndPut_result> {
+
+      @Override
+      public void write(org.apache.thrift.protocol.TProtocol prot, checkAndPut_result struct) throws org.apache.thrift.TException {
+        org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
+        java.util.BitSet optionals = new java.util.BitSet();
+        if (struct.isSetSuccess()) {
+          optionals.set(0);
+        }
+        if (struct.isSetIo()) {
+          optionals.set(1);
+        }
+        if (struct.isSetIa()) {
+          optionals.set(2);
+        }
+        oprot.writeBitSet(optionals, 3);
+        if (struct.isSetSuccess()) {
+          oprot.writeBool(struct.success);
+        }
... 84406 lines suppressed ...

Mime
View raw message