hbase-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From ecl...@apache.org
Subject [38/38] git commit: HBASE-12197 Move rest to its own module
Date Fri, 10 Oct 2014 16:53:54 GMT
HBASE-12197 Move rest to its own module

Summary:
Move hbase-rest to its own module.
copy test resources from hbase-server into hbase-rest's test resources.

Test Plan: Unit tests of the hbase-rest module pass.

Differential Revision: https://reviews.facebook.net/D24657


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6ddb2f19
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6ddb2f19
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6ddb2f19

Branch: refs/heads/master
Commit: 6ddb2f196544e98918730be513e7a10d431d496f
Parents: fcf583e
Author: Elliott Clark <eclark@apache.org>
Authored: Tue Oct 7 15:08:54 2014 -0700
Committer: Elliott Clark <eclark@apache.org>
Committed: Fri Oct 10 09:51:09 2014 -0700

----------------------------------------------------------------------
 hbase-assembly/pom.xml                          |   10 +
 hbase-rest/pom.xml                              |  296 ++
 .../org/apache/hadoop/hbase/rest/Constants.java |   78 +
 .../hadoop/hbase/rest/ExistsResource.java       |   75 +
 .../apache/hadoop/hbase/rest/MetricsREST.java   |  103 +
 .../hadoop/hbase/rest/MultiRowResource.java     |  108 +
 .../hbase/rest/ProtobufMessageHandler.java      |   46 +
 .../hbase/rest/ProtobufStreamingUtil.java       |  102 +
 .../apache/hadoop/hbase/rest/RESTServer.java    |  253 ++
 .../apache/hadoop/hbase/rest/RESTServlet.java   |  151 +
 .../hadoop/hbase/rest/RESTServletContainer.java |   76 +
 .../hadoop/hbase/rest/RegionsResource.java      |  104 +
 .../apache/hadoop/hbase/rest/ResourceBase.java  |   96 +
 .../hadoop/hbase/rest/ResourceConfig.java       |   31 +
 .../hadoop/hbase/rest/ResultGenerator.java      |   50 +
 .../apache/hadoop/hbase/rest/RootResource.java  |  106 +
 .../apache/hadoop/hbase/rest/RowResource.java   |  598 +++
 .../hadoop/hbase/rest/RowResultGenerator.java   |  122 +
 .../org/apache/hadoop/hbase/rest/RowSpec.java   |  407 ++
 .../hbase/rest/ScannerInstanceResource.java     |  201 +
 .../hadoop/hbase/rest/ScannerResource.java      |  164 +
 .../hbase/rest/ScannerResultGenerator.java      |  191 +
 .../hadoop/hbase/rest/SchemaResource.java       |  248 ++
 .../rest/StorageClusterStatusResource.java      |  109 +
 .../rest/StorageClusterVersionResource.java     |   79 +
 .../apache/hadoop/hbase/rest/TableResource.java |  196 +
 .../hadoop/hbase/rest/TableScanResource.java    |  168 +
 .../hadoop/hbase/rest/VersionResource.java      |  104 +
 .../apache/hadoop/hbase/rest/client/Client.java |  525 +++
 .../hadoop/hbase/rest/client/Cluster.java       |  110 +
 .../hadoop/hbase/rest/client/RemoteAdmin.java   |  401 ++
 .../hadoop/hbase/rest/client/RemoteHTable.java  |  858 ++++
 .../hadoop/hbase/rest/client/Response.java      |  155 +
 .../hadoop/hbase/rest/filter/AuthFilter.java    |   82 +
 .../hbase/rest/filter/GZIPRequestStream.java    |   58 +
 .../hbase/rest/filter/GZIPRequestWrapper.java   |   52 +
 .../hbase/rest/filter/GZIPResponseStream.java   |   78 +
 .../hbase/rest/filter/GZIPResponseWrapper.java  |  147 +
 .../hadoop/hbase/rest/filter/GzipFilter.java    |   85 +
 .../hadoop/hbase/rest/model/CellModel.java      |  209 +
 .../hadoop/hbase/rest/model/CellSetModel.java   |  152 +
 .../hbase/rest/model/ColumnSchemaModel.java     |  241 ++
 .../hadoop/hbase/rest/model/RowModel.java       |  151 +
 .../hadoop/hbase/rest/model/ScannerModel.java   |  852 ++++
 .../rest/model/StorageClusterStatusModel.java   |  790 ++++
 .../rest/model/StorageClusterVersionModel.java  |   78 +
 .../hadoop/hbase/rest/model/TableInfoModel.java |  159 +
 .../hadoop/hbase/rest/model/TableListModel.java |  113 +
 .../hadoop/hbase/rest/model/TableModel.java     |   84 +
 .../hbase/rest/model/TableRegionModel.java      |  196 +
 .../hbase/rest/model/TableSchemaModel.java      |  360 ++
 .../hadoop/hbase/rest/model/VersionModel.java   |  209 +
 .../org/apache/hadoop/hbase/rest/package.html   | 1660 ++++++++
 .../rest/protobuf/generated/CellMessage.java    |  731 ++++
 .../rest/protobuf/generated/CellSetMessage.java | 1521 +++++++
 .../protobuf/generated/ColumnSchemaMessage.java | 1904 +++++++++
 .../rest/protobuf/generated/ScannerMessage.java | 1578 +++++++
 .../generated/StorageClusterStatusMessage.java  | 3955 ++++++++++++++++++
 .../protobuf/generated/TableInfoMessage.java    | 1802 ++++++++
 .../protobuf/generated/TableListMessage.java    |  547 +++
 .../protobuf/generated/TableSchemaMessage.java  | 2125 ++++++++++
 .../rest/protobuf/generated/VersionMessage.java | 1147 +++++
 .../rest/provider/JAXBContextResolver.java      |   89 +
 .../hbase/rest/provider/JacksonProvider.java    |   31 +
 .../consumer/ProtobufMessageBodyConsumer.java   |   88 +
 .../producer/PlainTextMessageBodyProducer.java  |   74 +
 .../producer/ProtobufMessageBodyProducer.java   |   81 +
 .../resources/hbase-webapps/rest/index.html     |   20 +
 .../main/resources/hbase-webapps/rest/rest.jsp  |  117 +
 .../org/apache/hadoop/hbase/rest/XMLSchema.xsd  |  181 +
 .../hbase/rest/protobuf/CellMessage.proto       |   25 +
 .../hbase/rest/protobuf/CellSetMessage.proto    |   28 +
 .../rest/protobuf/ColumnSchemaMessage.proto     |   31 +
 .../hbase/rest/protobuf/ScannerMessage.proto    |   32 +
 .../protobuf/StorageClusterStatusMessage.proto  |   51 +
 .../hbase/rest/protobuf/TableInfoMessage.proto  |   30 +
 .../hbase/rest/protobuf/TableListMessage.proto  |   22 +
 .../rest/protobuf/TableSchemaMessage.proto      |   33 +
 .../hbase/rest/protobuf/VersionMessage.proto    |   26 +
 .../apache/hadoop/hbase/rest/DummyFilter.java   |   64 +
 .../hbase/rest/HBaseRESTTestingUtility.java     |   98 +
 .../hbase/rest/PerformanceEvaluation.java       | 1524 +++++++
 .../hadoop/hbase/rest/RowResourceBase.java      |  482 +++
 .../apache/hadoop/hbase/rest/TestDeleteRow.java |   97 +
 .../hbase/rest/TestGZIPResponseWrapper.java     |  118 +
 .../hbase/rest/TestGetAndPutResource.java       |  583 +++
 .../hadoop/hbase/rest/TestGzipFilter.java       |  161 +
 .../hadoop/hbase/rest/TestMultiRowResource.java |  181 +
 .../hadoop/hbase/rest/TestResourceFilter.java   |   62 +
 .../hadoop/hbase/rest/TestScannerResource.java  |  357 ++
 .../hbase/rest/TestScannersWithFilters.java     | 1002 +++++
 .../hbase/rest/TestScannersWithLabels.java      |  241 ++
 .../hadoop/hbase/rest/TestSchemaResource.java   |  192 +
 .../hadoop/hbase/rest/TestStatusResource.java   |  135 +
 .../hadoop/hbase/rest/TestTableResource.java    |  264 ++
 .../apache/hadoop/hbase/rest/TestTableScan.java |  615 +++
 .../hadoop/hbase/rest/TestVersionResource.java  |  179 +
 .../rest/client/TestRemoteAdminRetries.java     |  166 +
 .../rest/client/TestRemoteHTableRetries.java    |  194 +
 .../hbase/rest/client/TestRemoteTable.java      |  539 +++
 .../hadoop/hbase/rest/model/TestCellModel.java  |   75 +
 .../hbase/rest/model/TestCellSetModel.java      |  139 +
 .../hbase/rest/model/TestColumnSchemaModel.java |   78 +
 .../hadoop/hbase/rest/model/TestModelBase.java  |  135 +
 .../hadoop/hbase/rest/model/TestRowModel.java   |   76 +
 .../hbase/rest/model/TestScannerModel.java      |  110 +
 .../model/TestStorageClusterStatusModel.java    |  145 +
 .../model/TestStorageClusterVersionModel.java   |   54 +
 .../hbase/rest/model/TestTableInfoModel.java    |   89 +
 .../hbase/rest/model/TestTableListModel.java    |   66 +
 .../hbase/rest/model/TestTableRegionModel.java  |   88 +
 .../hbase/rest/model/TestTableSchemaModel.java  |  110 +
 .../hbase/rest/model/TestVersionModel.java      |   71 +
 hbase-rest/src/test/resources/hbase-site.xml    |  150 +
 hbase-rest/src/test/resources/hdfs-site.xml     |   32 +
 hbase-rest/src/test/resources/log4j.properties  |   66 +
 hbase-rest/src/test/resources/mapred-queues.xml |   75 +
 hbase-rest/src/test/resources/mapred-site.xml   |   34 +
 hbase-rest/src/test/resources/zoo.cfg           |   43 +
 hbase-server/pom.xml                            |   51 -
 .../org/apache/hadoop/hbase/rest/Constants.java |   78 -
 .../hadoop/hbase/rest/ExistsResource.java       |   75 -
 .../apache/hadoop/hbase/rest/MetricsREST.java   |  103 -
 .../hadoop/hbase/rest/MultiRowResource.java     |  108 -
 .../hbase/rest/ProtobufMessageHandler.java      |   46 -
 .../hbase/rest/ProtobufStreamingUtil.java       |  102 -
 .../apache/hadoop/hbase/rest/RESTServer.java    |  253 --
 .../apache/hadoop/hbase/rest/RESTServlet.java   |  151 -
 .../hadoop/hbase/rest/RESTServletContainer.java |   76 -
 .../hadoop/hbase/rest/RegionsResource.java      |  104 -
 .../apache/hadoop/hbase/rest/ResourceBase.java  |   96 -
 .../hadoop/hbase/rest/ResourceConfig.java       |   31 -
 .../hadoop/hbase/rest/ResultGenerator.java      |   50 -
 .../apache/hadoop/hbase/rest/RootResource.java  |  106 -
 .../apache/hadoop/hbase/rest/RowResource.java   |  598 ---
 .../hadoop/hbase/rest/RowResultGenerator.java   |  122 -
 .../org/apache/hadoop/hbase/rest/RowSpec.java   |  407 --
 .../hbase/rest/ScannerInstanceResource.java     |  201 -
 .../hadoop/hbase/rest/ScannerResource.java      |  164 -
 .../hbase/rest/ScannerResultGenerator.java      |  191 -
 .../hadoop/hbase/rest/SchemaResource.java       |  248 --
 .../rest/StorageClusterStatusResource.java      |  109 -
 .../rest/StorageClusterVersionResource.java     |   79 -
 .../apache/hadoop/hbase/rest/TableResource.java |  196 -
 .../hadoop/hbase/rest/TableScanResource.java    |  168 -
 .../hadoop/hbase/rest/VersionResource.java      |  104 -
 .../apache/hadoop/hbase/rest/client/Client.java |  525 ---
 .../hadoop/hbase/rest/client/Cluster.java       |  103 -
 .../hadoop/hbase/rest/client/RemoteAdmin.java   |  401 --
 .../hadoop/hbase/rest/client/RemoteHTable.java  |  858 ----
 .../hadoop/hbase/rest/client/Response.java      |  155 -
 .../hadoop/hbase/rest/filter/AuthFilter.java    |   82 -
 .../hbase/rest/filter/GZIPRequestStream.java    |   58 -
 .../hbase/rest/filter/GZIPRequestWrapper.java   |   52 -
 .../hbase/rest/filter/GZIPResponseStream.java   |   78 -
 .../hbase/rest/filter/GZIPResponseWrapper.java  |  147 -
 .../hadoop/hbase/rest/filter/GzipFilter.java    |   85 -
 .../hadoop/hbase/rest/model/CellModel.java      |  209 -
 .../hadoop/hbase/rest/model/CellSetModel.java   |  152 -
 .../hbase/rest/model/ColumnSchemaModel.java     |  241 --
 .../hadoop/hbase/rest/model/RowModel.java       |  151 -
 .../hadoop/hbase/rest/model/ScannerModel.java   |  852 ----
 .../rest/model/StorageClusterStatusModel.java   |  790 ----
 .../rest/model/StorageClusterVersionModel.java  |   78 -
 .../hadoop/hbase/rest/model/TableInfoModel.java |  159 -
 .../hadoop/hbase/rest/model/TableListModel.java |  113 -
 .../hadoop/hbase/rest/model/TableModel.java     |   84 -
 .../hbase/rest/model/TableRegionModel.java      |  196 -
 .../hbase/rest/model/TableSchemaModel.java      |  360 --
 .../hadoop/hbase/rest/model/VersionModel.java   |  209 -
 .../org/apache/hadoop/hbase/rest/package.html   | 1660 --------
 .../rest/protobuf/generated/CellMessage.java    |  731 ----
 .../rest/protobuf/generated/CellSetMessage.java | 1521 -------
 .../protobuf/generated/ColumnSchemaMessage.java | 1904 ---------
 .../rest/protobuf/generated/ScannerMessage.java | 1578 -------
 .../generated/StorageClusterStatusMessage.java  | 3955 ------------------
 .../protobuf/generated/TableInfoMessage.java    | 1802 --------
 .../protobuf/generated/TableListMessage.java    |  547 ---
 .../protobuf/generated/TableSchemaMessage.java  | 2125 ----------
 .../rest/protobuf/generated/VersionMessage.java | 1147 -----
 .../rest/provider/JAXBContextResolver.java      |   89 -
 .../hbase/rest/provider/JacksonProvider.java    |   31 -
 .../consumer/ProtobufMessageBodyConsumer.java   |   88 -
 .../producer/PlainTextMessageBodyProducer.java  |   74 -
 .../producer/ProtobufMessageBodyProducer.java   |   81 -
 .../resources/hbase-webapps/rest/index.html     |   20 -
 .../main/resources/hbase-webapps/rest/rest.jsp  |  117 -
 .../org/apache/hadoop/hbase/rest/XMLSchema.xsd  |  181 -
 .../hbase/rest/protobuf/CellMessage.proto       |   25 -
 .../hbase/rest/protobuf/CellSetMessage.proto    |   28 -
 .../rest/protobuf/ColumnSchemaMessage.proto     |   31 -
 .../hbase/rest/protobuf/ScannerMessage.proto    |   32 -
 .../protobuf/StorageClusterStatusMessage.proto  |   51 -
 .../hbase/rest/protobuf/TableInfoMessage.proto  |   30 -
 .../hbase/rest/protobuf/TableListMessage.proto  |   22 -
 .../rest/protobuf/TableSchemaMessage.proto      |   33 -
 .../hbase/rest/protobuf/VersionMessage.proto    |   26 -
 .../apache/hadoop/hbase/rest/DummyFilter.java   |   64 -
 .../hbase/rest/HBaseRESTTestingUtility.java     |   98 -
 .../hbase/rest/PerformanceEvaluation.java       | 1524 -------
 .../hadoop/hbase/rest/RowResourceBase.java      |  482 ---
 .../apache/hadoop/hbase/rest/TestDeleteRow.java |   97 -
 .../hbase/rest/TestGZIPResponseWrapper.java     |  118 -
 .../hbase/rest/TestGetAndPutResource.java       |  583 ---
 .../hadoop/hbase/rest/TestGzipFilter.java       |  161 -
 .../hadoop/hbase/rest/TestMultiRowResource.java |  181 -
 .../hadoop/hbase/rest/TestResourceFilter.java   |   62 -
 .../hadoop/hbase/rest/TestScannerResource.java  |  357 --
 .../hbase/rest/TestScannersWithFilters.java     | 1002 -----
 .../hbase/rest/TestScannersWithLabels.java      |  241 --
 .../hadoop/hbase/rest/TestSchemaResource.java   |  192 -
 .../hadoop/hbase/rest/TestStatusResource.java   |  117 -
 .../hadoop/hbase/rest/TestTableResource.java    |  264 --
 .../apache/hadoop/hbase/rest/TestTableScan.java |  615 ---
 .../hadoop/hbase/rest/TestVersionResource.java  |  179 -
 .../rest/client/TestRemoteAdminRetries.java     |  166 -
 .../rest/client/TestRemoteHTableRetries.java    |  194 -
 .../hbase/rest/client/TestRemoteTable.java      |  539 ---
 .../hadoop/hbase/rest/model/TestCellModel.java  |   75 -
 .../hbase/rest/model/TestCellSetModel.java      |  139 -
 .../hbase/rest/model/TestColumnSchemaModel.java |   78 -
 .../hadoop/hbase/rest/model/TestModelBase.java  |  135 -
 .../hadoop/hbase/rest/model/TestRowModel.java   |   76 -
 .../hbase/rest/model/TestScannerModel.java      |  110 -
 .../model/TestStorageClusterStatusModel.java    |  145 -
 .../model/TestStorageClusterVersionModel.java   |   54 -
 .../hbase/rest/model/TestTableInfoModel.java    |   89 -
 .../hbase/rest/model/TestTableListModel.java    |   66 -
 .../hbase/rest/model/TestTableRegionModel.java  |   88 -
 .../hbase/rest/model/TestTableSchemaModel.java  |  110 -
 .../hbase/rest/model/TestVersionModel.java      |   71 -
 hbase-thrift/pom.xml                            |   46 +-
 pom.xml                                         |    1 +
 233 files changed, 37274 insertions(+), 36565 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/6ddb2f19/hbase-assembly/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-assembly/pom.xml b/hbase-assembly/pom.xml
index 0852642..4aa7759 100644
--- a/hbase-assembly/pom.xml
+++ b/hbase-assembly/pom.xml
@@ -102,6 +102,16 @@
        <version>${project.version}</version>
     </dependency>
     <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-thrift</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-rest</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
        <groupId>org.apache.hbase</groupId>
        <artifactId>hbase-testing-util</artifactId>
        <version>${project.version}</version>

http://git-wip-us.apache.org/repos/asf/hbase/blob/6ddb2f19/hbase-rest/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-rest/pom.xml b/hbase-rest/pom.xml
new file mode 100644
index 0000000..648e5f6
--- /dev/null
+++ b/hbase-rest/pom.xml
@@ -0,0 +1,296 @@
+<?xml version="1.0"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <artifactId>hbase</artifactId>
+    <groupId>org.apache.hbase</groupId>
+    <version>2.0.0-SNAPSHOT</version>
+    <relativePath>..</relativePath>
+  </parent>
+  <artifactId>hbase-rest</artifactId>
+  <name>HBase - Rest</name>
+  <description>HBase Rest Server</description>
+
+  <build>
+    <!-- Makes sure the resources get added before they are processed
+      by placing this first -->
+    <resources>
+      <!-- Add the built webapps to the classpath -->
+      <resource>
+        <directory>${project.build.directory}</directory>
+        <includes>
+          <include>hbase-webapps/**</include>
+        </includes>
+      </resource>
+    </resources>
+    <testResources>
+      <testResource>
+        <directory>src/test/resources</directory>
+        <includes>
+          <include>**/**</include>
+        </includes>
+      </testResource>
+    </testResources>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-site-plugin</artifactId>
+        <configuration>
+          <skip>true</skip>
+        </configuration>
+      </plugin>
+      <plugin>
+        <!--Make it so assembly:single does nothing in here-->
+        <artifactId>maven-assembly-plugin</artifactId>
+        <version>${maven.assembly.version}</version>
+        <configuration>
+          <skipAssembly>true</skipAssembly>
+        </configuration>
+      </plugin>
+      <!-- Make a jar and put the sources in the jar -->
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-source-plugin</artifactId>
+      </plugin>
+
+      <!-- General ant tasks, bound to different build phases -->
+      <plugin>
+        <artifactId>maven-antrun-plugin</artifactId>
+        <executions>
+          <!-- Generate web app sources -->
+          <execution>
+            <id>generate</id>
+            <phase>generate-sources</phase>
+            <configuration>
+              <target>
+                <property name="build.webapps" location="${project.build.directory}/hbase-webapps"/>
+                <property name="src.webapps"
+                          location="${basedir}/src/main/resources/hbase-webapps"/>
+                <property name="generated.sources"
+                          location="${project.build.directory}/generated-sources"/>
+                <mkdir dir="${build.webapps}"/>
+                <copy todir="${build.webapps}">
+                  <fileset dir="${src.webapps}">
+                    <exclude name="**/*.jsp"/>
+                    <exclude name="**/.*"/>
+                    <exclude name="**/*~"/>
+                  </fileset>
+                </copy>
+                <!--The compile.classpath is passed in by maven -->
+                <taskdef classname="org.apache.jasper.JspC" name="jspcompiler"
+                         classpathref="maven.compile.classpath"/>
+                <mkdir dir="${build.webapps}/rest/WEB-INF"/>
+                <jspcompiler uriroot="${src.webapps}/rest" outputdir="${generated.sources}/java"
+                             package="org.apache.hadoop.hbase.generated.rest"
+                             webxml="${build.webapps}/rest/WEB-INF/web.xml"/>
+              </target>
+            </configuration>
+            <goals>
+              <goal>run</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>build-helper-maven-plugin</artifactId>
+        <executions>
+          <!-- Add the generated sources -->
+          <execution>
+            <id>jspcSource-packageInfo-source</id>
+            <phase>generate-sources</phase>
+            <goals>
+              <goal>add-source</goal>
+            </goals>
+            <configuration>
+              <sources>
+                <source>${project.build.directory}/generated-sources/java</source>
+              </sources>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <!-- Testing plugins -->
+      <plugin>
+        <artifactId>maven-surefire-plugin</artifactId>
+        <configuration>
+          <properties>
+            <property>
+              <name>listener</name>
+              <value>org.apache.hadoop.hbase.ServerResourceCheckerJUnitListener</value>
+            </property>
+          </properties>
+          <systemPropertyVariables>
+            <test.build.webapps>target/test-classes/webapps</test.build.webapps>
+          </systemPropertyVariables>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+
+  <dependencies>
+    <!-- Intra-project dependencies -->
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-common</artifactId>
+      <type>jar</type>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-annotations</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-client</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-server</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-hadoop-compat</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>${compat.module}</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-server</artifactId>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-testing-util</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-annotations</artifactId>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.mortbay.jetty</groupId>
+      <artifactId>jsp-2.1</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>tomcat</groupId>
+      <artifactId>jasper-compiler</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>tomcat</groupId>
+      <artifactId>jasper-runtime</artifactId>
+    </dependency>
+    <!-- REST dependencies -->
+    <dependency>
+      <groupId>com.google.protobuf</groupId>
+      <artifactId>protobuf-java</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>com.sun.jersey</groupId>
+      <artifactId>jersey-core</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>com.sun.jersey</groupId>
+      <artifactId>jersey-json</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>com.sun.jersey</groupId>
+      <artifactId>jersey-server</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>javax.xml.bind</groupId>
+      <artifactId>jaxb-api</artifactId>
+    </dependency>
+  </dependencies>
+  <profiles>
+    <!-- Skip the tests in this module -->
+    <profile>
+      <id>skipRestTets</id>
+      <activation>
+        <property>
+          <name>skipRestTests</name>
+        </property>
+      </activation>
+      <properties>
+        <surefire.skipFirstPart>true</surefire.skipFirstPart>
+        <surefire.skipSecondPart>true</surefire.skipSecondPart>
+      </properties>
+    </profile>
+    <profile>
+      <id>compile-protobuf</id>
+      <activation>
+        <property>
+          <name>compile-protobuf</name>
+        </property>
+      </activation>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-maven-plugins</artifactId>
+            <executions>
+              <execution>
+                <id>compile-protoc</id>
+                <phase>generate-sources</phase>
+                <goals>
+                  <goal>protoc</goal>
+                </goals>
+                <configuration>
+                  <imports>
+                    <param>${basedir}/src/main/resources/org/apache/hadoop/hbase/rest/protobuf
+                    </param>
+                  </imports>
+                  <source>
+                    <!-- These should be under src/main/protobuf -->
+                    <directory>${basedir}/src/main/resources/org/apache/hadoop/hbase/rest/protobuf
+                    </directory>
+                    <includes>
+                      <include>CellMessage.proto</include>
+                      <include>CellSetMessage.proto</include>
+                      <include>ColumnSchemaMessage.proto</include>
+                      <include>ScannerMessage.proto</include>
+                      <include>StorageClusterStatusMessage.proto</include>
+                      <include>TableInfoMessage.proto</include>
+                      <include>TableListMessage.proto</include>
+                      <include>TableSchemaMessage.proto</include>
+                      <include>VersionMessage.proto</include>
+                    </includes>
+                  </source>
+                  <output>${basedir}/src/main/java/</output>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
+  </profiles>
+</project>

http://git-wip-us.apache.org/repos/asf/hbase/blob/6ddb2f19/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/Constants.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/Constants.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/Constants.java
new file mode 100644
index 0000000..505dbb3
--- /dev/null
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/Constants.java
@@ -0,0 +1,78 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.rest;
+
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
+
+/**
+ * Common constants for org.apache.hadoop.hbase.rest
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
+public interface Constants {
+  // All constants in a public interface are 'public static final'
+
+  String VERSION_STRING = "0.0.3";
+
+  int DEFAULT_MAX_AGE = 60 * 60 * 4;  // 4 hours
+
+  int DEFAULT_LISTEN_PORT = 8080;
+
+  String MIMETYPE_TEXT = "text/plain";
+  String MIMETYPE_HTML = "text/html";
+  String MIMETYPE_XML = "text/xml";
+  String MIMETYPE_BINARY = "application/octet-stream";
+  String MIMETYPE_PROTOBUF = "application/x-protobuf";
+  String MIMETYPE_PROTOBUF_IETF = "application/protobuf";
+  String MIMETYPE_JSON = "application/json";
+
+  String CRLF = "\r\n";
+
+  String REST_KEYTAB_FILE = "hbase.rest.keytab.file";
+  String REST_KERBEROS_PRINCIPAL = "hbase.rest.kerberos.principal";
+  String REST_AUTHENTICATION_TYPE = "hbase.rest.authentication.type";
+  String REST_AUTHENTICATION_PRINCIPAL = "hbase.rest.authentication.kerberos.principal";
+
+  String REST_SSL_ENABLED = "hbase.rest.ssl.enabled";
+  String REST_SSL_KEYSTORE_STORE = "hbase.rest.ssl.keystore.store";
+  String REST_SSL_KEYSTORE_PASSWORD = "hbase.rest.ssl.keystore.password";
+  String REST_SSL_KEYSTORE_KEYPASSWORD = "hbase.rest.ssl.keystore.keypassword";
+
+  String REST_DNS_NAMESERVER = "hbase.rest.dns.nameserver";
+  String REST_DNS_INTERFACE = "hbase.rest.dns.interface";
+
+  String FILTER_CLASSES = "hbase.rest.filter.classes";
+  String SCAN_START_ROW = "startrow";
+  String SCAN_END_ROW = "endrow";
+  String SCAN_COLUMN = "column";
+  String SCAN_START_TIME = "starttime";
+  String SCAN_END_TIME = "endtime";
+  String SCAN_MAX_VERSIONS = "maxversions";
+  String SCAN_BATCH_SIZE = "batchsize";
+  String SCAN_LIMIT = "limit";
+  String SCAN_FETCH_SIZE = "hbase.rest.scan.fetchsize";
+  String SCAN_FILTER = "filter"; 
+  String CUSTOM_FILTERS = "hbase.rest.custom.filters"; 
+
+  String ROW_KEYS_PARAM_NAME = "row";
+  /** If this query parameter is present when processing row or scanner resources,
+      it disables server side block caching */
+  String NOCACHE_PARAM_NAME = "nocache";
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/6ddb2f19/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ExistsResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ExistsResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ExistsResource.java
new file mode 100644
index 0000000..90b3302
--- /dev/null
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ExistsResource.java
@@ -0,0 +1,75 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.rest;
+
+import java.io.IOException;
+
+import javax.ws.rs.GET;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.CacheControl;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.UriInfo;
+import javax.ws.rs.core.Response.ResponseBuilder;
+
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+
+/**
+ * Jersey resource answering table-existence checks: GET returns 200 (OK,
+ * empty body) when the enclosing table exists, 404 when it does not, and
+ * 503 when the check itself fails with an IOException.
+ */
+@InterfaceAudience.Private
+public class ExistsResource extends ResourceBase {
+
+  // Shared Cache-Control header: existence can change at any time, so
+  // responses must not be cached; proxy transformations remain allowed.
+  static CacheControl cacheControl;
+  static {
+    cacheControl = new CacheControl();
+    cacheControl.setNoCache(true);
+    cacheControl.setNoTransform(false);
+  }
+
+  TableResource tableResource;
+
+  /**
+   * Constructor
+   * @param tableResource parent table resource whose existence is checked
+   * @throws IOException declared for parity with sibling resource
+   *   constructors; nothing in this body throws it — NOTE(review): confirm
+   *   whether the declaration can be dropped.
+   */
+  public ExistsResource(TableResource tableResource) throws IOException {
+    super();
+    this.tableResource = tableResource;
+  }
+
+  /**
+   * Handles GET on the exists endpoint.
+   * @return 200 with no entity if the table exists, 404 ("Not found")
+   *   otherwise, 503 ("Unavailable") if the existence check fails
+   */
+  @GET
+  @Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
+    MIMETYPE_PROTOBUF_IETF, MIMETYPE_BINARY})
+  public Response get(final @Context UriInfo uriInfo) {
+    try {
+      if (!tableResource.exists()) {
+        return Response.status(Response.Status.NOT_FOUND)
+          .type(MIMETYPE_TEXT).entity("Not found" + CRLF)
+          .build();
+      }
+    } catch (IOException e) {
+      // Could not reach the cluster to perform the check.
+      return Response.status(Response.Status.SERVICE_UNAVAILABLE)
+        .type(MIMETYPE_TEXT).entity("Unavailable" + CRLF)
+        .build();
+    }
+    ResponseBuilder response = Response.ok();
+    response.cacheControl(cacheControl);
+    return response.build();
+  }
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/6ddb2f19/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MetricsREST.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MetricsREST.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MetricsREST.java
new file mode 100644
index 0000000..e31037a
--- /dev/null
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MetricsREST.java
@@ -0,0 +1,103 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.rest;
+
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
+
+import org.apache.hadoop.hbase.rest.MetricsRESTSource;
+
+/**
+ * Thin facade over {@link MetricsRESTSource} for updating REST gateway
+ * request counters. Every increment delegates straight to the source
+ * obtained from CompatibilitySingletonFactory.
+ * <p>
+ * NOTE(review): the "Sucessful" spelling is kept throughout because it
+ * mirrors the method names on MetricsRESTSource (an existing interface);
+ * renaming here would break the delegation.
+ */
+@InterfaceAudience.Private
+public class MetricsREST {
+
+  /** @return the underlying metrics source this facade delegates to */
+  public MetricsRESTSource getSource() {
+    return source;
+  }
+
+  private MetricsRESTSource source;
+
+  public MetricsREST() {
+     source = CompatibilitySingletonFactory.getInstance(MetricsRESTSource.class);
+  }
+  
+  /**
+   * @param inc How much to add to requests.
+   */
+  public void incrementRequests(final int inc) {
+    source.incrementRequests(inc);
+  }
+  
+  /**
+   * @param inc How much to add to sucessfulGetCount.
+   */
+  public void incrementSucessfulGetRequests(final int inc) {
+    source.incrementSucessfulGetRequests(inc);
+  }
+  
+  /**
+   * @param inc How much to add to sucessfulPutCount.
+   */
+  public void incrementSucessfulPutRequests(final int inc) {
+    source.incrementSucessfulPutRequests(inc);
+  }
+
+  /**
+   * @param inc How much to add to failedPutCount.
+   */
+  public void incrementFailedPutRequests(final int inc) {
+    source.incrementFailedPutRequests(inc);
+  }
+  
+  /**
+   * @param inc How much to add to failedGetCount.
+   */
+  public void incrementFailedGetRequests(final int inc) {
+    source.incrementFailedGetRequests(inc);
+  }
+
+  /**
+   * @param inc How much to add to sucessfulDeleteCount.
+   */
+  public void incrementSucessfulDeleteRequests(final int inc) {
+    source.incrementSucessfulDeleteRequests(inc);
+  }
+
+  /**
+   * @param inc How much to add to failedDeleteCount.
+   */
+  public void incrementFailedDeleteRequests(final int inc) {
+    source.incrementFailedDeleteRequests(inc);
+  }
+
+  /**
+   * @param inc How much to add to sucessfulScanCount.
+   */
+  // NOTE(review): this is the only synchronized increment in the class —
+  // confirm whether the keyword is needed; the siblings are unsynchronized.
+  public synchronized void incrementSucessfulScanRequests(final int inc) {
+    source.incrementSucessfulScanRequests(inc);
+  }
+
+  /**
+   * @param inc How much to add to failedScanCount.
+   */
+  public void incrementFailedScanRequests(final int inc) {
+    source.incrementFailedScanRequests(inc);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/6ddb2f19/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java
new file mode 100644
index 0000000..c88ac91
--- /dev/null
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java
@@ -0,0 +1,108 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.rest;
+
+import java.io.IOException;
+
+import javax.ws.rs.GET;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.MultivaluedMap;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.UriInfo;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.rest.model.CellModel;
+import org.apache.hadoop.hbase.rest.model.CellSetModel;
+import org.apache.hadoop.hbase.rest.model.RowModel;
+
+/**
+ * Resource that fetches several rows from one table in a single GET.
+ * Row keys are supplied as repeated "row" (ROW_KEYS_PARAM_NAME) query
+ * parameters; rows found are collected into a CellSetModel, and a 404 is
+ * returned only when none of the requested rows exist.
+ */
+@InterfaceAudience.Private
+public class MultiRowResource extends ResourceBase implements Constants {
+  private static final Log LOG = LogFactory.getLog(MultiRowResource.class);
+
+  TableResource tableResource;
+  // Max versions per cell to return; null means "use the scan default".
+  Integer versions = null;
+
+  /**
+   * Constructor
+   *
+   * @param tableResource parent table resource the rows are read from
+   * @param versions maximum versions to return as a decimal string, or null.
+   *   NOTE(review): a non-numeric value makes Integer.valueOf throw
+   *   NumberFormatException out of this constructor — confirm the caller
+   *   validates the "v" parameter first.
+   * @throws java.io.IOException
+   */
+  public MultiRowResource(TableResource tableResource, String versions) throws IOException {
+    super();
+    this.tableResource = tableResource;
+
+    if (versions != null) {
+      this.versions = Integer.valueOf(versions);
+
+    }
+  }
+
+  /**
+   * Handles GET: one CellSetModel row per requested key that exists.
+   * NOTE(review): if no "row" query parameter is present, params.get()
+   * returns null and the for-each below throws NPE, which is then turned
+   * into an error response by processException — confirm this is intended.
+   */
+  @GET
+  @Produces({ MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF, MIMETYPE_PROTOBUF_IETF })
+  public Response get(final @Context UriInfo uriInfo) {
+    MultivaluedMap<String, String> params = uriInfo.getQueryParameters();
+
+    servlet.getMetrics().incrementRequests(1);
+    try {
+      CellSetModel model = new CellSetModel();
+      for (String rk : params.get(ROW_KEYS_PARAM_NAME)) {
+        RowSpec rowSpec = new RowSpec(rk);
+
+        if (this.versions != null) {
+          rowSpec.setMaxVersions(this.versions);
+        }
+        // Block caching is enabled unless the "nocache" parameter is present.
+        ResultGenerator generator =
+          ResultGenerator.fromRowSpec(this.tableResource.getName(), rowSpec, null,
+            !params.containsKey(NOCACHE_PARAM_NAME));
+        Cell value = null;
+        RowModel rowModel = new RowModel(rk);
+        if (generator.hasNext()) {
+          // Drain all cells for this row into the model.
+          while ((value = generator.next()) != null) {
+            rowModel.addCell(new CellModel(CellUtil.cloneFamily(value), CellUtil
+                .cloneQualifier(value), value.getTimestamp(), CellUtil.cloneValue(value)));
+          }
+          model.addRow(rowModel);
+        } else {
+          // Missing rows are simply skipped; only logged at trace level.
+          LOG.trace("The row : " + rk + " not found in the table.");
+        }
+      }
+
+      if (model.getRows().size() == 0) {
+        // None of the requested rows were found.
+        servlet.getMetrics().incrementFailedGetRequests(1);
+        return Response.status(Response.Status.NOT_FOUND)
+            .type(MIMETYPE_TEXT).entity("No rows found." + CRLF)
+            .build();
+      } else {
+        servlet.getMetrics().incrementSucessfulGetRequests(1);
+        return Response.ok(model).build();
+      }
+    } catch (Exception e) {
+      servlet.getMetrics().incrementFailedGetRequests(1);
+      return processException(e);
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/6ddb2f19/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufMessageHandler.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufMessageHandler.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufMessageHandler.java
new file mode 100644
index 0000000..bbaf1f7
--- /dev/null
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufMessageHandler.java
@@ -0,0 +1,46 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.rest;
+
+import java.io.IOException;
+
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+
+/**
+ * Common interface for models capable of supporting protobuf marshalling
+ * and unmarshalling. Hooks up to the ProtobufMessageBodyConsumer and
+ * ProtobufMessageBodyProducer adapters.
+ */
+@InterfaceAudience.Private
+public interface ProtobufMessageHandler {
+  /**
+   * @return the protobuf representation of the model
+   */
+  byte[] createProtobufOutput();
+
+  /**
+   * Initialize the model from a protobuf representation.
+   * @param message the raw bytes of the protobuf message
+   * @return reference to self for convenience
+   * @throws IOException if the message cannot be parsed
+   */
+  ProtobufMessageHandler getObjectFromMessage(byte[] message)
+    throws IOException;
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/6ddb2f19/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingUtil.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingUtil.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingUtil.java
new file mode 100644
index 0000000..93bb940
--- /dev/null
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingUtil.java
@@ -0,0 +1,102 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.rest;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.List;
+
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.StreamingOutput;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.ResultScanner;
+import org.apache.hadoop.hbase.rest.model.CellModel;
+import org.apache.hadoop.hbase.rest.model.CellSetModel;
+import org.apache.hadoop.hbase.rest.model.RowModel;
+import org.apache.hadoop.hbase.util.Bytes;
+
+
+/**
+ * StreamingOutput implementation that drains a ResultScanner to the HTTP
+ * response as a sequence of protobuf-encoded CellSetModel chunks, each
+ * preceded by a 2-byte (short) length prefix. Up to {@code limit} rows are
+ * written, fetched from the scanner in batches of {@code fetchSize}.
+ * <p>
+ * NOTE(review): the scanner is never closed in this class — presumably the
+ * caller that constructed it is responsible; confirm.
+ */
+public class ProtobufStreamingUtil implements StreamingOutput {
+
+  private static final Log LOG = LogFactory.getLog(ProtobufStreamingUtil.class);
+  private String contentType;
+  private ResultScanner resultScanner;
+  private int limit;       // total rows the client asked for
+  private int fetchSize;   // rows pulled from the scanner per batch
+
+  protected ProtobufStreamingUtil(ResultScanner scanner, String type, int limit, int fetchSize) {
+    this.resultScanner = scanner;
+    this.contentType = type;
+    this.limit = limit;
+    this.fetchSize = fetchSize;
+    LOG.debug("Created ScanStreamingUtil with content type = " + this.contentType + " user limit : "
+        + this.limit + " scan fetch size : " + this.fetchSize);
+  }
+
+  /**
+   * Streams at most {@code limit} rows to {@code outStream}: a single batch
+   * when the limit fits in one fetch, otherwise repeated fetches until the
+   * limit is reached or the scanner is exhausted (empty batch).
+   */
+  @Override
+  public void write(OutputStream outStream) throws IOException, WebApplicationException {
+    Result[] rowsToSend;
+    if(limit < fetchSize){
+      rowsToSend = this.resultScanner.next(limit);
+      writeToStream(createModelFromResults(rowsToSend), this.contentType, outStream);
+    } else {
+      int count = limit;
+      while (count > 0) {
+        if (count < fetchSize) {
+          rowsToSend = this.resultScanner.next(count);
+        } else {
+          rowsToSend = this.resultScanner.next(this.fetchSize);
+        }
+        if(rowsToSend.length == 0){
+          // Scanner exhausted before reaching the limit.
+          break;
+        }
+        count = count - rowsToSend.length;
+        writeToStream(createModelFromResults(rowsToSend), this.contentType, outStream);
+      }
+    }
+  }
+
+  // Serializes one batch as protobuf, prefixed with its length as a short.
+  // NOTE(review): a serialized batch longer than Short.MAX_VALUE bytes would
+  // overflow the (short) cast and corrupt the prefix — confirm batch sizes
+  // are bounded upstream.
+  private void writeToStream(CellSetModel model, String contentType, OutputStream outStream)
+      throws IOException {
+    byte[] objectBytes = model.createProtobufOutput();
+    outStream.write(Bytes.toBytes((short)objectBytes.length));
+    outStream.write(objectBytes);
+    outStream.flush();
+    LOG.trace("Wrote " + model.getRows().size() + " rows to stream successfully.");
+  }
+
+  // Converts a batch of Results into a CellSetModel, cloning family,
+  // qualifier and value bytes for each cell.
+  // NOTE(review): Result.listCells() can return null for an empty Result in
+  // HBase — confirm the scanner never yields such Results here.
+  private CellSetModel createModelFromResults(Result[] results) {
+    CellSetModel cellSetModel = new CellSetModel();
+    for (Result rs : results) {
+      byte[] rowKey = rs.getRow();
+      RowModel rModel = new RowModel(rowKey);
+      List<Cell> kvs = rs.listCells();
+      for (Cell kv : kvs) {
+        rModel.addCell(new CellModel(CellUtil.cloneFamily(kv), CellUtil.cloneQualifier(kv), kv
+            .getTimestamp(), CellUtil.cloneValue(kv)));
+      }
+      cellSetModel.addRow(rModel);
+    }
+    return cellSetModel;
+  }
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/6ddb2f19/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
new file mode 100644
index 0000000..9f59b06
--- /dev/null
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
@@ -0,0 +1,253 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.rest;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.HelpFormatter;
+import org.apache.commons.cli.Options;
+import org.apache.commons.cli.ParseException;
+import org.apache.commons.cli.PosixParser;
+import org.apache.commons.lang.ArrayUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.http.InfoServer;
+import org.apache.hadoop.hbase.rest.filter.AuthFilter;
+import org.apache.hadoop.hbase.security.UserProvider;
+import org.apache.hadoop.hbase.util.HttpServerUtil;
+import org.apache.hadoop.hbase.util.Strings;
+import org.apache.hadoop.hbase.util.VersionInfo;
+import org.apache.hadoop.net.DNS;
+import org.mortbay.jetty.Connector;
+import org.mortbay.jetty.Server;
+import org.mortbay.jetty.nio.SelectChannelConnector;
+import org.mortbay.jetty.security.SslSelectChannelConnector;
+import org.mortbay.jetty.servlet.Context;
+import org.mortbay.jetty.servlet.FilterHolder;
+import org.mortbay.jetty.servlet.ServletHolder;
+import org.mortbay.thread.QueuedThreadPool;
+
+import com.google.common.base.Preconditions;
+import com.sun.jersey.api.json.JSONConfiguration;
+import com.sun.jersey.spi.container.servlet.ServletContainer;
+
+/**
+ * Main class for launching REST gateway as a servlet hosted by Jetty.
+ * <p>
+ * The following options are supported:
+ * <ul>
+ * <li>-p --port : service port</li>
+ * <li>-ro --readonly : server mode</li>
+ * </ul>
+ */
+@InterfaceAudience.Private
+public class RESTServer implements Constants {
+
+  /**
+   * Prints command-line usage for the REST server and terminates the JVM.
+   * @param options the parsed option definitions to describe
+   * @param exitCode process exit status to terminate with
+   */
+  private static void printUsageAndExit(Options options, int exitCode) {
+    HelpFormatter formatter = new HelpFormatter();
+    formatter.printHelp("bin/hbase rest start", "", options,
+      "\nTo run the REST server as a daemon, execute " +
+      "bin/hbase-daemon.sh start|stop rest [--infoport <port>] [-p <port>] [-ro]\n", true);
+    System.exit(exitCode);
+  }
+
+  /**
+   * The main method for the HBase rest server.
+   * @param args command-line arguments
+   * @throws Exception exception
+   */
+  public static void main(String[] args) throws Exception {
+    Log LOG = LogFactory.getLog("RESTServer");
+
+    VersionInfo.logVersion();
+    FilterHolder authFilter = null;
+    Configuration conf = HBaseConfiguration.create();
+    Class<? extends ServletContainer> containerClass = ServletContainer.class;
+    UserProvider userProvider = UserProvider.instantiate(conf);
+    // login the server principal (if using secure Hadoop)
+    if (userProvider.isHadoopSecurityEnabled() && userProvider.isHBaseSecurityEnabled()) {
+      String machineName = Strings.domainNamePointerToHostName(
+        DNS.getDefaultHost(conf.get(REST_DNS_INTERFACE, "default"),
+          conf.get(REST_DNS_NAMESERVER, "default")));
+      String keytabFilename = conf.get(REST_KEYTAB_FILE);
+      Preconditions.checkArgument(keytabFilename != null && !keytabFilename.isEmpty(),
+        REST_KEYTAB_FILE + " should be set if security is enabled");
+      String principalConfig = conf.get(REST_KERBEROS_PRINCIPAL);
+      Preconditions.checkArgument(principalConfig != null && !principalConfig.isEmpty(),
+        REST_KERBEROS_PRINCIPAL + " should be set if security is enabled");
+      // login() takes the configuration KEY names and resolves them itself.
+      userProvider.login(REST_KEYTAB_FILE, REST_KERBEROS_PRINCIPAL, machineName);
+      if (conf.get(REST_AUTHENTICATION_TYPE) != null) {
+        // With authentication configured, swap in the container that knows
+        // how to extract the remote user, and install the auth filter.
+        containerClass = RESTServletContainer.class;
+        authFilter = new FilterHolder();
+        authFilter.setClassName(AuthFilter.class.getName());
+        authFilter.setName("AuthenticationFilter");
+      }
+    }
+
+    RESTServlet servlet = RESTServlet.getInstance(conf, userProvider);
+
+    Options options = new Options();
+    options.addOption("p", "port", true, "Port to bind to [default: 8080]");
+    options.addOption("ro", "readonly", false, "Respond only to GET HTTP " +
+      "method requests [default: false]");
+    options.addOption(null, "infoport", true, "Port for web UI");
+
+    CommandLine commandLine = null;
+    try {
+      commandLine = new PosixParser().parse(options, args);
+    } catch (ParseException e) {
+      LOG.error("Could not parse: ", e);
+      printUsageAndExit(options, -1);
+    }
+
+    // check for user-defined port setting, if so override the conf
+    if (commandLine != null && commandLine.hasOption("port")) {
+      String val = commandLine.getOptionValue("port");
+      servlet.getConfiguration()
+          .setInt("hbase.rest.port", Integer.valueOf(val));
+      LOG.debug("port set to " + val);
+    }
+
+    // check if server should only process GET requests, if so override the conf
+    if (commandLine != null && commandLine.hasOption("readonly")) {
+      servlet.getConfiguration().setBoolean("hbase.rest.readonly", true);
+      LOG.debug("readonly set to true");
+    }
+
+    // check for user-defined info server port setting, if so override the conf
+    if (commandLine != null && commandLine.hasOption("infoport")) {
+      String val = commandLine.getOptionValue("infoport");
+      servlet.getConfiguration()
+          .setInt("hbase.rest.info.port", Integer.valueOf(val));
+      LOG.debug("Web UI port set to " + val);
+    }
+
+    @SuppressWarnings("unchecked")
+    List<String> remainingArgs = commandLine != null ?
+        commandLine.getArgList() : new ArrayList<String>();
+    if (remainingArgs.size() != 1) {
+      printUsageAndExit(options, 1);
+    }
+
+    String command = remainingArgs.get(0);
+    if ("start".equals(command)) {
+      // continue and start container
+    } else if ("stop".equals(command)) {
+      // "stop" is handled by the daemon script, not in-process; exit nonzero.
+      System.exit(1);
+    } else {
+      printUsageAndExit(options, 1);
+    }
+
+    // set up the Jersey servlet container for Jetty
+    ServletHolder sh = new ServletHolder(containerClass);
+    sh.setInitParameter(
+      "com.sun.jersey.config.property.resourceConfigClass",
+      ResourceConfig.class.getCanonicalName());
+    sh.setInitParameter("com.sun.jersey.config.property.packages",
+      "jetty");
+    // The servlet holder below is instantiated to only handle the case
+    // of the /status/cluster returning arrays of nodes (live/dead). Without
+    // this servlet holder, the problem is that the node arrays in the response
+    // are collapsed to single nodes. We want to be able to treat the
+    // node lists as POJO in the response to /status/cluster servlet call,
+    // but not change the behavior for any of the other servlets
+    // Hence we don't use the servlet holder for all servlets / paths
+    ServletHolder shPojoMap = new ServletHolder(containerClass);
+    @SuppressWarnings("unchecked")
+    Map<String, String> shInitMap = sh.getInitParameters();
+    for (Entry<String, String> e : shInitMap.entrySet()) {
+      shPojoMap.setInitParameter(e.getKey(), e.getValue());
+    }
+    shPojoMap.setInitParameter(JSONConfiguration.FEATURE_POJO_MAPPING, "true");
+
+    // set up Jetty and run the embedded server
+
+    Server server = new Server();
+
+    Connector connector = new SelectChannelConnector();
+    if(conf.getBoolean(REST_SSL_ENABLED, false)) {
+      // Replace the plain connector with an SSL one; the key password
+      // defaults to the keystore password when not set separately.
+      SslSelectChannelConnector sslConnector = new SslSelectChannelConnector();
+      String keystore = conf.get(REST_SSL_KEYSTORE_STORE);
+      String password = HBaseConfiguration.getPassword(conf,
+        REST_SSL_KEYSTORE_PASSWORD, null);
+      String keyPassword = HBaseConfiguration.getPassword(conf,
+        REST_SSL_KEYSTORE_KEYPASSWORD, password);
+      sslConnector.setKeystore(keystore);
+      sslConnector.setPassword(password);
+      sslConnector.setKeyPassword(keyPassword);
+      connector = sslConnector;
+    }
+    connector.setPort(servlet.getConfiguration().getInt("hbase.rest.port", 8080));
+    connector.setHost(servlet.getConfiguration().get("hbase.rest.host", "0.0.0.0"));
+
+    server.addConnector(connector);
+
+    // Set the default max thread number to 100 to limit
+    // the number of concurrent requests so that REST server doesn't OOM easily.
+    // Jetty set the default max thread number to 250, if we don't set it.
+    //
+    // Our default min thread number 2 is the same as that used by Jetty.
+    int maxThreads = servlet.getConfiguration().getInt("hbase.rest.threads.max", 100);
+    int minThreads = servlet.getConfiguration().getInt("hbase.rest.threads.min", 2);
+    QueuedThreadPool threadPool = new QueuedThreadPool(maxThreads);
+    threadPool.setMinThreads(minThreads);
+    server.setThreadPool(threadPool);
+
+    server.setSendServerVersion(false);
+    server.setSendDateHeader(false);
+    server.setStopAtShutdown(true);
+      // set up context
+    Context context = new Context(server, "/", Context.SESSIONS);
+    context.addServlet(shPojoMap, "/status/cluster");
+    context.addServlet(sh, "/*");
+    if (authFilter != null) {
+      // NOTE(review): the literal 1 is the Jetty 6 dispatch mask (REQUEST);
+      // confirm against org.mortbay.jetty.Handler constants.
+      context.addFilter(authFilter, "/*", 1);
+    }
+
+    // Load filters from configuration.
+    String[] filterClasses = servlet.getConfiguration().getStrings(FILTER_CLASSES,
+      ArrayUtils.EMPTY_STRING_ARRAY);
+    for (String filter : filterClasses) {
+      filter = filter.trim();
+      context.addFilter(Class.forName(filter), "/*", 0);
+    }
+    HttpServerUtil.constrainHttpMethods(context);
+
+    // Put up info server.
+    int port = conf.getInt("hbase.rest.info.port", 8085);
+    if (port >= 0) {
+      conf.setLong("startcode", System.currentTimeMillis());
+      String a = conf.get("hbase.rest.info.bindAddress", "0.0.0.0");
+      InfoServer infoServer = new InfoServer("rest", a, port, false, conf);
+      infoServer.setAttribute("hbase.conf", conf);
+      infoServer.start();
+    }
+
+    // start server
+    server.start();
+    server.join();
+  }
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/6ddb2f19/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java
new file mode 100644
index 0000000..ff42271
--- /dev/null
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java
@@ -0,0 +1,151 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.rest;
+
+import java.io.IOException;
+
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.filter.ParseFilter;
+import org.apache.hadoop.hbase.security.UserProvider;
+import org.apache.hadoop.hbase.util.ConnectionCache;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authorize.ProxyUsers;
+import org.apache.log4j.Logger;
+
+/**
+ * Singleton class encapsulating global REST servlet state and functions.
+ */
+@InterfaceAudience.Private
+public class RESTServlet implements Constants {
+  private static Logger LOG = Logger.getLogger(RESTServlet.class);
+  // Process-wide singleton; created by getInstance(Configuration, UserProvider).
+  private static RESTServlet INSTANCE;
+  private final Configuration conf;
+  private final MetricsREST metrics = new MetricsREST();
+  private final ConnectionCache connectionCache;
+  private final UserGroupInformation realUser;
+
+  static final String CLEANUP_INTERVAL = "hbase.rest.connection.cleanup-interval";
+  static final String MAX_IDLETIME = "hbase.rest.connection.max-idletime";
+  static final String HBASE_REST_SUPPORT_PROXYUSER = "hbase.rest.support.proxyuser";
+
+  /** @return the UGI of the user this servlet process logged in as */
+  UserGroupInformation getRealUser() {
+    return realUser;
+  }
+
+  /**
+   * @return the RESTServlet singleton instance; the two-argument
+   *   getInstance must have been called first (asserted, not enforced)
+   */
+  public synchronized static RESTServlet getInstance() {
+    assert(INSTANCE != null);
+    return INSTANCE;
+  }
+
+  /**
+   * @param conf Existing configuration to use in rest servlet
+   * @param userProvider the login user provider
+   * @return the RESTServlet singleton instance
+   * @throws IOException
+   */
+  public synchronized static RESTServlet getInstance(Configuration conf,
+      UserProvider userProvider) throws IOException {
+    if (INSTANCE == null) {
+      INSTANCE = new RESTServlet(conf, userProvider);
+    }
+    return INSTANCE;
+  }
+
+  // Drops the singleton reference; no resources are released here.
+  public synchronized static void stop() {
+    if (INSTANCE != null)  INSTANCE = null;
+  }
+
+  /**
+   * Constructor with existing configuration
+   * @param conf existing configuration
+   * @param userProvider the login user provider
+   * @throws IOException
+   */
+  RESTServlet(final Configuration conf,
+      final UserProvider userProvider) throws IOException {
+    this.realUser = userProvider.getCurrent().getUGI();
+    this.conf = conf;
+    registerCustomFilter(conf);
+
+    // Connection cache cleanup defaults: sweep every 10s, evict after 10min idle.
+    int cleanInterval = conf.getInt(CLEANUP_INTERVAL, 10 * 1000);
+    int maxIdleTime = conf.getInt(MAX_IDLETIME, 10 * 60 * 1000);
+    connectionCache = new ConnectionCache(
+      conf, userProvider, cleanInterval, maxIdleTime);
+    if (supportsProxyuser()) {
+      ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
+    }
+  }
+
+  HBaseAdmin getAdmin() throws IOException {
+    return connectionCache.getAdmin();
+  }
+
+  /**
+   * Caller closes the table afterwards.
+   */
+  Table getTable(String tableName) throws IOException {
+    return connectionCache.getTable(tableName);
+  }
+
+  Configuration getConfiguration() {
+    return conf;
+  }
+
+  MetricsREST getMetrics() {
+    return metrics;
+  }
+
+  /**
+   * Helper method to determine if server should
+   * only respond to GET HTTP method requests.
+   * @return boolean for server read-only state
+   */
+  boolean isReadOnly() {
+    return getConfiguration().getBoolean("hbase.rest.readonly", false);
+  }
+
+  void setEffectiveUser(String effectiveUser) {
+    connectionCache.setEffectiveUser(effectiveUser);
+  }
+
+  boolean supportsProxyuser() {
+    return conf.getBoolean(HBASE_REST_SUPPORT_PROXYUSER, false);
+  }
+
+  // Registers user-supplied scanner filters from hbase.rest.custom.filters.
+  // Each entry must be "name:className"; malformed entries are skipped
+  // with a warning.
+  private void registerCustomFilter(Configuration conf) {
+    String[] filterList = conf.getStrings(Constants.CUSTOM_FILTERS);
+    if (filterList != null) {
+      for (String filterClass : filterList) {
+        String[] filterPart = filterClass.split(":");
+        if (filterPart.length != 2) {
+          LOG.warn(
+            "Invalid filter specification " + filterClass + " - skipping");
+        } else {
+          ParseFilter.registerFilter(filterPart[0], filterPart[1]);
+        }
+      }
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/6ddb2f19/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServletContainer.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServletContainer.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServletContainer.java
new file mode 100644
index 0000000..2ce8ede
--- /dev/null
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServletContainer.java
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.rest;
+
+import java.io.IOException;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+
+import com.sun.jersey.spi.container.servlet.ServletContainer;
+
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authorize.AuthorizationException;
+import org.apache.hadoop.security.authorize.ProxyUsers;
+import org.apache.hadoop.conf.Configuration;
+
+/**
+ * REST servlet container. It is used to get the remote request user
+ * without going through @HttpContext, so that we can minimize code changes.
+ */
+@InterfaceAudience.Private
+public class RESTServletContainer extends ServletContainer {
+  private static final long serialVersionUID = -2474255003443394314L;
+
+  /**
+   * This container is used only if authentication and
+   * impersonation is enabled. The remote request user is used
+   * as a proxy user for impersonation in invoking any REST service.
+   * When a "doAs" query parameter is present, the authenticated (real)
+   * user must be authorized as a proxy for the requested user before the
+   * request is serviced; otherwise the remote user is used directly.
+   */
+  @Override
+  public void service(final HttpServletRequest request,
+      final HttpServletResponse response) throws ServletException, IOException {
+    final String doAsUserFromQuery = request.getParameter("doAs");
+    RESTServlet servlet = RESTServlet.getInstance();
+    if (doAsUserFromQuery != null) {
+      if (!servlet.supportsProxyuser()) {
+        throw new ServletException("Support for proxyuser is not configured");
+      }
+      // create and attempt to authorize a proxy user (the client is attempting
+      // to do proxy user)
+      UserGroupInformation ugi = servlet.getRealUser();
+      ugi = UserGroupInformation.createProxyUser(doAsUserFromQuery, ugi);
+      // validate the proxy user authorization against the remote address
+      try {
+        ProxyUsers.authorize(ugi, request.getRemoteAddr(), servlet.getConfiguration());
+      } catch(AuthorizationException e) {
+        // preserve the cause so the authorization failure details are not lost
+        throw new ServletException(e.getMessage(), e);
+      }
+      servlet.setEffectiveUser(doAsUserFromQuery);
+    } else {
+      servlet.setEffectiveUser(request.getRemoteUser());
+    }
+    super.service(request, response);
+  }
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/6ddb2f19/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java
new file mode 100644
index 0000000..ddc2f56
--- /dev/null
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java
@@ -0,0 +1,104 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.rest;
+
+import java.io.IOException;
+import java.util.Map;
+
+import javax.ws.rs.GET;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.CacheControl;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.UriInfo;
+import javax.ws.rs.core.Response.ResponseBuilder;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.TableNotFoundException;
+import org.apache.hadoop.hbase.client.MetaScanner;
+import org.apache.hadoop.hbase.rest.model.TableInfoModel;
+import org.apache.hadoop.hbase.rest.model.TableRegionModel;
+
+/**
+ * Resource describing the regions of a table: for each region its name,
+ * id, start/end keys and hosting server, returned as a TableInfoModel.
+ */
+@InterfaceAudience.Private
+public class RegionsResource extends ResourceBase {
+  private static final Log LOG = LogFactory.getLog(RegionsResource.class);
+
+  // Region assignments change over time, so responses must not be cached.
+  static CacheControl cacheControl;
+  static {
+    cacheControl = new CacheControl();
+    cacheControl.setNoCache(true);
+    cacheControl.setNoTransform(false);
+  }
+
+  // Parent resource; supplies the table name this resource describes.
+  TableResource tableResource;
+
+  /**
+   * Constructor
+   * @param tableResource the parent table resource
+   * @throws IOException if the shared servlet instance cannot be obtained
+   */
+  public RegionsResource(TableResource tableResource) throws IOException {
+    super();
+    this.tableResource = tableResource;
+  }
+
+  /**
+   * Builds the region list for the table.
+   * @param uriInfo request URI, used only for debug logging
+   * @return 200 with a TableInfoModel on success, 404 if the table is not
+   *         found, 503 on other I/O failures
+   */
+  @GET
+  @Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
+    MIMETYPE_PROTOBUF_IETF})
+  public Response get(final @Context UriInfo uriInfo) {
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("GET " + uriInfo.getAbsolutePath());
+    }
+    servlet.getMetrics().incrementRequests(1);
+    try {
+      TableName tableName = TableName.valueOf(tableResource.getName());
+      TableInfoModel model = new TableInfoModel(tableName.getNameAsString());
+      // Look up every region of the table and the server hosting it.
+      Map<HRegionInfo,ServerName> regions = MetaScanner.allTableRegions(
+        servlet.getConfiguration(), null, tableName, false);
+      for (Map.Entry<HRegionInfo,ServerName> e: regions.entrySet()) {
+        HRegionInfo hri = e.getKey();
+        ServerName addr = e.getValue();
+        model.add(
+          new TableRegionModel(tableName.getNameAsString(), hri.getRegionId(),
+            hri.getStartKey(), hri.getEndKey(), addr.getHostAndPort()));
+      }
+      ResponseBuilder response = Response.ok(model);
+      response.cacheControl(cacheControl);
+      servlet.getMetrics().incrementSucessfulGetRequests(1);
+      return response.build();
+    } catch (TableNotFoundException e) {
+      servlet.getMetrics().incrementFailedGetRequests(1);
+      return Response.status(Response.Status.NOT_FOUND)
+        .type(MIMETYPE_TEXT).entity("Not found" + CRLF)
+        .build();
+    } catch (IOException e) {
+      servlet.getMetrics().incrementFailedGetRequests(1);
+      return Response.status(Response.Status.SERVICE_UNAVAILABLE)
+        .type(MIMETYPE_TEXT).entity("Unavailable" + CRLF)
+        .build();
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/6ddb2f19/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ResourceBase.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ResourceBase.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ResourceBase.java
new file mode 100644
index 0000000..f71d848
--- /dev/null
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ResourceBase.java
@@ -0,0 +1,96 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.rest;
+
+import java.io.IOException;
+
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.Response;
+
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.TableNotFoundException;
+import org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException;
+import org.apache.hadoop.hbase.regionserver.NoSuchColumnFamilyException;
+import org.apache.hadoop.util.StringUtils;
+
+/**
+ * Base class for all REST resources. Holds the shared RESTServlet
+ * instance and maps server-side exceptions to HTTP error responses.
+ */
+@InterfaceAudience.Private
+public class ResourceBase implements Constants {
+
+  // Shared servlet singleton providing connections, config and metrics.
+  RESTServlet servlet;
+  // AccessDeniedException class loaded reflectively; stays null when the
+  // security module is not on the classpath.
+  Class<?>  accessDeniedClazz;
+
+  public ResourceBase() throws IOException {
+    servlet = RESTServlet.getInstance();
+    try {
+      accessDeniedClazz = Class.forName("org.apache.hadoop.hbase.security.AccessDeniedException");
+    } catch (ClassNotFoundException e) {
+      // Security support is optional; a missing class simply disables the
+      // FORBIDDEN mapping below (accessDeniedClazz remains null).
+    }
+  }
+  
+  /**
+   * Translates an exception into a WebApplicationException carrying the
+   * appropriate HTTP status. This method always throws and never returns
+   * normally; the Response return type exists only for caller convenience
+   * ("return processException(e);").
+   * @param exp the exception to translate
+   * @return never returns normally
+   */
+  protected Response processException(Throwable exp) {
+    Throwable curr = exp;
+    if(accessDeniedClazz != null) {
+      //some access denied exceptions are buried
+      while (curr != null) {
+        if(accessDeniedClazz.isAssignableFrom(curr.getClass())) {
+          throw new WebApplicationException(
+              Response.status(Response.Status.FORBIDDEN)
+                .type(MIMETYPE_TEXT).entity("Forbidden" + CRLF +
+                   StringUtils.stringifyException(exp) + CRLF)
+                .build());
+        }
+        curr = curr.getCause();
+      }
+    }
+    //TableNotFound may also be buried one level deep
+    if (exp instanceof TableNotFoundException ||
+        exp.getCause() instanceof TableNotFoundException) {
+      throw new WebApplicationException(
+        Response.status(Response.Status.NOT_FOUND)
+          .type(MIMETYPE_TEXT).entity("Not found" + CRLF +
+             StringUtils.stringifyException(exp) + CRLF)
+          .build());
+    }
+    if (exp instanceof NoSuchColumnFamilyException){
+      throw new WebApplicationException(
+        Response.status(Response.Status.NOT_FOUND)
+          .type(MIMETYPE_TEXT).entity("Not found" + CRLF +
+             StringUtils.stringifyException(exp) + CRLF)
+          .build());
+    }
+    if (exp instanceof RuntimeException) {
+      throw new WebApplicationException(
+          Response.status(Response.Status.BAD_REQUEST)
+            .type(MIMETYPE_TEXT).entity("Bad request" + CRLF +
+              StringUtils.stringifyException(exp) + CRLF)
+            .build());
+    }
+    if (exp instanceof RetriesExhaustedWithDetailsException) {
+      RetriesExhaustedWithDetailsException retryException =
+          (RetriesExhaustedWithDetailsException) exp;
+      // Re-map based on the first underlying cause; since processException
+      // always throws, control never falls through from this call.
+      processException(retryException.getCause(0));
+    }
+    // Fallback for anything unrecognized: report the service unavailable.
+    throw new WebApplicationException(
+      Response.status(Response.Status.SERVICE_UNAVAILABLE)
+        .type(MIMETYPE_TEXT).entity("Unavailable" + CRLF +
+          StringUtils.stringifyException(exp) + CRLF)
+        .build());
+  }
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/6ddb2f19/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ResourceConfig.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ResourceConfig.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ResourceConfig.java
new file mode 100644
index 0000000..d397399
--- /dev/null
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ResourceConfig.java
@@ -0,0 +1,31 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.rest;
+
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+
+import com.sun.jersey.api.core.PackagesResourceConfig;
+
+/**
+ * Jersey resource configuration that scans the org.apache.hadoop.hbase.rest
+ * package for annotated REST resource and provider classes.
+ */
+@InterfaceAudience.Private
+public class ResourceConfig extends PackagesResourceConfig {
+  public ResourceConfig() {
+    super("org.apache.hadoop.hbase.rest");
+  }
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/6ddb2f19/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ResultGenerator.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ResultGenerator.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ResultGenerator.java
new file mode 100644
index 0000000..989c59e
--- /dev/null
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ResultGenerator.java
@@ -0,0 +1,50 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.rest;
+
+import java.io.IOException;
+import java.util.Iterator;
+
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.filter.Filter;
+import org.apache.hadoop.hbase.rest.model.ScannerModel;
+
+/**
+ * Iterator over the cells produced for a row specification, with factory
+ * methods choosing between single-row and scanner-backed generation.
+ */
+@InterfaceAudience.Private
+public abstract class ResultGenerator implements Iterator<Cell> {
+
+  /**
+   * Creates the generator matching the row specification: a row-based
+   * generator for a single row, otherwise a scanner-based one.
+   */
+  public static ResultGenerator fromRowSpec(final String table,
+      final RowSpec rowspec, final Filter filter, final boolean cacheBlocks)
+      throws IOException {
+    return rowspec.isSingleRow()
+        ? new RowResultGenerator(table, rowspec, filter, cacheBlocks)
+        : new ScannerResultGenerator(table, rowspec, filter, cacheBlocks);
+  }
+
+  /** Parses a filter expression string via ScannerModel. */
+  public static Filter buildFilter(final String filter) throws Exception {
+    return ScannerModel.buildFilter(filter);
+  }
+
+  /** Hands a cell back to the generator; see concrete implementations. */
+  public abstract void putBack(Cell kv);
+
+  /** Releases any resources held by this generator. */
+  public abstract void close();
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/6ddb2f19/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java
new file mode 100644
index 0000000..c425e84
--- /dev/null
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java
@@ -0,0 +1,106 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.rest;
+
+import java.io.IOException;
+
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.CacheControl;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.UriInfo;
+import javax.ws.rs.core.Response.ResponseBuilder;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.rest.model.TableListModel;
+import org.apache.hadoop.hbase.rest.model.TableModel;
+
+/**
+ * Root resource of the REST API. GET returns the list of tables; child
+ * paths dispatch to cluster status, version and per-table resources.
+ */
+@Path("/")
+@InterfaceAudience.Private
+public class RootResource extends ResourceBase {
+  private static final Log LOG = LogFactory.getLog(RootResource.class);
+
+  // The table list can change at any time, so responses are not cacheable.
+  static CacheControl cacheControl;
+  static {
+    cacheControl = new CacheControl();
+    cacheControl.setNoCache(true);
+    cacheControl.setNoTransform(false);
+  }
+
+  /**
+   * Constructor
+   * @throws IOException if the shared servlet instance cannot be obtained
+   */
+  public RootResource() throws IOException {
+    super();
+  }
+
+  /**
+   * Builds a model of all table names known to the admin interface.
+   * @return the table list model
+   * @throws IOException if the table names cannot be listed
+   */
+  private final TableListModel getTableList() throws IOException {
+    TableListModel tableList = new TableListModel();
+    TableName[] tableNames = servlet.getAdmin().listTableNames();
+    for (TableName name: tableNames) {
+      tableList.add(new TableModel(name.getNameAsString()));
+    }
+    return tableList;
+  }
+
+  /**
+   * Returns the table list.
+   * @param uriInfo request URI, used only for debug logging
+   * @return 200 with a TableListModel; failures are mapped to an error
+   *         status by processException
+   */
+  @GET
+  @Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
+    MIMETYPE_PROTOBUF_IETF})
+  public Response get(final @Context UriInfo uriInfo) {
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("GET " + uriInfo.getAbsolutePath());
+    }
+    servlet.getMetrics().incrementRequests(1);
+    try {
+      ResponseBuilder response = Response.ok(getTableList());
+      response.cacheControl(cacheControl);
+      servlet.getMetrics().incrementSucessfulGetRequests(1);
+      return response.build();
+    } catch (Exception e) {
+      servlet.getMetrics().incrementFailedGetRequests(1);
+      // processException always throws; the return satisfies the compiler.
+      return processException(e);
+    }
+  }
+
+  /** Sub-resource for cluster status requests. */
+  @Path("status/cluster")
+  public StorageClusterStatusResource getClusterStatusResource()
+      throws IOException {
+    return new StorageClusterStatusResource();
+  }
+
+  /** Sub-resource for version information requests. */
+  @Path("version")
+  public VersionResource getVersionResource() throws IOException {
+    return new VersionResource();
+  }
+
+  /** Sub-resource for requests scoped to a single table. */
+  @Path("{table}")
+  public TableResource getTableResource(
+      final @PathParam("table") String table) throws IOException {
+    return new TableResource(table);
+  }
+}


Mime
View raw message