hbase-commits mailing list archives

From: apurt...@apache.org
Subject: svn commit: r789136 [1/8] - in /hadoop/hbase/trunk/src/contrib: ./ stargate/ stargate/lib/ stargate/src/ stargate/src/java/ stargate/src/java/org/ stargate/src/java/org/apache/ stargate/src/java/org/apache/hadoop/ stargate/src/java/org/apache/hadoop/hb...
Date: Sun, 28 Jun 2009 18:21:49 GMT
Author: apurtell
Date: Sun Jun 28 18:21:45 2009
New Revision: 789136

URL: http://svn.apache.org/viewvc?rev=789136&view=rev
Log:
added stargate contrib

Added:
    hadoop/hbase/trunk/src/contrib/stargate/
    hadoop/hbase/trunk/src/contrib/stargate/build.xml
    hadoop/hbase/trunk/src/contrib/stargate/lib/
    hadoop/hbase/trunk/src/contrib/stargate/lib/asm-3.1.jar   (with props)
    hadoop/hbase/trunk/src/contrib/stargate/lib/jackson-asl-0.9.4.jar   (with props)
    hadoop/hbase/trunk/src/contrib/stargate/lib/jaxb-impl-2.1.10.jar   (with props)
    hadoop/hbase/trunk/src/contrib/stargate/lib/jersey-core-1.1.0-ea.jar   (with props)
    hadoop/hbase/trunk/src/contrib/stargate/lib/jersey-json-1.1.0-ea.jar   (with props)
    hadoop/hbase/trunk/src/contrib/stargate/lib/jersey-server-1.1.0-ea.jar   (with props)
    hadoop/hbase/trunk/src/contrib/stargate/lib/jsr311-api-1.1.jar   (with props)
    hadoop/hbase/trunk/src/contrib/stargate/lib/persistence-api-1.0.jar   (with props)
    hadoop/hbase/trunk/src/contrib/stargate/lib/protobuf-java-2.1.0.jar   (with props)
    hadoop/hbase/trunk/src/contrib/stargate/src/
    hadoop/hbase/trunk/src/contrib/stargate/src/java/
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/Constants.java
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/Main.java
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/RESTServlet.java
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/RegionsResource.java
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/ResourceConfig.java
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/ResultGenerator.java
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/RowResource.java
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/RowResultGenerator.java
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/RowSpec.java
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/ScannerInstanceResource.java
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/ScannerResource.java
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/ScannerResultGenerator.java
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/SchemaResource.java
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/StorageClusterStatusResource.java
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/StorageClusterVersionResource.java
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/TableResource.java
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/VersionResource.java
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/client/
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/client/Client.java
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/client/Cluster.java
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/client/Response.java
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/CellModel.java
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/CellSetModel.java
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/ColumnSchemaModel.java
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/IProtobufWrapper.java
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/RowModel.java
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/ScannerModel.java
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/StorageClusterStatusModel.java
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/StorageClusterVersionModel.java
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/TableInfoModel.java
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/TableListModel.java
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/TableModel.java
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/TableRegionModel.java
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/TableSchemaModel.java
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/VersionModel.java
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/protobuf/
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/protobuf/CellMessage.proto
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/protobuf/CellSetMessage.proto
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/protobuf/ColumnSchemaMessage.proto
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/protobuf/ScannerMessage.proto
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/protobuf/StorageClusterStatusMessage.proto
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/protobuf/TableInfoMessage.proto
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/protobuf/TableListMessage.proto
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/protobuf/TableSchemaMessage.proto
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/protobuf/VersionMessage.proto
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/protobuf/generated/
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/protobuf/generated/CellMessage.java
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/protobuf/generated/CellSetMessage.java
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/protobuf/generated/ColumnSchemaMessage.java
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/protobuf/generated/ScannerMessage.java
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/protobuf/generated/StorageClusterStatusMessage.java
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/protobuf/generated/TableInfoMessage.java
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/protobuf/generated/TableListMessage.java
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/protobuf/generated/TableSchemaMessage.java
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/protobuf/generated/VersionMessage.java
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/provider/
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/provider/JAXBContextResolver.java
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/provider/consumer/
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/provider/consumer/ProtobufMessageBodyConsumer.java
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/provider/producer/
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/provider/producer/PlainTextMessageBodyProducer.java
    hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/provider/producer/ProtobufMessageBodyProducer.java
    hadoop/hbase/trunk/src/contrib/stargate/src/test/
    hadoop/hbase/trunk/src/contrib/stargate/src/test/hbase-site.xml
    hadoop/hbase/trunk/src/contrib/stargate/src/test/log4j.properties
    hadoop/hbase/trunk/src/contrib/stargate/src/test/org/
    hadoop/hbase/trunk/src/contrib/stargate/src/test/org/apache/
    hadoop/hbase/trunk/src/contrib/stargate/src/test/org/apache/hadoop/
    hadoop/hbase/trunk/src/contrib/stargate/src/test/org/apache/hadoop/hbase/
    hadoop/hbase/trunk/src/contrib/stargate/src/test/org/apache/hadoop/hbase/stargate/
    hadoop/hbase/trunk/src/contrib/stargate/src/test/org/apache/hadoop/hbase/stargate/MiniClusterTestCase.java
    hadoop/hbase/trunk/src/contrib/stargate/src/test/org/apache/hadoop/hbase/stargate/Test00MiniCluster.java
    hadoop/hbase/trunk/src/contrib/stargate/src/test/org/apache/hadoop/hbase/stargate/TestRowResource.java
    hadoop/hbase/trunk/src/contrib/stargate/src/test/org/apache/hadoop/hbase/stargate/TestScannerResource.java
    hadoop/hbase/trunk/src/contrib/stargate/src/test/org/apache/hadoop/hbase/stargate/TestSchemaResource.java
    hadoop/hbase/trunk/src/contrib/stargate/src/test/org/apache/hadoop/hbase/stargate/TestStatusResource.java
    hadoop/hbase/trunk/src/contrib/stargate/src/test/org/apache/hadoop/hbase/stargate/TestTableResource.java
    hadoop/hbase/trunk/src/contrib/stargate/src/test/org/apache/hadoop/hbase/stargate/TestVersionResource.java
    hadoop/hbase/trunk/src/contrib/stargate/src/test/org/apache/hadoop/hbase/stargate/model/
    hadoop/hbase/trunk/src/contrib/stargate/src/test/org/apache/hadoop/hbase/stargate/model/TestCellModel.java
    hadoop/hbase/trunk/src/contrib/stargate/src/test/org/apache/hadoop/hbase/stargate/model/TestCellSetModel.java
    hadoop/hbase/trunk/src/contrib/stargate/src/test/org/apache/hadoop/hbase/stargate/model/TestColumnSchemaModel.java
    hadoop/hbase/trunk/src/contrib/stargate/src/test/org/apache/hadoop/hbase/stargate/model/TestRowModel.java
    hadoop/hbase/trunk/src/contrib/stargate/src/test/org/apache/hadoop/hbase/stargate/model/TestScannerModel.java
    hadoop/hbase/trunk/src/contrib/stargate/src/test/org/apache/hadoop/hbase/stargate/model/TestStorageClusterStatusModel.java
    hadoop/hbase/trunk/src/contrib/stargate/src/test/org/apache/hadoop/hbase/stargate/model/TestStorageClusterVersionModel.java
    hadoop/hbase/trunk/src/contrib/stargate/src/test/org/apache/hadoop/hbase/stargate/model/TestTableInfoModel.java
    hadoop/hbase/trunk/src/contrib/stargate/src/test/org/apache/hadoop/hbase/stargate/model/TestTableListModel.java
    hadoop/hbase/trunk/src/contrib/stargate/src/test/org/apache/hadoop/hbase/stargate/model/TestTableRegionModel.java
    hadoop/hbase/trunk/src/contrib/stargate/src/test/org/apache/hadoop/hbase/stargate/model/TestTableSchemaModel.java
    hadoop/hbase/trunk/src/contrib/stargate/src/test/org/apache/hadoop/hbase/stargate/model/TestVersionModel.java
    hadoop/hbase/trunk/src/contrib/stargate/src/test/zoo.cfg
Modified:
    hadoop/hbase/trunk/src/contrib/build-contrib.xml
    hadoop/hbase/trunk/src/contrib/build.xml

Modified: hadoop/hbase/trunk/src/contrib/build-contrib.xml
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/contrib/build-contrib.xml?rev=789136&r1=789135&r2=789136&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/contrib/build-contrib.xml (original)
+++ hadoop/hbase/trunk/src/contrib/build-contrib.xml Sun Jun 28 18:21:45 2009
@@ -70,13 +70,12 @@
 
   <!-- the unit test classpath -->
   <path id="test.classpath">
+    <path refid="classpath"/>
     <pathelement location="${build.test}" />
-    <pathelement location="${hadoop.root}/build/test/classes"/>
+    <pathelement location="${hadoop.root}/build/test"/>
+    <pathelement location="${hadoop.root}/build"/>
     <pathelement location="${hadoop.root}/src/contrib/test"/>
     <pathelement location="${conf.dir}"/>
-    <pathelement location="${hadoop.root}/build"/>
-    <pathelement location="${build.examples}"/>
-    <path refid="classpath"/>
   </path>
 
 

Modified: hadoop/hbase/trunk/src/contrib/build.xml
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/contrib/build.xml?rev=789136&r1=789135&r2=789136&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/contrib/build.xml (original)
+++ hadoop/hbase/trunk/src/contrib/build.xml Sun Jun 28 18:21:45 2009
@@ -31,6 +31,12 @@
       <fileset dir="." includes="*/build.xml"/>
     </subant>
   </target>
+
+  <target name="compile-test">
+    <subant target="compile-test">
+      <fileset dir="." includes="*/build.xml"/>
+    </subant>
+  </target>
   
   <!-- ====================================================== -->
   <!-- Package contrib jars.                                  -->
@@ -46,13 +52,10 @@
   <!-- ====================================================== -->
   <target name="test">
     <subant target="test">
-      <fileset dir="." includes="streaming/build.xml"/>
-      <fileset dir="." includes="fairscheduler/build.xml"/>
-      <fileset dir="." includes="capacity-scheduler/build.xml"/>
+      <fileset dir="." includes="*/build.xml"/>
     </subant>
   </target>
   
-  
   <!-- ====================================================== -->
   <!-- Clean all the contribs.                              -->
   <!-- ====================================================== -->

Added: hadoop/hbase/trunk/src/contrib/stargate/build.xml
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/contrib/stargate/build.xml?rev=789136&view=auto
==============================================================================
--- hadoop/hbase/trunk/src/contrib/stargate/build.xml (added)
+++ hadoop/hbase/trunk/src/contrib/stargate/build.xml Sun Jun 28 18:21:45 2009
@@ -0,0 +1,120 @@
+<project name="stargate" default="archives" basedir=".">
+
+  <import file="../build-contrib.xml"/>
+  
+  <property name="version" value="0.0.1"/>
+  <property name="build.war.classes" location="${build.dir}/warClasses"/>
+  
+  <property name="war.file" value="stargate-${version}.war"/>
+  <property name="web.xml.file" value="${conf.dir}/web.xml"/>
+  
+  <property name="jar.file" value="stargate-${version}.jar"/>
+  <property name="test.jar.file" value="stargate-${version}-test.jar"/>
+  
+  <property name="javac.debug" value="on"/>
+  <property name="javac.source" value="1.6"/>
+  
+  <target name="init">
+    <tstamp/>
+    <mkdir dir="${build.dir}"/>
+    <mkdir dir="${build.classes}"/>
+    <mkdir dir="${build.war.classes}"/>
+    <mkdir dir="${build.test}"/>
+  </target>
+  
+  <target name="javadoc">
+    <javadoc access="public" destdir="${docs.dir}" source="${javac.source}" sourcepath="${src.dir}" splitindex="true" use="true" version="true">
+      <classpath refid="classpath"/>
+    </javadoc>
+  </target>
+  
+  <target name="compile" depends="compile-jar, compile-war"/>
+
+  <target name="compile-jar" depends="init">
+    <javac srcdir="${src.dir}" destdir="${build.classes}" debug="${javac.debug}" source="${javac.source}">
+      <classpath refid="classpath"/>
+    </javac>
+  </target>
+  
+  <target name="compile-war" depends="init">
+    <javac srcdir="${src.dir}" destdir="${build.war.classes}" debug="${javac.debug}" source="${javac.source}">
+      <classpath refid="classpath"/>
+      <exclude name="**/Main.java"/>
+    </javac>
+  </target>
+  
+  <target name="compile-test" depends="compile-war">
+    <javac srcdir="${src.test}" includes="**/*.java" destdir="${build.test}" debug="${javac.debug}" source="1.6">
+      <classpath refid="test.classpath"/>
+    </javac>
+  </target>
+  
+  <target name="test" depends="compile-test" description="Build test jar and run tests">
+    <delete dir="${test.log.dir}"/>
+    <mkdir dir="${test.log.dir}"/>
+    <junit printsummary="yes" showoutput="${test.output}" haltonfailure="no" fork="yes" forkmode="once" maxmemory="1000m" errorProperty="tests.failed" failureProperty="tests.failed" >
+      <sysproperty key="test.build.data" value="${build.test}/data"/>
+      <sysproperty key="build.test" value="${build.test}"/>
+      <sysproperty key="user.dir" value="${build.test}/data"/>
+      <sysproperty key="test.log.dir" value="${hadoop.log.dir}"/>
+      <classpath refid="test.classpath"/>
+      <formatter type="${test.junit.output.format}"/>
+      <batchtest todir="${build.test}" unless="testcase">
+        <fileset dir="${src.test}" includes="**/Test*.java"/>
+      </batchtest>
+      <batchtest todir="${build.test}" if="testcase">
+        <fileset dir="${src.test}" includes="**/${testcase}.java"/>
+      </batchtest>
+    </junit>
+    <fail if="tests.failed">
+      Tests failed!
+    </fail>
+  </target>
+ 
+  <target name="war" depends="compile-war">
+    <copy todir="${build.dir}/lib" overwrite="true">
+      <fileset dir="${lib.dir}"/>
+      <mapper type="flatten"/>
+    </copy>
+    <copy todir="${build.war.classes}" overwrite="true">
+      <fileset dir="${conf.dir}">
+        <include name="zoo.cfg"/>
+      </fileset>
+    </copy>
+    
+    <war destfile="${build.dir}/${war.file}" webxml="${web.xml.file}">
+      <lib dir="${build.dir}/lib"/>
+      <classes dir="${build.war.classes}"/>
+    </war>
+  </target>
+  
+  <target name="jar" depends="compile-jar">
+    <jar jarfile="${build.dir}/${jar.file}" basedir="${build.classes}"/>
+  </target>
+  
+  <target name="createDist" depends="jar, war">
+    <mkdir dir="${dist.dir}"/>
+    <mkdir dir="${dist.dir}/webapps"/>
+    <mkdir dir="${dist.dir}/logs"/>
+    <copy todir="${dist.dir}/lib" overwrite="true">
+      <fileset dir="${lib.dir}/jetty"/>
+      <fileset dir="${lib.dir}/general"/>
+      <mapper type="flatten"/>
+    </copy>
+    <copy todir="${dist.dir}/bin" overwrite="true">
+      <fileset dir="${basedir}/bin"/>
+      <mapper type="flatten"/>
+    </copy>
+    <chmod perm="ugo+x" type="file">
+      <fileset dir="${dist.dir}/bin"/>
+    </chmod>
+    <copy todir="${dist.dir}" overwrite="true" file="${build.dir}/${jar.file}"/>
+    <copy todir="${dist.dir}/webapps" overwrite="true" file="${build.dir}/${war.file}"/>
+  </target>
+  
+  <target name="clean">
+    <delete dir="build"/>
+    <delete dir="dist"/>
+  </target>
+
+</project>

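With this build file in place, "ant test" forks JUnit once over every Test*.java under src/test (per the batchtest blocks above), and setting the testcase property, e.g. -Dtestcase=TestVersionResource, narrows the run to a single class; the "jar", "war", and "createDist" targets produce the stargate-0.0.1 artifacts.
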
Added: hadoop/hbase/trunk/src/contrib/stargate/lib/asm-3.1.jar
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/contrib/stargate/lib/asm-3.1.jar?rev=789136&view=auto
==============================================================================
Binary file - no diff available.

Propchange: hadoop/hbase/trunk/src/contrib/stargate/lib/asm-3.1.jar
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Added: hadoop/hbase/trunk/src/contrib/stargate/lib/jackson-asl-0.9.4.jar
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/contrib/stargate/lib/jackson-asl-0.9.4.jar?rev=789136&view=auto
==============================================================================
Binary file - no diff available.

Propchange: hadoop/hbase/trunk/src/contrib/stargate/lib/jackson-asl-0.9.4.jar
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Added: hadoop/hbase/trunk/src/contrib/stargate/lib/jaxb-impl-2.1.10.jar
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/contrib/stargate/lib/jaxb-impl-2.1.10.jar?rev=789136&view=auto
==============================================================================
Binary file - no diff available.

Propchange: hadoop/hbase/trunk/src/contrib/stargate/lib/jaxb-impl-2.1.10.jar
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Added: hadoop/hbase/trunk/src/contrib/stargate/lib/jersey-core-1.1.0-ea.jar
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/contrib/stargate/lib/jersey-core-1.1.0-ea.jar?rev=789136&view=auto
==============================================================================
Binary file - no diff available.

Propchange: hadoop/hbase/trunk/src/contrib/stargate/lib/jersey-core-1.1.0-ea.jar
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Added: hadoop/hbase/trunk/src/contrib/stargate/lib/jersey-json-1.1.0-ea.jar
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/contrib/stargate/lib/jersey-json-1.1.0-ea.jar?rev=789136&view=auto
==============================================================================
Binary file - no diff available.

Propchange: hadoop/hbase/trunk/src/contrib/stargate/lib/jersey-json-1.1.0-ea.jar
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Added: hadoop/hbase/trunk/src/contrib/stargate/lib/jersey-server-1.1.0-ea.jar
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/contrib/stargate/lib/jersey-server-1.1.0-ea.jar?rev=789136&view=auto
==============================================================================
Binary file - no diff available.

Propchange: hadoop/hbase/trunk/src/contrib/stargate/lib/jersey-server-1.1.0-ea.jar
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Added: hadoop/hbase/trunk/src/contrib/stargate/lib/jsr311-api-1.1.jar
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/contrib/stargate/lib/jsr311-api-1.1.jar?rev=789136&view=auto
==============================================================================
Binary file - no diff available.

Propchange: hadoop/hbase/trunk/src/contrib/stargate/lib/jsr311-api-1.1.jar
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Added: hadoop/hbase/trunk/src/contrib/stargate/lib/persistence-api-1.0.jar
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/contrib/stargate/lib/persistence-api-1.0.jar?rev=789136&view=auto
==============================================================================
Binary file - no diff available.

Propchange: hadoop/hbase/trunk/src/contrib/stargate/lib/persistence-api-1.0.jar
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Added: hadoop/hbase/trunk/src/contrib/stargate/lib/protobuf-java-2.1.0.jar
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/contrib/stargate/lib/protobuf-java-2.1.0.jar?rev=789136&view=auto
==============================================================================
Binary file - no diff available.

Propchange: hadoop/hbase/trunk/src/contrib/stargate/lib/protobuf-java-2.1.0.jar
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Added: hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/Constants.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/Constants.java?rev=789136&view=auto
==============================================================================
--- hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/Constants.java (added)
+++ hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/Constants.java Sun Jun 28 18:21:45 2009
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2009 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.stargate;
+
+public interface Constants {
+  public static final String MIMETYPE_TEXT = "text/plain";
+  public static final String MIMETYPE_XML = "text/xml";
+  public static final String MIMETYPE_BINARY = "application/octet-stream";
+  public static final String MIMETYPE_PROTOBUF = "application/x-protobuf";
+  public static final String MIMETYPE_JSON = "application/json";
+  public static final String MIMETYPE_JAVASCRIPT = "application/x-javascript";
+  
+  public static final String PATH_STATUS_CLUSTER = "/status/cluster";
+  public static final String PATH_VERSION = "/version";
+  public static final String PATH_VERSION_CLUSTER = "/version/cluster";
+}

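Constants is implemented, rather than referenced statically, by the resource classes later in this commit, which puts the MIMETYPE_* and PATH_* values in scope unqualified, including inside JAX-RS annotations. A minimal sketch of the pattern (this resource class is hypothetical; the real consumers follow below):

    package org.apache.hadoop.hbase.stargate;

    import javax.ws.rs.GET;
    import javax.ws.rs.Produces;

    // Hypothetical resource, for illustration only: implementing Constants
    // makes the MIME type strings usable directly in @Produces.
    public class ExampleResource implements Constants {
      @GET
      @Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON})
      public String get() {
        return "example";
      }
    }
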
Added: hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/Main.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/Main.java?rev=789136&view=auto
==============================================================================
--- hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/Main.java (added)
+++ hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/Main.java Sun Jun 28 18:21:45 2009
@@ -0,0 +1,108 @@
+/*
+ * Copyright 2009 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.stargate;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.CommandLineParser;
+import org.apache.commons.cli.Options;
+import org.apache.commons.cli.PosixParser;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.mortbay.jetty.Connector;
+import org.mortbay.jetty.Handler;
+import org.mortbay.jetty.NCSARequestLog;
+import org.mortbay.jetty.Server;
+import org.mortbay.jetty.deployer.WebAppDeployer;
+import org.mortbay.jetty.handler.ContextHandlerCollection;
+import org.mortbay.jetty.handler.DefaultHandler;
+import org.mortbay.jetty.handler.HandlerCollection;
+import org.mortbay.jetty.handler.RequestLogHandler;
+import org.mortbay.jetty.nio.SelectChannelConnector;
+import org.mortbay.thread.QueuedThreadPool;
+
+public class Main {
+
+  public static void main(String[] args) throws Exception {
+    // process command line
+    Options options = new Options();
+    options.addOption("p", "port", true, "service port");
+    CommandLineParser parser = new PosixParser();
+    CommandLine cmd = parser.parse(options, args);
+    int port = 8080;
+    if (cmd.hasOption("p")) {
+      port = Integer.valueOf(cmd.getOptionValue("p"));
+    }
+
+    HBaseConfiguration conf = new HBaseConfiguration();
+    if (cmd.hasOption("m")) {
+      conf.set("hbase.master", cmd.getOptionValue("m"));
+    }
+
+    /* RuntimeMXBean runtime = ManagementFactory.getRuntimeMXBean();
+     * if (runtime != null) {
+     *   LOG.info("vmName=" + runtime.getVmName() + ", vmVendor=" +
+     *     runtime.getVmVendor() + ", vmVersion=" + runtime.getVmVersion());
+     *   LOG.info("vmInputArguments=" + runtime.getInputArguments());
+     * } */
+    /*
+     * poached from:
+     * http://jetty.mortbay.org/xref/org/mortbay/jetty/example/LikeJettyXml.html
+     */
+    String jetty_home = ".";
+    Server server = new Server();
+
+    QueuedThreadPool threadPool = new QueuedThreadPool();
+    threadPool.setMaxThreads(100);
+    server.setThreadPool(threadPool);
+
+    Connector connector = new SelectChannelConnector();
+    connector.setPort(port);
+    connector.setMaxIdleTime(30000);
+    server.setConnectors(new Connector[] { connector });
+
+    HandlerCollection handlers = new HandlerCollection();
+    ContextHandlerCollection contexts = new ContextHandlerCollection();
+    RequestLogHandler requestLogHandler = new RequestLogHandler();
+    handlers.setHandlers(new Handler[] { contexts, new DefaultHandler(),
+        requestLogHandler });
+    server.setHandler(handlers);
+
+    WebAppDeployer deployer1 = new WebAppDeployer();
+    deployer1.setContexts(contexts);
+    deployer1.setWebAppDir(jetty_home + "/webapps");
+    deployer1.setParentLoaderPriority(false);
+    deployer1.setExtract(true);
+    deployer1.setAllowDuplicates(false);
+    // deployer1.setDefaultsDescriptor(jetty_home + "/etc/webdefault.xml");
+    server.addLifeCycle(deployer1);
+
+    NCSARequestLog requestLog = new NCSARequestLog(jetty_home
+        + "/logs/jetty-yyyy_mm_dd.log");
+    requestLog.setExtended(false);
+    requestLogHandler.setRequestLog(requestLog);
+    
+    server.setStopAtShutdown(true);
+    server.setSendServerVersion(true);
+    server.start();
+    server.join();
+  }
+}

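Main embeds Jetty directly: a QueuedThreadPool capped at 100 threads, a SelectChannelConnector on the requested port, a WebAppDeployer that expands and deploys whatever sits under ./webapps (the stargate WAR, given the createDist layout above), and an NCSA request log. A minimal sketch of starting it on a non-default port, equivalent to passing -p on the command line (the launcher class name is made up):

    // Hypothetical launcher: brings up the Stargate server on port 8000
    // instead of the default 8080.
    public class StargateLauncher {
      public static void main(String[] args) throws Exception {
        org.apache.hadoop.hbase.stargate.Main.main(new String[] { "-p", "8000" });
      }
    }
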
Added: hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/RESTServlet.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/RESTServlet.java?rev=789136&view=auto
==============================================================================
--- hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/RESTServlet.java (added)
+++ hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/RESTServlet.java Sun Jun 28 18:21:45 2009
@@ -0,0 +1,126 @@
+/*
+ * Copyright 2009 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.stargate;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.HTablePool;
+import org.apache.hadoop.hbase.util.Bytes;
+
+import com.sun.jersey.server.impl.container.servlet.ServletAdaptor;
+import com.sun.jersey.spi.container.servlet.ServletContainer;
+
+public class RESTServlet extends ServletAdaptor {
+  
+  private static final long serialVersionUID = 1L;  
+  public static final int DEFAULT_MAX_AGE = 60 * 60 * 4;       // 4 hours
+  public static final String VERSION_STRING = "0.0.1";
+
+  private static RESTServlet instance;
+
+  private final HBaseConfiguration conf;
+  protected Map<String,Integer> maxAgeMap = 
+    Collections.synchronizedMap(new HashMap<String,Integer>());
+
+  public synchronized static RESTServlet getInstance() throws IOException {
+    if (instance == null) {
+      instance = new RESTServlet();
+    }
+    return instance;
+  }
+
+  public RESTServlet() throws IOException {
+    this.conf = new HBaseConfiguration();
+  }
+
+
+  protected HTablePool getTablePool(String name) {
+    return HTablePool.getPool(conf, Bytes.toBytes(name));
+  }
+
+  protected HBaseConfiguration getConfiguration() {
+    return conf;
+  }
+
+  /**
+   * @param tableName
+   * @return the maximum cache age suitable for use with this table, in
+   *  seconds 
+   * @throws IOException
+   */
+  public int getMaxAge(String tableName) throws IOException {
+    Integer i = maxAgeMap.get(tableName);
+    if (i != null) {
+      return i.intValue();
+    }
+    HTablePool pool = this.getTablePool(tableName);
+    HTable table = pool.get();
+    if (table != null) {
+      int maxAge = DEFAULT_MAX_AGE;
+      for (HColumnDescriptor family:
+          table.getTableDescriptor().getFamilies()) {
+        int ttl = family.getTimeToLive();
+        if (ttl < 0) {
+          continue;
+        }
+        if (ttl < maxAge) {
+          maxAge = ttl;
+        }
+      }
+      maxAgeMap.put(tableName, maxAge);
+      return maxAge;
+    }
+    return DEFAULT_MAX_AGE;
+  }
+
+  public void invalidateMaxAge(String tableName) {
+    maxAgeMap.remove(tableName);
+  }
+
+  public static final String getVersion() {
+    StringBuilder version = new StringBuilder();
+    version.append("Stargate ");
+    version.append(VERSION_STRING);
+    version.append(" [JVM: ");
+    version.append(System.getProperty("java.vm.vendor"));
+    version.append(' ');
+    version.append(System.getProperty("java.version"));
+    version.append('-');
+    version.append(System.getProperty("java.vm.version"));
+    version.append("] [OS: ");
+    version.append(System.getProperty("os.name"));
+    version.append(' ');
+    version.append(System.getProperty("os.version"));
+    version.append(' ');
+    version.append(System.getProperty("os.arch"));
+    version.append("] [Jersey: ");
+    version.append(ServletContainer.class.getPackage()
+      .getImplementationVersion());
+    version.append(']');
+    return version.toString();
+  }  
+}

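RESTServlet is the per-process singleton the resources share: it hands out HTablePools and computes Cache-Control hints. getMaxAge() returns the smallest positive column family TTL for the table, capped at DEFAULT_MAX_AGE (four hours), and memoizes the result until invalidateMaxAge() drops it. A minimal sketch of that contract (assumes a reachable HBase cluster; the table name "content" is made up):

    import java.io.IOException;

    import org.apache.hadoop.hbase.stargate.RESTServlet;

    public class MaxAgeExample {
      public static void main(String[] args) throws IOException {
        RESTServlet servlet = RESTServlet.getInstance();
        // smallest positive family TTL, or DEFAULT_MAX_AGE if no family
        // sets one; cached per table after the first call
        int maxAge = servlet.getMaxAge("content");
        System.out.println("Cache-Control: max-age=" + maxAge);
        // drop the cached value, e.g. after a schema change
        servlet.invalidateMaxAge("content");
      }
    }
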
Added: hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/RegionsResource.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/RegionsResource.java?rev=789136&view=auto
==============================================================================
--- hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/RegionsResource.java (added)
+++ hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/RegionsResource.java Sun Jun 28 18:21:45 2009
@@ -0,0 +1,97 @@
+/*
+ * Copyright 2009 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.stargate;
+
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.util.Map;
+import javax.ws.rs.GET;
+import javax.ws.rs.Produces;
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.CacheControl;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.Response.ResponseBuilder;
+import javax.ws.rs.core.UriInfo;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.HServerAddress;
+import org.apache.hadoop.hbase.TableNotFoundException;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.HTablePool;
+import org.apache.hadoop.hbase.stargate.model.TableInfoModel;
+import org.apache.hadoop.hbase.stargate.model.TableRegionModel;
+
+public class RegionsResource implements Constants {
+  private static final Log LOG = LogFactory.getLog(RegionsResource.class);
+
+  private String table;
+  private CacheControl cacheControl;
+
+  public RegionsResource(String table) {
+    this.table = table;
+    cacheControl = new CacheControl();
+    cacheControl.setNoCache(true);
+    cacheControl.setNoTransform(false);
+  }
+
+  private Map<HRegionInfo,HServerAddress> getTableRegions()
+      throws IOException {
+    HTablePool pool = RESTServlet.getInstance().getTablePool(this.table);
+    HTable table = pool.get();
+    try {
+      return table.getRegionsInfo();
+    } finally {
+      pool.put(table);
+    }
+  }
+
+  @GET
+  @Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_JAVASCRIPT,
+    MIMETYPE_PROTOBUF})
+  public Response get(@Context UriInfo uriInfo) {
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("GET " + uriInfo.getAbsolutePath());
+    }
+    try {
+      TableInfoModel model = new TableInfoModel(table);
+      Map<HRegionInfo,HServerAddress> regions = getTableRegions();
+      for (Map.Entry<HRegionInfo,HServerAddress> e: regions.entrySet()) {
+        HRegionInfo hri = e.getKey();
+        HServerAddress addr = e.getValue();
+        InetSocketAddress sa = addr.getInetSocketAddress();
+        model.add(
+          new TableRegionModel(table, hri.getRegionId(), hri.getStartKey(),
+                hri.getEndKey(),
+                sa.getHostName() + ":" + Integer.valueOf(sa.getPort())));
+      }
+      ResponseBuilder response = Response.ok(model);
+      response.cacheControl(cacheControl);
+      return response.build();
+    } catch (TableNotFoundException e) {
+      throw new WebApplicationException(Response.Status.NOT_FOUND);
+    } catch (IOException e) {
+      throw new WebApplicationException(e,
+                  Response.Status.SERVICE_UNAVAILABLE);
+    }
+  }
+}

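The resource flattens the region map from HTable.getRegionsInfo() into a TableInfoModel. A minimal sketch of the model it assembles, with constructor arguments matching the usage in get() above (all values are made up):

    import org.apache.hadoop.hbase.stargate.model.TableInfoModel;
    import org.apache.hadoop.hbase.stargate.model.TableRegionModel;
    import org.apache.hadoop.hbase.util.Bytes;

    public class RegionsModelExample {
      public static void main(String[] args) {
        TableInfoModel model = new TableInfoModel("mytable");
        // table name, region id, start key, end key, "host:port" location
        model.add(new TableRegionModel("mytable", 1234567890L,
            Bytes.toBytes(""), Bytes.toBytes("zzzz"), "rs1.example.com:60020"));
      }
    }
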
Added: hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/ResourceConfig.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/ResourceConfig.java?rev=789136&view=auto
==============================================================================
--- hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/ResourceConfig.java (added)
+++ hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/ResourceConfig.java Sun Jun 28 18:21:45 2009
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2009 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.stargate;
+
+import com.sun.jersey.api.core.PackagesResourceConfig;
+
+public class ResourceConfig extends PackagesResourceConfig {
+  public ResourceConfig() {
+    super("org.apache.hadoop.hbase.stargate");
+  }
+}

Added: hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/ResultGenerator.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/ResultGenerator.java?rev=789136&view=auto
==============================================================================
--- hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/ResultGenerator.java (added)
+++ hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/ResultGenerator.java Sun Jun 28 18:21:45 2009
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2009 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.stargate;
+
+import java.io.IOException;
+import java.util.Iterator;
+
+import org.apache.hadoop.hbase.KeyValue;
+
+public abstract class ResultGenerator implements Iterator<KeyValue> {
+  public static ResultGenerator fromRowSpec(String table, RowSpec rowspec) 
+      throws IOException {
+    if (rowspec.isSingleRow()) {
+      return new RowResultGenerator(table, rowspec);
+    } else {
+      return new ScannerResultGenerator(table, rowspec);
+    }
+  }
+
+  public abstract void close();
+}

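fromRowSpec() is the single dispatch point between the two read paths: a spec that names exactly one row becomes a RowResultGenerator (one Get), anything broader becomes a ScannerResultGenerator (a scan), and callers iterate KeyValues either way. A minimal sketch of a caller (the table name and spec string are made up; the spec syntax is defined by RowSpec, added below):

    import org.apache.hadoop.hbase.KeyValue;
    import org.apache.hadoop.hbase.stargate.ResultGenerator;
    import org.apache.hadoop.hbase.stargate.RowSpec;

    public class GeneratorExample {
      public static void main(String[] args) throws Exception {
        RowSpec spec = new RowSpec("/myrow");   // hypothetical spec string
        ResultGenerator gen = ResultGenerator.fromRowSpec("mytable", spec);
        try {
          while (gen.hasNext()) {
            System.out.println(gen.next());   // one KeyValue (cell) at a time
          }
        } finally {
          gen.close();
        }
      }
    }
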
Added: hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/RowResource.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/RowResource.java?rev=789136&view=auto
==============================================================================
--- hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/RowResource.java (added)
+++ hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/RowResource.java Sun Jun 28 18:21:45 2009
@@ -0,0 +1,342 @@
+/*
+ * Copyright 2009 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.stargate;
+
+import java.io.IOException;
+import java.util.List;
+
+import javax.ws.rs.DELETE;
+import javax.ws.rs.GET;
+import javax.ws.rs.POST;
+import javax.ws.rs.PUT;
+import javax.ws.rs.Consumes;
+import javax.ws.rs.Produces;
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.CacheControl;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.Response.ResponseBuilder;
+import javax.ws.rs.core.UriInfo;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.HTablePool;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.stargate.model.CellModel;
+import org.apache.hadoop.hbase.stargate.model.CellSetModel;
+import org.apache.hadoop.hbase.stargate.model.RowModel;
+import org.apache.hadoop.hbase.util.Bytes;
+
+public class RowResource implements Constants {
+  private static final Log LOG = LogFactory.getLog(RowResource.class);
+
+  private String table;
+  private RowSpec rowspec;
+  private CacheControl cacheControl;
+
+  public RowResource(String table, String rowspec, String versions) 
+      throws IOException {
+    this.table = table;
+    this.rowspec = new RowSpec(rowspec);
+    if (versions != null) {
+      this.rowspec.setMaxVersions(Integer.valueOf(versions));
+    }
+    cacheControl = new CacheControl();
+    cacheControl.setMaxAge(RESTServlet.getInstance().getMaxAge(table));
+    cacheControl.setNoTransform(false);
+  }
+
+  @GET
+  @Produces({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_JAVASCRIPT,
+    MIMETYPE_PROTOBUF})
+  public Response get(@Context UriInfo uriInfo) {
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("GET " + uriInfo.getAbsolutePath());
+    }
+    try {
+      ResultGenerator generator = ResultGenerator.fromRowSpec(table, rowspec);
+      if (!generator.hasNext()) {
+        throw new WebApplicationException(Response.Status.NOT_FOUND);
+      }
+      CellSetModel model = new CellSetModel();
+      KeyValue value = generator.next();
+      byte[] rowKey = value.getRow();
+      RowModel rowModel = new RowModel(rowKey);
+      do {
+        if (!Bytes.equals(value.getRow(), rowKey)) {
+          model.addRow(rowModel);
+          rowKey = value.getRow();
+          rowModel = new RowModel(rowKey);
+        }
+        rowModel.addCell(
+          new CellModel(value.getColumn(), value.getTimestamp(),
+              value.getValue()));
+        value = generator.next();
+      } while (value != null);
+      model.addRow(rowModel);
+      ResponseBuilder response = Response.ok(model);
+      response.cacheControl(cacheControl);
+      return response.build();
+    } catch (IOException e) {
+      throw new WebApplicationException(e,
+                  Response.Status.SERVICE_UNAVAILABLE);
+    }
+  }
+
+  @GET
+  @Produces(MIMETYPE_BINARY)
+  public Response getBinary(@Context UriInfo uriInfo) {
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("GET " + uriInfo.getAbsolutePath() + " as "+ MIMETYPE_BINARY);
+    }
+    // a nonspecific coordinate makes no sense here, since this method can
+    // only return a single cell
+    if (!rowspec.hasColumns() || rowspec.getColumns().length > 1) {
+      throw new WebApplicationException(Response.Status.BAD_REQUEST);
+    }
+    try {
+      ResultGenerator generator = ResultGenerator.fromRowSpec(table, rowspec);
+      if (!generator.hasNext()) {
+        throw new WebApplicationException(Response.Status.NOT_FOUND);
+      }
+      KeyValue value = generator.next();
+      ResponseBuilder response = Response.ok(value.getValue());
+      response.cacheControl(cacheControl);
+      response.header("X-Timestamp", value.getTimestamp());
+      return response.build();
+    } catch (IOException e) {
+      throw new WebApplicationException(e,
+                  Response.Status.SERVICE_UNAVAILABLE);
+    }
+  }
+
+  private void deleteRow() {
+    HTablePool pool;
+    try {
+      pool = RESTServlet.getInstance().getTablePool(this.table);
+    } catch (IOException e) {
+      throw new WebApplicationException(e, 
+                  Response.Status.INTERNAL_SERVER_ERROR);
+    }
+    HTable table = null;
+    try {
+      table = pool.get();
+      table.delete(new Delete(rowspec.getRow()));
+      table.flushCommits();
+    } catch (IOException e) {
+      throw new WebApplicationException(e,
+                  Response.Status.SERVICE_UNAVAILABLE);
+    } finally {
+      if (table != null) {
+        pool.put(table);
+      }
+    }
+  }
+
+  private Response update(CellSetModel model, boolean replace) {
+    if (replace) {
+      deleteRow();
+    }
+    HTablePool pool;
+    try {
+      pool = RESTServlet.getInstance().getTablePool(this.table);
+    } catch (IOException e) {
+      throw new WebApplicationException(e, 
+                  Response.Status.INTERNAL_SERVER_ERROR);
+    }
+    HTable table = null;
+    try {
+      table = pool.get();
+      for (RowModel row: model.getRows()) {
+        Put put = new Put(row.getKey());
+        for (CellModel cell: row.getCells()) {
+          if (LOG.isDebugEnabled()) {
+            LOG.debug("update cell '" +
+              Bytes.toStringBinary(cell.getColumn()) + "' @" +
+                cell.getTimestamp() + " length " + cell.getValue().length);
+          }
+          byte[][] parts = KeyValue.parseColumn(cell.getColumn());
+          if (cell.hasUserTimestamp()) {
+            put.add(parts[0], parts[1], cell.getTimestamp(), cell.getValue());
+          } else {
+            put.add(parts[0], parts[1], cell.getValue());
+          }
+        }
+        table.put(put);
+      }
+      table.flushCommits();
+      ResponseBuilder response = Response.ok();
+      return response.build();
+    } catch (IOException e) {
+      throw new WebApplicationException(e,
+                  Response.Status.SERVICE_UNAVAILABLE);
+    } finally {
+      if (table != null) {
+        pool.put(table);
+      }
+    }
+  }
+
+  private Response updateBinary(byte[] message, HttpHeaders headers, 
+      boolean replace) {
+    if (replace) {
+      deleteRow();
+    }
+    HTablePool pool;
+    try {
+      pool = RESTServlet.getInstance().getTablePool(this.table);
+    } catch (IOException e) {
+      throw new WebApplicationException(e, 
+                  Response.Status.INTERNAL_SERVER_ERROR);
+    }
+    HTable table = null;    
+    try {
+      byte[] row = rowspec.getRow();
+      byte[][] columns = rowspec.getColumns();
+      byte[] column = null;
+      if (columns != null) {
+        column = columns[0];
+      }
+      long timestamp = -1;
+      List<String> vals = headers.getRequestHeader("X-Row");
+      if (vals != null && !vals.isEmpty()) {
+        row = Bytes.toBytes(vals.get(0));
+      }
+      vals = headers.getRequestHeader("X-Column");
+      if (vals != null && !vals.isEmpty()) {
+        column = Bytes.toBytes(vals.get(0));
+      }
+      vals = headers.getRequestHeader("X-Timestamp");
+      if (vals != null && !vals.isEmpty()) {
+        timestamp = Long.valueOf(vals.get(0));
+      }
+      if (column == null) {
+        throw new WebApplicationException(Response.Status.BAD_REQUEST);
+      }
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("update cell '" + Bytes.toStringBinary(column) + "' @" +
+          timestamp + " length " + message.length);
+      }
+      Put put = new Put(row);
+      byte[][] parts = KeyValue.parseColumn(column);
+      if (timestamp >= 0) {
+        put.add(parts[0], parts[1], timestamp, message);
+      } else {
+        put.add(parts[0], parts[1], message);
+      }
+      table = pool.get();
+      table.put(put);
+      table.flushCommits();
+      return Response.ok().build();
+    } catch (IOException e) {
+      throw new WebApplicationException(e,
+                  Response.Status.SERVICE_UNAVAILABLE);
+    } finally {
+      if (table != null) {
+        pool.put(table);
+      }
+    }
+  }
+
+  @PUT
+  @Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_JAVASCRIPT,
+    MIMETYPE_PROTOBUF})
+  public Response put(CellSetModel model, @Context UriInfo uriInfo) {
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("PUT " + uriInfo.getAbsolutePath());
+    }
+    return update(model, true);
+  }
+
+  @PUT
+  @Consumes(MIMETYPE_BINARY)
+  public Response putBinary(byte[] message, @Context UriInfo uriInfo, 
+      @Context HttpHeaders headers)
+  {
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("PUT " + uriInfo.getAbsolutePath() + " as "+ MIMETYPE_BINARY);
+    }
+    return updateBinary(message, headers, true);
+  }
+
+  @POST
+  @Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_JAVASCRIPT,
+    MIMETYPE_PROTOBUF})
+  public Response post(CellSetModel model, @Context UriInfo uriInfo) {
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("POST " + uriInfo.getAbsolutePath());
+    }
+    return update(model, false);
+  }
+
+  @POST
+  @Consumes(MIMETYPE_BINARY)
+  public Response postBinary(byte[] message, @Context UriInfo uriInfo, 
+      @Context HttpHeaders headers)
+  {
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("POST " + uriInfo.getAbsolutePath() + " as "+ MIMETYPE_BINARY);
+    }
+    return updateBinary(message, headers, false);
+  }
+
+  @DELETE
+  public Response delete(@Context UriInfo uriInfo) {
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("DELETE " + uriInfo.getAbsolutePath());
+    }
+    Delete delete = new Delete(rowspec.getRow());
+    for (byte[] column: rowspec.getColumns()) {
+      byte[][] split = KeyValue.parseColumn(column);
+      if (rowspec.hasTimestamp()) {
+        delete.deleteColumns(split[0], split[1], rowspec.getTimestamp());
+      } else {
+        delete.deleteColumns(split[0], split[1]);        
+      }
+    }
+    HTablePool pool;
+    try {
+      pool = RESTServlet.getInstance().getTablePool(this.table);
+    } catch (IOException e) {
+      throw new WebApplicationException(e, 
+                  Response.Status.INTERNAL_SERVER_ERROR);
+    }
+    HTable table = null;
+    try {
+      table = pool.get();
+      table.delete(delete);
+      table.flushCommits();
+    } catch (IOException e) {
+      throw new WebApplicationException(e, 
+                  Response.Status.SERVICE_UNAVAILABLE);
+    } finally {
+      if (table != null) {
+        pool.put(table);
+      }
+    }
+    return Response.ok().build();
+  }
+}

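GET and PUT/POST both move data as a CellSetModel: rows keyed by a byte[] row key, each carrying (column, timestamp, value) cells. A minimal sketch of building the payload shape that put() and post() consume, with constructors matching the usage in get() above (row, column, and value are made up):

    import org.apache.hadoop.hbase.stargate.model.CellModel;
    import org.apache.hadoop.hbase.stargate.model.CellSetModel;
    import org.apache.hadoop.hbase.stargate.model.RowModel;
    import org.apache.hadoop.hbase.util.Bytes;

    public class CellSetExample {
      public static void main(String[] args) {
        CellSetModel cellSet = new CellSetModel();
        RowModel row = new RowModel(Bytes.toBytes("myrow"));
        // columns are "family:qualifier", split server-side by
        // KeyValue.parseColumn()
        row.addCell(new CellModel(Bytes.toBytes("info:name"),
            System.currentTimeMillis(), Bytes.toBytes("myvalue")));
        cellSet.addRow(row);
      }
    }
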
Added: hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/RowResultGenerator.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/RowResultGenerator.java?rev=789136&view=auto
==============================================================================
--- hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/RowResultGenerator.java (added)
+++ hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/RowResultGenerator.java Sun Jun 28 18:21:45 2009
@@ -0,0 +1,87 @@
+/*
+ * Copyright 2009 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.stargate;
+
+import java.io.IOException;
+import java.util.Iterator;
+import java.util.NoSuchElementException;
+
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.HTablePool;
+import org.apache.hadoop.hbase.client.Result;
+
+public class RowResultGenerator extends ResultGenerator {
+  private Iterator<KeyValue> valuesI;
+
+  public RowResultGenerator(String tableName, RowSpec rowspec)
+      throws IllegalArgumentException, IOException {
+    HTablePool pool = RESTServlet.getInstance().getTablePool(tableName); 
+    HTable table = pool.get();
+    try {
+      Get get = new Get(rowspec.getRow());
+      if (rowspec.hasColumns()) {
+        get.addColumns(rowspec.getColumns());
+      } else {
+        // rowspec does not explicitly specify columns, return them all
+        for (HColumnDescriptor family: 
+            table.getTableDescriptor().getFamilies()) {
+          get.addFamily(family.getName());
+        }
+      }
+      if (rowspec.hasTimestamp()) {
+        // the time range maximum is exclusive, so for a single stamp
+        // request that exact version rather than the range [0, stamp)
+        get.setTimeStamp(rowspec.getTimestamp());
+      } else {
+        get.setTimeRange(rowspec.getStartTime(), rowspec.getEndTime());
+      }
+      get.setMaxVersions(rowspec.getMaxVersions());
+      Result result = table.get(get);
+      if (result != null && !result.isEmpty()) {
+        valuesI = result.list().iterator();
+      }
+    } finally {
+      pool.put(table);
+    }
+  }
+
+  public void close() {
+  }
+
+  public boolean hasNext() {
+    if (valuesI == null) {
+      return false;
+    }
+    return valuesI.hasNext();
+  }
+
+  public KeyValue next() {
+    if (valuesI == null) {
+      return null;
+    }
+    try {
+      return valuesI.next();
+    } catch (NoSuchElementException e) {
+      return null;
+    }
+  }
+
+  public void remove() {
+    throw new UnsupportedOperationException("remove not supported");
+  }
+}
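
Illustrative note, not part of the patch: a sketch of how the resource
layer drives this generator, assuming an initialized RESTServlet and a
hypothetical table and row:

    import org.apache.hadoop.hbase.KeyValue;
    import org.apache.hadoop.hbase.stargate.RowResultGenerator;
    import org.apache.hadoop.hbase.stargate.RowSpec;

    public class RowResultGeneratorExample {
      public static void main(String[] args) throws Exception {
        RowSpec spec = new RowSpec("/myrow/info:name");
        RowResultGenerator gen = new RowResultGenerator("mytable", spec);
        while (gen.hasNext()) {
          KeyValue kv = gen.next();
          // each KeyValue carries row, column, timestamp, and value
        }
        gen.close();
      }
    }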

Added: hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/RowSpec.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/RowSpec.java?rev=789136&view=auto
==============================================================================
--- hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/RowSpec.java (added)
+++ hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/RowSpec.java Sun Jun 28 18:21:45 2009
@@ -0,0 +1,310 @@
+/*
+ * Copyright 2009 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.stargate;
+
+import java.util.Collection;
+import java.util.TreeSet;
+
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.util.Bytes;
+
+public class RowSpec {
+  public static final long DEFAULT_START_TIMESTAMP = 0;
+  public static final long DEFAULT_END_TIMESTAMP = Long.MAX_VALUE;
+
+  private byte[] row = HConstants.EMPTY_START_ROW;
+  private byte[] endRow = null;
+  private TreeSet<byte[]> columns =
+    new TreeSet<byte[]>(Bytes.BYTES_COMPARATOR);
+  private long startTime = DEFAULT_START_TIMESTAMP;
+  private long endTime = DEFAULT_END_TIMESTAMP;
+  private int maxVersions = HColumnDescriptor.DEFAULT_VERSIONS;
+
+  public RowSpec(String path) throws IllegalArgumentException {
+    int i = 0;
+    while (i < path.length() && path.charAt(i) == '/') {
+      i++;
+    }
+    i = parseRowKeys(path, i);
+    i = parseColumns(path, i);
+    i = parseTimestamp(path, i);
+  }
+
+  private int parseRowKeys(String path, int i)
+      throws IllegalArgumentException {
+    StringBuilder startRow = new StringBuilder();
+    StringBuilder endRow = null;
+    try {
+      char c;
+      boolean doEndRow = false;
+      while (i < path.length() && (c = path.charAt(i)) != '/') {
+        if (c == ',') {
+          doEndRow = true;
+          i++;
+          break;
+        }
+        startRow.append(c);
+        i++;
+      }
+      i++;
+      this.row = Bytes.toBytes(startRow.toString());
+      if (doEndRow) {
+        endRow = new StringBuilder();
+        while (i < path.length() && (c = path.charAt(i)) != '/') {
+          endRow.append(c);
+          i++;
+        }
+        i++;
+      }
+    } catch (IndexOutOfBoundsException e) {
+      throw new IllegalArgumentException(e);
+    }
+    // HBase does not support wildcards on row keys so we will emulate a
+    // suffix glob by synthesizing appropriate start and end row keys for
+    // table scanning
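+    // e.g. a start row of "abc*" scans from "abc" up to "abc\xFF"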
+    if (startRow.length() > 0 &&
+        startRow.charAt(startRow.length() - 1) == '*') {
+      if (endRow != null)
+        throw new IllegalArgumentException("invalid path: start row "+
+          "specified with wildcard");
+      this.row = Bytes.toBytes(startRow.substring(0, 
+                   startRow.lastIndexOf("*")));
+      this.endRow = new byte[this.row.length + 1];
+      System.arraycopy(this.row, 0, this.endRow, 0, this.row.length);
+      this.endRow[this.row.length] = (byte)255;
+    } else {
+      this.row = Bytes.toBytes(startRow.toString());
+      if (endRow != null) {
+        this.endRow = Bytes.toBytes(endRow.toString());
+      }
+    }
+    return i;
+  }
+
+  private int parseColumns(String path, int i)
+      throws IllegalArgumentException {
+    if (i >= path.length()) {
+      return i;
+    }
+    try {
+      char c;
+      StringBuilder column = new StringBuilder();
+      boolean hasColon = false;
+      while (i < path.length() && (c = path.charAt(i)) != '/') {
+        if (c == ',') {
+          if (column.length() < 1) {
+            throw new IllegalArgumentException("invalid path");
+          }
+          if (!hasColon) {
+            column.append(':');
+          }
+          this.columns.add(Bytes.toBytes(column.toString()));
+          column = new StringBuilder();
+          hasColon = false;
+          i++;
+          continue;
+        }
+        if (c == ':') {
+          hasColon = true;
+        }
+        column.append(c);
+        i++;
+      }
+      i++;
+      // trailing list entry
+      if (column.length() > 0) {
+        if (!hasColon) {
+          column.append(':');
+        }
+        this.columns.add(Bytes.toBytes(column.toString()));
+      }
+    } catch (IndexOutOfBoundsException e) {
+      throw new IllegalArgumentException(e);
+    }
+    return i;
+  }
+
+  private int parseTimestamp(String path, int i)
+      throws IllegalArgumentException {
+    if (i >= path.length()) {
+      return i;
+    }
+    long time0 = 0, time1 = 0;
+    try {
+      char c = 0;
+      StringBuilder stamp = new StringBuilder();
+      while (i < path.length()) {
+        c = path.charAt(i);
+        if (c == '/' || c == ',') {
+          break;
+        }
+        stamp.append(c);
+        i++;
+      }
+      try {
+        time0 = Long.valueOf(stamp.toString());
+      } catch (NumberFormatException e) {
+        throw new IllegalArgumentException(e);
+      }
+      if (c == ',') {
+        stamp = new StringBuilder();
+        i++;
+        while (i < path.length() && ((c = path.charAt(i)) != '/')) {
+          stamp.append(c);
+          i++;
+        }
+        try {
+          time1 = Long.valueOf(stamp.toString());
+        } catch (NumberFormatException e) {
+          throw new IllegalArgumentException(e);
+        }
+      }
+      if (c == '/') {
+        i++;
+      }
+    } catch (IndexOutOfBoundsException e) {
+      throw new IllegalArgumentException(e);
+    }
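+    // a single stamp "T" sets only the end time; "T0,T1" sets a range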
+    if (time1 != 0) {
+      startTime = time0;
+      endTime = time1;
+    } else {
+      endTime = time0;
+    }
+    return i;
+  }
+
+  public RowSpec(byte[] startRow, byte[] endRow, byte[][] columns,
+      long startTime, long endTime, int maxVersions) {
+    this.row = startRow;
+    this.endRow = endRow;
+    if (columns != null) {
+      for (byte[] col: columns) {
+        this.columns.add(col);
+      }
+    }
+    this.startTime = startTime;
+    this.endTime = endTime;
+    this.maxVersions = maxVersions;
+  }
+
+  public RowSpec(byte[] startRow, byte[] endRow, Collection<byte[]> columns,
+      long startTime, long endTime, int maxVersions) {
+    this.row = startRow;
+    this.endRow = endRow;
+    if (columns != null) {
+      this.columns.addAll(columns);
+    }
+    this.startTime = startTime;
+    this.endTime = endTime;
+    this.maxVersions = maxVersions;
+  }
+
+  public boolean isSingleRow() {
+    return endRow == null;
+  }
+
+  public int getMaxVersions() {
+    return maxVersions;
+  }
+
+  public void setMaxVersions(int maxVersions) {
+    this.maxVersions = maxVersions;
+  }
+
+  public boolean hasColumns() {
+    return !columns.isEmpty();
+  }
+
+  public byte[] getRow() {
+    return row;
+  }
+
+  public byte[] getStartRow() {
+    return row;
+  }
+
+  public boolean hasEndRow() {
+    return endRow != null;
+  }
+
+  public byte[] getEndRow() {
+    return endRow;
+  }
+
+  public void addColumn(byte[] column) {
+    columns.add(column);
+  }
+
+  public byte[][] getColumns() {
+    return columns.toArray(new byte[columns.size()][]);
+  }
+
+  public boolean hasTimestamp() {
+    return (startTime == 0) && (endTime != Long.MAX_VALUE);
+  }
+
+  public long getTimestamp() {
+    return endTime;
+  }
+
+  public long getStartTime() {
+    return startTime;
+  }
+
+  public void setStartTime(long startTime) {
+    this.startTime = startTime;
+  }
+
+  public long getEndTime() {
+    return endTime;
+  }
+
+  public void setEndTime(long endTime) {
+    this.endTime = endTime;
+  }
+
+  public String toString() {
+    StringBuilder result = new StringBuilder();
+    result.append("{startRow => '");
+    if (row != null) {
+      result.append(Bytes.toString(row));
+    }
+    result.append("', endRow => '");
+    if (endRow != null)  {
+      result.append(Bytes.toString(endRow));
+    }
+    result.append("', columns => [");
+    for (byte[] col: columns) {
+      result.append(" '");
+      result.append(Bytes.toString(col));
+      result.append("'");
+    }
+    result.append(" ], startTime => ");
+    result.append(Long.toString(startTime));
+    result.append(", endTime => ");
+    result.append(Long.toString(endTime));
+    result.append(", maxVersions => ");
+    result.append(Integer.toString(maxVersions));
+    result.append("}");
+    return result.toString();
+  }
+}
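
Illustrative note, not part of the patch: the path grammar accepted by the
parser above, shown with hypothetical row and column names:

    import org.apache.hadoop.hbase.stargate.RowSpec;

    public class RowSpecExample {
      public static void main(String[] args) {
        // /<row>[,<endrow>]/<column>[,<column>...]/<stamp>[,<endstamp>]
        RowSpec single = new RowSpec("/row1/info:name");
        RowSpec range  = new RowSpec("/row1,row9/info:name,info:age");
        // a trailing '*' on the row is a suffix glob: it expands into a
        // start/end row pair covering every row with that prefix
        RowSpec glob   = new RowSpec("/row*/info:name/1,9999");
        System.out.println(single);
        System.out.println(range);
        System.out.println(glob);
      }
    }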

Added: hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/ScannerInstanceResource.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/ScannerInstanceResource.java?rev=789136&view=auto
==============================================================================
--- hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/ScannerInstanceResource.java (added)
+++ hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/ScannerInstanceResource.java Sun Jun 28 18:21:45 2009
@@ -0,0 +1,145 @@
+/*
+ * Copyright 2009 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.stargate;
+
+import java.io.IOException;
+
+import javax.ws.rs.DELETE;
+import javax.ws.rs.GET;
+import javax.ws.rs.Produces;
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.CacheControl;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.Response.ResponseBuilder;
+import javax.ws.rs.core.UriInfo;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.stargate.model.CellModel;
+import org.apache.hadoop.hbase.stargate.model.CellSetModel;
+import org.apache.hadoop.hbase.stargate.model.RowModel;
+import org.apache.hadoop.hbase.util.Bytes;
+
+import com.sun.jersey.core.util.Base64;
+
+public class ScannerInstanceResource implements Constants {
+  private static final Log LOG =
+    LogFactory.getLog(ScannerInstanceResource.class);
+
+  protected ResultGenerator generator;
+  private String id;
+  private int batch;
+  private CacheControl cacheControl;
+
+  public ScannerInstanceResource(String table, String id, 
+      ResultGenerator generator, int batch) throws IOException {
+    this.id = id;
+    this.generator = generator;
+    this.batch = batch;
+    cacheControl = new CacheControl();
+    cacheControl.setNoCache(true);
+    cacheControl.setNoTransform(false);
+  }
+
+  @GET
+  @Produces({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_JAVASCRIPT,
+    MIMETYPE_PROTOBUF})
+  public Response get(@Context UriInfo uriInfo) {
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("GET " + uriInfo.getAbsolutePath());
+    }
+    CellSetModel model = new CellSetModel();
+    RowModel rowModel = null;
+    byte[] rowKey = null;
+    int count = batch;
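+    // accumulate up to 'batch' cells, starting a new RowModel whenever
+    // the row key changes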
+    do {
+      KeyValue value = null;
+      try {
+        value = generator.next();
+      } catch (IllegalStateException e) {
+        ScannerResource.delete(id);
+        throw new WebApplicationException(Response.Status.GONE);
+      }
+      if (value == null) {
+        LOG.info("generator exhausted");
+        // respond with 204 (No Content) if an empty cell set would be
+        // returned
+        if (count == batch) {
+          return Response.noContent().build();
+        }
+        break;
+      }
+      if (rowKey == null) {
+        rowKey = value.getRow();
+        rowModel = new RowModel(rowKey);
+      }
+      if (!Bytes.equals(value.getRow(), rowKey)) {
+        model.addRow(rowModel);
+        rowKey = value.getRow();
+        rowModel = new RowModel(rowKey);
+      }
+      rowModel.addCell(
+        new CellModel(value.getColumn(), value.getTimestamp(),
+              value.getValue()));
+    } while (--count > 0);
+    model.addRow(rowModel);
+    ResponseBuilder response = Response.ok(model);
+    response.cacheControl(cacheControl);
+    return response.build();
+  }
+
+  @GET
+  @Produces(MIMETYPE_BINARY)
+  public Response getBinary(@Context UriInfo uriInfo) {
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("GET " + uriInfo.getAbsolutePath() + " as " +
+        MIMETYPE_BINARY);
+    }
+    try {
+      KeyValue value = generator.next();
+      if (value == null) {
+        LOG.info("generator exhausted");
+        return Response.noContent().build();
+      }
+      ResponseBuilder response = Response.ok(value.getValue());
+      response.cacheControl(cacheControl);
+      response.header("X-Row", Base64.encode(value.getRow()));
+      response.header("X-Column", Base64.encode(value.getColumn()));
+      response.header("X-Timestamp", value.getTimestamp());
+      return response.build();
+    } catch (IllegalStateException e) {
+      ScannerResource.delete(id);
+      throw new WebApplicationException(Response.Status.GONE);
+    }
+  }
+
+  @DELETE
+  public Response delete(@Context UriInfo uriInfo) {
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("DELETE " + uriInfo.getAbsolutePath());
+    }
+    ScannerResource.delete(id);
+    return Response.ok().build();
+  }
+}
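
Illustrative note, not part of the patch: a client-side fetch loop against
this resource, assuming a scanner instance was already created and that
Stargate listens on a hypothetical host and port:

    import java.net.HttpURLConnection;
    import java.net.URL;

    public class ScannerFetchExample {
      public static void main(String[] args) throws Exception {
        URL url = new URL("http://localhost:8080/mytable/scanner/abc123");
        while (true) {
          HttpURLConnection conn = (HttpURLConnection) url.openConnection();
          conn.setRequestProperty("Accept", "text/xml");
          if (conn.getResponseCode() == 204) {
            break;  // 204 No Content: the scanner is exhausted
          }
          // ... read one batch of up to 'batch' cells from
          // conn.getInputStream() ...
          conn.disconnect();
        }
      }
    }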

Added: hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/ScannerResource.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/ScannerResource.java?rev=789136&view=auto
==============================================================================
--- hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/ScannerResource.java (added)
+++ hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/ScannerResource.java Sun Jun 28 18:21:45 2009
@@ -0,0 +1,125 @@
+/*
+ * Copyright 2009 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.stargate;
+
+import com.google.protobuf.InvalidProtocolBufferException;
+
+import java.io.IOException;
+import java.net.URI;
+import java.util.HashMap;
+import java.util.Map;
+
+import javax.ws.rs.Consumes;
+import javax.ws.rs.POST;
+import javax.ws.rs.PUT;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.UriBuilder;
+import javax.ws.rs.core.UriInfo;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+import org.apache.hadoop.hbase.stargate.model.ScannerModel;
+
+public class ScannerResource implements Constants {
+
+  private static final Log LOG = LogFactory.getLog(ScannerResource.class);
+  protected static final Map<String,ScannerInstanceResource> scanners = 
+    new HashMap<String,ScannerInstanceResource>();
+
+  private String table;
+
+  public ScannerResource(String table) {
+    this.table = table;
+  }
+
+  private Response update(ScannerModel model, boolean replace,
+      UriInfo uriInfo) {
+    try {
+      byte[] endRow = model.hasEndRow() ? model.getEndRow() : null;
+      RowSpec spec = new RowSpec(model.getStartRow(), endRow,
+        model.getColumns(), model.getStartTime(), model.getEndTime(), 1);
+      ScannerResultGenerator gen = new ScannerResultGenerator(table, spec);
+      String id = gen.getID();
+      ScannerInstanceResource instance = 
+        new ScannerInstanceResource(table, id, gen, model.getBatch());
+      synchronized (scanners) {
+        scanners.put(id, instance);
+      }
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("new scanner: " + id);
+      }
+      UriBuilder builder = uriInfo.getAbsolutePathBuilder();
+      URI uri = builder.path(id).build();
+      return Response.created(uri).build();
+    } catch (InvalidProtocolBufferException e) {
+      throw new WebApplicationException(e, Response.Status.BAD_REQUEST);
+    } catch (IOException e) {
+      throw new WebApplicationException(e,
+              Response.Status.SERVICE_UNAVAILABLE);
+    }
+  }
+
+  @PUT
+  @Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_JAVASCRIPT,
+    MIMETYPE_PROTOBUF})
+  public Response put(ScannerModel model, @Context UriInfo uriInfo) {
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("PUT " + uriInfo.getAbsolutePath());
+    }
+    return update(model, true, uriInfo);
+  }
+
+  @POST
+  @Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_JAVASCRIPT,
+    MIMETYPE_PROTOBUF})
+  public Response post(ScannerModel model, @Context UriInfo uriInfo) {
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("POST " + uriInfo.getAbsolutePath());
+    }
+    return update(model, false, uriInfo);
+  }
+
+  @Path("{scanner: .+}")
+  public ScannerInstanceResource getScannerInstanceResource(
+      @PathParam("scanner") String id) {
+    synchronized (scanners) {
+      ScannerInstanceResource instance = scanners.get(id);
+      if (instance == null) {
+        throw new WebApplicationException(Response.Status.NOT_FOUND);
+      }
+      return instance;
+    }
+  }
+
+  static void delete(String id) {
+    synchronized (scanners) {
+      ScannerInstanceResource instance = scanners.remove(id);
+      if (instance != null) {
+        instance.generator.close();
+      }
+    }
+  }
+}
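
Illustrative note, not part of the patch: the scanner lifecycle as seen by
a client. The host, port, and XML attribute names are assumptions about
ScannerModel's serialized form, not taken from this patch:

    import java.io.OutputStream;
    import java.net.HttpURLConnection;
    import java.net.URL;

    public class ScannerCreateExample {
      public static void main(String[] args) throws Exception {
        URL url = new URL("http://localhost:8080/mytable/scanner");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("PUT");
        conn.setDoOutput(true);
        conn.setRequestProperty("Content-Type", "text/xml");
        OutputStream out = conn.getOutputStream();
        out.write("<Scanner batch=\"10\"/>".getBytes());
        out.close();
        // expect 201 Created; the Location header addresses the new
        // instance, which is then read with GET and released with DELETE
        System.out.println(conn.getHeaderField("Location"));
      }
    }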

Added: hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/ScannerResultGenerator.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/ScannerResultGenerator.java?rev=789136&view=auto
==============================================================================
--- hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/ScannerResultGenerator.java (added)
+++ hadoop/hbase/trunk/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/ScannerResultGenerator.java Sun Jun 28 18:21:45 2009
@@ -0,0 +1,140 @@
+/*
+ * Copyright 2009 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.stargate;
+
+import java.io.IOException;
+import java.util.Iterator;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.UnknownScannerException;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.HTablePool;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.ResultScanner;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.util.StringUtils;
+
+public class ScannerResultGenerator extends ResultGenerator {
+  private static final Log LOG =
+    LogFactory.getLog(ScannerResultGenerator.class);
+  
+  private String id;
+  private Iterator<KeyValue> rowI;
+  private ResultScanner scanner;
+  private Result cached;
+
+  public ScannerResultGenerator(String tableName, RowSpec rowspec)
+      throws IllegalArgumentException, IOException {
+    HTablePool pool = RESTServlet.getInstance().getTablePool(tableName); 
+    HTable table = pool.get();
+    try {
+      Scan scan;
+      if (rowspec.hasEndRow()) {
+        scan = new Scan(rowspec.getStartRow(), rowspec.getEndRow());
+      } else {
+        scan = new Scan(rowspec.getStartRow());
+      }
+      if (rowspec.hasColumns()) {
+        scan.addColumns(rowspec.getColumns());
+      } else {
+        for (HColumnDescriptor family: 
+            table.getTableDescriptor().getFamilies()) {
+          scan.addFamily(family.getName());
+        }
+      }
+      scan.setTimeRange(rowspec.getStartTime(), rowspec.getEndTime());          
+      scan.setMaxVersions(rowspec.getMaxVersions());
+      scanner = table.getScanner(scan);
+      cached = null;
+      id = Long.toString(System.currentTimeMillis()) +
+             Integer.toHexString(scanner.hashCode());
+    } finally {
+      pool.put(table);
+    }
+  }
+
+  public String getID() {
+    return id;
+  }
+
+  public void close() {
+    // release the scanner and its server-side lease
+    scanner.close();
+  }
+
+  public boolean hasNext() {
+    if (rowI != null && rowI.hasNext()) {
+      return true;
+    }
+    if (cached != null) {
+      return true;
+    }
+    try {
+      Result result = scanner.next();
+      if (result != null && !result.isEmpty()) {
+        cached = result;
+      }
+    } catch (UnknownScannerException e) {
+      // the scanner lease expired; let the resource layer respond 410 Gone
+      throw new IllegalStateException(e);
+    } catch (IOException e) {
+      LOG.error(StringUtils.stringifyException(e));
+    }
+    return cached != null;
+  }
+
+  public KeyValue next() {
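+    // drain the current row's iterator first, then hand off any Result
+    // cached by hasNext(), and finally advance the scanner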
+    boolean loop;
+    do {
+      loop = false;
+      if (rowI != null) {
+        if (rowI.hasNext()) {
+          return rowI.next();
+        } else {
+          rowI = null;
+        }
+      }
+      if (cached != null) {
+        rowI = cached.list().iterator();
+        loop = true;
+        cached = null;
+      } else {
+        Result result = null;
+        try {
+          result = scanner.next();
+        } catch (UnknownScannerException e) {
+          // the scanner lease expired; let the resource layer respond
+          // 410 Gone
+          throw new IllegalStateException(e);
+        } catch (IOException e) {
+          LOG.error(StringUtils.stringifyException(e));
+        }
+        if (result != null && !result.isEmpty()) {
+          rowI = result.list().iterator();
+          loop = true;
+        }
+      }
+    } while (loop);
+    return null;
+  }
+
+  public void remove() {
+    throw new UnsupportedOperationException("remove not supported");
+  }
+}
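
Illustrative note, not part of the patch: driving the generator directly,
assuming an initialized RESTServlet and hypothetical table and row names:

    import org.apache.hadoop.hbase.KeyValue;
    import org.apache.hadoop.hbase.stargate.RowSpec;
    import org.apache.hadoop.hbase.stargate.ScannerResultGenerator;

    public class ScannerResultGeneratorExample {
      public static void main(String[] args) throws Exception {
        RowSpec spec = new RowSpec("/row*/info:name");
        ScannerResultGenerator gen =
          new ScannerResultGenerator("mytable", spec);
        while (gen.hasNext()) {
          KeyValue kv = gen.next();  // spans row boundaries transparently
        }
        gen.close();
      }
    }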


