incubator-hcatalog-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From tof...@apache.org
Subject svn commit: r1371536 [1/2] - in /incubator/hcatalog/branches/branch-0.4: ./ webhcat/ webhcat/java-client/ webhcat/java-client/src/ webhcat/java-client/src/main/ webhcat/java-client/src/main/java/ webhcat/java-client/src/main/java/org/ webhcat/java-clie...
Date Fri, 10 Aug 2012 00:33:09 GMT
Author: toffer
Date: Fri Aug 10 00:33:08 2012
New Revision: 1371536

URL: http://svn.apache.org/viewvc?rev=1371536&view=rev
Log:
backported from trunk: HCAT-419 Java APIs for HCatalog DDL commands (avandana via fcliu)

Added:
    incubator/hcatalog/branches/branch-0.4/webhcat/
    incubator/hcatalog/branches/branch-0.4/webhcat/java-client/
    incubator/hcatalog/branches/branch-0.4/webhcat/java-client/build.xml
    incubator/hcatalog/branches/branch-0.4/webhcat/java-client/ivy.xml
    incubator/hcatalog/branches/branch-0.4/webhcat/java-client/src/
    incubator/hcatalog/branches/branch-0.4/webhcat/java-client/src/main/
    incubator/hcatalog/branches/branch-0.4/webhcat/java-client/src/main/java/
    incubator/hcatalog/branches/branch-0.4/webhcat/java-client/src/main/java/org/
    incubator/hcatalog/branches/branch-0.4/webhcat/java-client/src/main/java/org/apache/
    incubator/hcatalog/branches/branch-0.4/webhcat/java-client/src/main/java/org/apache/hcatalog/
    incubator/hcatalog/branches/branch-0.4/webhcat/java-client/src/main/java/org/apache/hcatalog/api/
    incubator/hcatalog/branches/branch-0.4/webhcat/java-client/src/main/java/org/apache/hcatalog/api/ConnectionFailureException.java
    incubator/hcatalog/branches/branch-0.4/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatAddPartitionDesc.java
    incubator/hcatalog/branches/branch-0.4/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatClient.java
    incubator/hcatalog/branches/branch-0.4/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatClientHMSImpl.java
    incubator/hcatalog/branches/branch-0.4/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatCreateDBDesc.java
    incubator/hcatalog/branches/branch-0.4/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatCreateTableDesc.java
    incubator/hcatalog/branches/branch-0.4/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatDatabase.java
    incubator/hcatalog/branches/branch-0.4/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatPartition.java
    incubator/hcatalog/branches/branch-0.4/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatTable.java
    incubator/hcatalog/branches/branch-0.4/webhcat/java-client/src/test/
    incubator/hcatalog/branches/branch-0.4/webhcat/java-client/src/test/java/
    incubator/hcatalog/branches/branch-0.4/webhcat/java-client/src/test/java/org/
    incubator/hcatalog/branches/branch-0.4/webhcat/java-client/src/test/java/org/apache/
    incubator/hcatalog/branches/branch-0.4/webhcat/java-client/src/test/java/org/apache/hcatalog/
    incubator/hcatalog/branches/branch-0.4/webhcat/java-client/src/test/java/org/apache/hcatalog/api/
    incubator/hcatalog/branches/branch-0.4/webhcat/java-client/src/test/java/org/apache/hcatalog/api/TestHCatClient.java
Modified:
    incubator/hcatalog/branches/branch-0.4/CHANGES.txt
    incubator/hcatalog/branches/branch-0.4/build.xml

Modified: incubator/hcatalog/branches/branch-0.4/CHANGES.txt
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/CHANGES.txt?rev=1371536&r1=1371535&r2=1371536&view=diff
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/CHANGES.txt (original)
+++ incubator/hcatalog/branches/branch-0.4/CHANGES.txt Fri Aug 10 00:33:08 2012
@@ -23,6 +23,8 @@ Trunk (unreleased changes)
   INCOMPATIBLE CHANGES
 
   NEW FEATURES
+  HCAT-419 Java APIs for HCatalog DDL commands (avandana via fcliu)
+
   HCAT-328 HCatLoader should report its input size so pig can estimate the number of reducers (traviscrawford via gates)
   
   HCAT-427 Document storage-based authorization (lefty via gates)

Modified: incubator/hcatalog/branches/branch-0.4/build.xml
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/build.xml?rev=1371536&r1=1371535&r2=1371536&view=diff
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/build.xml (original)
+++ incubator/hcatalog/branches/branch-0.4/build.xml Fri Aug 10 00:33:08 2012
@@ -306,7 +306,7 @@
   Build both clientjar and server-extensions
   ================================================================================
   -->
-  <target name="jar" depends="clientjar,server-extensions,jar-storage-handlers"/>
+  <target name="jar" depends="clientjar,server-extensions,jar-storage-handlers,jar-webhcat-java-client"/>
 
   <!--
   ================================================================================
@@ -320,6 +320,16 @@
 
   <!--
   ================================================================================
+  Build WebHCatalog java client.
+  ================================================================================
+  -->
+
+  <target name="jar-webhcat-java-client">    
+    <ant target="jar" dir="webhcat/java-client" inheritAll="false"/>
+  </target>
+
+  <!--
+  ================================================================================
   Test Section
   ================================================================================
   -->
@@ -383,6 +393,8 @@
     </sequential>
     <!-- test storage handlers -->
     <antcall target="test-storage-handlers"/>
+    <!-- Test web hcatalog java client -->
+   <ant target="test" dir="webhcat/java-client" inheritAll="false"/>
   </target>
   
   <!--
@@ -487,6 +499,7 @@
     <delete dir="${build.dir}" />
     <delete dir="${test.warehouse.dir}"/>
     <ant target="clean" dir="storage-handlers" inheritAll="false" useNativeBasedir="true"/>
+    <ant target="clean" dir="webhcat/java-client" inheritAll="false"/>
   </target>
  
   <!--
@@ -520,6 +533,7 @@
              doctitle="HCatalog ${hcatalog.version} API"
              failonerror="true">
       <packageset dir="${src.dir}" />
+      <packageset dir="webhcat/java-client/src/main/java" />
         <classpath>
           <path refid="classpath" />
         </classpath>
@@ -545,6 +559,19 @@
     </copy>
   </target>
 
+  <target name="package-webhcat-java-client">
+    <property name="javaclient.dir" value="${dist.dir}/share/hcatalog/webhcat/java-client"/>
+    <mkdir dir="${javaclient.dir}"/>
+    <ant target="package" dir="webhcat/java-client" inheritAll="false">
+        <property name="dist.javaclient.dir" value="${javaclient.dir}"/>
+    </ant>
+    <copy todir="${dist.dir}/share/${ant.project.name}/lib" includeEmptyDirs="false" flatten="true">
+      <fileset dir="${dist.dir}/share/${ant.project.name}/webhcat/java-client">
+        <include name="*/lib/*"/>
+      </fileset>
+    </copy>
+  </target>
+
   <target name="package" depends="jar, docs" description="Create an HCatalog release">
     <mkdir dir="${dist.dir}" />
     <mkdir dir="${dist.dir}/share/${ant.project.name}/lib" />
@@ -624,6 +651,7 @@
     </chmod>
        <!--package storage-handlers -->
       <antcall target="package-storage-handlers"/>
+      <antcall target="package-webhcat-java-client"/>
     </target>
 
     <target name="releaseaudit" depends="ivy-releaseaudit" description="Release Audit activities">
@@ -647,6 +675,7 @@
                 <include name="shims/**"/>
                 <include name="src/**"/>
                 <include name="storage-handlers/**"/>
+                <include name="webhcat/**"/>
                 <include name="*.txt"/>
                 <include name="*.xml"/>
                 <include name="KEYS"/>

Added: incubator/hcatalog/branches/branch-0.4/webhcat/java-client/build.xml
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/webhcat/java-client/build.xml?rev=1371536&view=auto
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/webhcat/java-client/build.xml (added)
+++ incubator/hcatalog/branches/branch-0.4/webhcat/java-client/build.xml Fri Aug 10 00:33:08 2012
@@ -0,0 +1,289 @@
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<project name="webhcat-java-client" xmlns:ivy="antlib:org.apache.ivy.ant">
+  <property name="hcatalog.version" value="0.4.1"/>
+  <property name="path.to.basedir" location="${basedir}/../.."/>
+  <property name="lib.dir" value="${basedir}/lib/" />
+  <property name="src.dir"  location="${basedir}/src/main/java"/>
+  <property name="docs.src" value="${basedir}/src/docs"/>
+  <property name="build.dir" value="${basedir}/build"/>
+  <property name="build.classes" value="${build.dir}/classes" />
+  <property name="build.docs" value="${build.dir}/docs" />
+  <property name="build.javadoc" value="${build.docs}/api" />
+  <property name="dist.dir" value="${build.dir}/${ant.project.name}" />
+  <property name="jar.name" value="${ant.project.name}.jar" /> 
+ 
+  <!-- javac properties -->
+  <property name="build.encoding" value="UTF8" />
+  <property name="excludes" value=""/>
+  <property name="javac.debug" value="on" />
+  <property name="javac.optimize" value="on" />
+  <property name="javac.deprecation" value="off" />
+  <property name="javac.version" value="1.6" />
+  <property name="javac.args" value="" />
+  
+    <!-- test properties -->
+  <property name="test.src.dir" value="${basedir}/src/test" />
+  <property name="test.build.dir" value="${build.dir}/test" />
+  <property name="test.build.classes" value="${test.build.dir}/classes" />
+  <property name="test.log.dir" value="${test.build.dir}/logs" />
+  <property name="test.timeout" value="2700000" />
+  <property name="test.junit.output.format" value="plain" />
+  <property name="test.output" value="no"/>
+  <property name="test.warehouse.dir" value="/tmp/hcat_junit_warehouse"/>  
+ 
+  <property name="ivy.repo.dir" value="${user.home}/ivyrepo" />
+  <property name="ivy.dir" location="${path.to.basedir}/ivy" />
+  <loadproperties srcfile="${path.to.basedir}/ivy/libraries.properties"/>
+  <property name="asfrepo" value="https://repository.apache.org"/>
+  <property name="asfsnapshotrepo" value="${asfrepo}/content/repositories/snapshots"/>
+  <property name="mvnrepo" value="http://repo2.maven.org/maven2"/>
+  <property name="asfstagingrepo" value="${asfrepo}/service/local/staging/deploy/maven2"/>
+  <property name="ivy.jar" location="${ivy.dir}/ivy-${ivy.version}.jar"/>
+  <property name="ant_task.jar" location="${ivy.dir}/maven-ant-tasks-${ant-task.version}.jar"/>
+  <property name="ant_task_repo_url"
+              value="${mvnrepo}/org/apache/maven/maven-ant-tasks/${ant-task.version}/maven-ant-tasks-${ant-task.version}.jar"/>
+  <property name="ivy_repo_url" value="${mvnrepo}/org/apache/ivy/ivy/${ivy.version}/ivy-${ivy.version}.jar"/>
+  <property name="ivysettings.xml" location="${ivy.dir}/ivysettings.xml" />
+  <property name="build.ivy.dir" location="${build.dir}/ivy" />
+  <property name="build.ivy.lib.dir" location="${build.ivy.dir}/lib" />
+  <property name="ivy.lib.dir" location="${build.ivy.lib.dir}/${ant.project.name}"/>
+  <property name="build.ivy.report.dir" location="${build.ivy.dir}/report" />
+
+  <!-- rats properties -->
+  <property name="rat.reporting.classname" value="rat.Report"/>
+
+  <!--this is the naming policy for artifacts we want pulled down-->
+  <property name="ivy.artifact.retrieve.pattern" value="${ant.project.name}/[artifact]-[revision](-[classifier]).[ext]"/>
+
+  <import file="../../build-common.xml"/>
+  
+  <target name="init" depends="ivy-compile" > 
+        <mkdir dir="${dist.dir}" />
+        <mkdir dir="${build.classes}" />
+        <mkdir dir="${test.build.classes}" />
+  </target>
+   
+   <!--
+    ================================================================================
+    Main Build and Jar Section
+    ================================================================================
+    -->
+    <!-- Compile src files -->
+    <path id="compile.class.path">
+    	<fileset dir="${build.dir}/ivy/lib/webhcat-java-client" includes="**/*.jar"/>
+	<dirset dir="../../build/classes"/>
+    </path>
+    <target name="compile-src" depends="init">
+        <javac encoding="${build.encoding}" srcdir="${src.dir}:${basedir}/src/main/java" excludes="${excludes}"
+               includes="**/*.java" destdir="${build.classes}" debug="${javac.debug}"
+               optimize="${javac.optimize}" target="${javac.version}"
+               source="${javac.version}" deprecation="${javac.deprecation}"
+               includeantruntime="false">
+            <compilerarg line="${javac.args}"/>
+            <classpath refid="compile.class.path" />
+        </javac>
+    </target>
+
+    <!-- Build the jar -->
+    <target name="jar" depends="compile-src">
+    	<echo message="${ant.project.name}"/>
+    	<jar jarfile="${build.dir}/${jar.name}" basedir="${build.classes}"/>
+    </target>
+
+  <!--
+    ================================================================================
+    Test Section
+    ================================================================================
+  -->
+    <!-- Build test files -->
+    <path id="test.class.path">
+     <fileset dir="${build.dir}/ivy/lib" includes="**/*.jar"/>
+     <dirset dir="${path.to.basedir}/build/test/classes"/>
+     <dirset dir="${basedir}/build/classes"/>
+     <dirset dir="${basedir}/build/test/classes"/>
+     <dirset dir="${path.to.basedir}/build/classes"/>
+   </path>
+
+    <target name="compile-test" depends="jar">
+        <javac encoding="${build.encoding}" srcdir="${test.src.dir}" excludes="${excludes}"
+               includes="**/*.java" destdir="${test.build.classes}" debug="${javac.debug}"
+               optimize="${javac.optimize}" target="${javac.version}"
+               source="${javac.version}" deprecation="${javac.deprecation}"
+               includeantruntime="false">
+            <compilerarg line="${javac.args}"/>
+            <classpath refid="test.class.path" />
+        </javac>
+    </target>
+
+  <path id="findbugs.class.path">
+    <fileset dir="${build.dir}/ivy/lib/default" includes="*.jar"/>
+  </path>
+  
+  <target name="clean">
+    <echo message="${ant.project.name}"/>
+    <delete dir="${build.dir}"/>
+    <delete dir="${test.warehouse.dir}"/>
+  </target>
+
+  <!-- Run the unit tests -->
+    <target name="test" depends="compile-test">
+        <delete dir="${test.warehouse.dir}"/>
+	<delete dir="${test.log.dir}"/>
+	<mkdir dir="${test.warehouse.dir}"/>
+	<mkdir dir="${test.log.dir}"/>
+        <sequential>
+            <delete dir="${test.log.dir}"/>
+            <mkdir dir="${test.log.dir}"/>
+                <junit showoutput="${test.output}" printsummary="yes" haltonfailure="no"
+                   fork="yes" maxmemory="512m" dir="${basedir}" timeout="${test.timeout}"
+                   errorProperty="tests.failed" failureProperty="tests.failed">
+		<sysproperty key="hive.metastore.warehouse.dir" value="${test.warehouse.dir}"/>
+                <classpath>
+                   <!-- <pathelement location="${test.build.classes}" />
+                    <pathelement location="." /> i-->
+                    <path refid="test.class.path"/>
+                </classpath>
+                <formatter type="${test.junit.output.format}" />
+                <!-- If the user has not defined a particular test to run, run them all -->
+                <batchtest fork="yes" todir="${test.log.dir}" unless="testcase">
+                    <fileset dir="src/test/java" includes="**/*.java"/>
+                </batchtest>
+                <!-- Run one test case.  To use this define -Dtestcase=X on the command line -->
+                <batchtest fork="yes" todir="${test.log.dir}" if="testcase">
+                    <fileset dir="src/test/java" includes="**/*${testcase}.java"/>
+                </batchtest>
+                <assertions>
+                    <enable />
+                </assertions>
+            </junit>
+            <fail if="tests.failed">Tests failed!</fail>
+        </sequential>
+    </target>
+
+    <!--
+    ================================================================================
+    Ivy Section
+    ================================================================================
+    -->
+    <!-- Ivy goop stolen directly from Pig's build.xml -->
+    <target name="ivy-init-dirs">
+        <mkdir dir="${build.ivy.dir}" />
+        <mkdir dir="${build.ivy.lib.dir}" />
+        <mkdir dir="${build.ivy.report.dir}" />
+    </target>
+
+    <target name="ivy-probe-antlib">
+        <condition property="ivy.found">
+            <typefound uri="antlib:org.apache.ivy.ant" name="cleancache"/>
+        </condition>
+    </target>
+
+    <target name="ivy-download" description="To download ivy" unless="offline">
+        <get src="${ivy_repo_url}" dest="${ivy.jar}" usetimestamp="true"/>
+    </target>
+
+    <!--
+       To avoid Ivy leaking things across big projects, always load Ivy in the same classloader.
+       Also note how we skip loading Ivy if it is already there, just to make sure all is well.
+    -->
+    <target name="ivy-init-antlib" depends="ivy-download,ivy-init-dirs,ivy-probe-antlib" unless="ivy.found">
+        <typedef uri="antlib:org.apache.ivy.ant" onerror="fail" loaderRef="ivyLoader">
+            <classpath>
+                <pathelement location="${ivy.jar}"/>
+            </classpath>
+        </typedef>
+        <fail>
+            <condition >
+                <not>
+                    <typefound uri="antlib:org.apache.ivy.ant" name="cleancache"/>
+                </not>
+            </condition>
+            You need Apache Ivy 2.0 or later from http://ant.apache.org/
+            It could not be loaded from ${ivy_repo_url}
+        </fail>
+    </target>
+
+    <target name="ivy-init" depends="ivy-init-antlib" >
+        <!--Configure Ivy by reading in the settings file
+            If anyone has already read in a settings file into this settings ID, it gets priority
+        -->
+        <ivy:configure settingsid="${ant.project.name}.ivy.settings"
+                       file="${ivysettings.xml}" override='false'/>
+    </target>
+   <target name="ivy-compile" depends="ivy-init" description="Resolve, Retrieve Ivy-managed artifacts for compile configuration">
+        <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="common"/>
+        <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
+                      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}" conf="common"/>
+        <ivy:cachepath pathid="compile.classpath" conf="common"/>
+        <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="default"/>
+        <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
+                      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
+                      conf="default"/>
+        <ivy:cachepath pathid="compile.classpath" conf="default"/>
+    </target>
+
+    <target name="ivy-releaseaudit" depends="ivy-init" description="Resolve, Retrieve Ivy-managed artifacts for releaseaudit configuration">
+        <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="releaseaudit"/>
+        <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
+                      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}" conf="releaseaudit"/>
+        <ivy:cachepath pathid="releaseaudit.classpath" conf="releaseaudit"/>
+    </target>
+
+   <!--
+    ===============================================================================
+    Distribution Section
+    ===============================================================================
+    -->
+    <target name="package" depends="jar, docs" description="Create an HCatalog release">
+        <mkdir dir="${dist.dir}" />
+        <mkdir dir="${dist.dir}/lib" />
+        <mkdir dir="${dist.dir}/docs" />
+
+        <copy todir="${dist.dir}/lib" includeEmptyDirs="false">
+            <fileset dir="${lib.dir}" erroronmissingdir="false" />
+            <fileset dir="${build.dir}">
+                <include name="*.jar"/>
+            </fileset>
+        </copy>
+        <copy todir="${dist.dir}/docs">
+            <fileset dir="${build.docs}" />
+        </copy>
+    </target>
+
+   <!--
+    ================================================================================
+    Docs Section
+    ================================================================================
+    -->
+    <target name="docs" depends="javadoc">
+    </target>
+
+    <target name="javadoc" depends="jar" description="Create documentation">
+        <mkdir dir="${build.javadoc}" />
+        <javadoc packagenames="org.apache.hcatalog.*" destdir="${build.javadoc}" author="true" version="true" use="true"
+        windowtitle="HCatalog ${hcatalog.version} API" doctitle="HCatalog ${hcatalog.version} API" sourcepath="${src.dir}">
+            <!--<packageset dir="${src.dir}" /> -->
+            <classpath>
+                <path refid="compile.class.path" />
+            </classpath>
+            <group title="hcatalog" packages="org.apache.hcatalog.*" />
+        </javadoc>
+    </target>
+</project>

Added: incubator/hcatalog/branches/branch-0.4/webhcat/java-client/ivy.xml
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/webhcat/java-client/ivy.xml?rev=1371536&view=auto
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/webhcat/java-client/ivy.xml (added)
+++ incubator/hcatalog/branches/branch-0.4/webhcat/java-client/ivy.xml Fri Aug 10 00:33:08 2012
@@ -0,0 +1,104 @@
+<!-- Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+ 
+  http://www.apache.org/licenses/LICENSE-2.0
+ 
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License. -->
+
+<ivy-module version="2.0" xmlns:m="http://ant.apache.org/ivy/maven">
+    <info organisation="org.apache.hcatalog" module="${ant.project.name}"
+          revision="${hcatalog.version}">
+        <license name="Apache 2.0"/>
+        <ivyauthor name="Apache HCatalog Team" url="http://incubator.apache.org/hcatalog"/>
+        <description>
+            Apache HCatalog
+        </description>
+    </info>
+
+    <configurations defaultconfmapping="default">
+        <!--these match the Maven configurations-->
+        <conf name="default" extends="master,runtime"/>
+        <conf name="master" description="contains the artifact but no dependencies"/>
+        <conf name="runtime" description="runtime but not the artifact" />
+        <conf name="common" visibility="private"
+              extends="runtime"
+              description="artifacts needed to compile/test the application"/>
+        <conf name="test" visibility="private" extends="runtime"/>
+        <conf name="releaseaudit" visibility="private"/>
+    </configurations>
+    <dependencies>
+        <dependency org="org.apache.hadoop" name="hadoop-core"
+          rev="${hadoop-core.version}" conf="common->master" />
+        <dependency org="org.apache.hadoop" name="hadoop-test"
+          rev="${hadoop-test.version}" conf="common->master" />
+        <dependency org="org.apache.hive" name="hive-metastore"
+          rev="${hive.version}" conf="common->master"/>
+        <dependency org="org.apache.hive" name="hive-common"
+          rev="${hive.version}" conf="common->master"/>
+        <dependency org="org.apache.hive" name="hive-cli"
+          rev="${hive.version}" conf="common->master"/>
+        <dependency org="org.apache.hive" name="hive-exec"
+          rev="${hive.version}" conf="common->master"/>
+        <dependency org="junit" name="junit" rev="${junit.version}"
+          conf="common->master"/>
+        <dependency org="commons-lang" name="commons-lang" rev="${commons-lang.version}"
+          conf="common->master"/>
+        <dependency org="commons-logging" name="commons-logging"
+          rev="${commons-logging.version}" conf="common->master"/>
+        <dependency org="commons-logging" name="commons-logging-api"
+          rev="${commons-logging.version}" conf="common->master"/>
+
+        <!-- needed to run tests -->
+        <dependency org="commons-configuration" name="commons-configuration"
+          rev="${commons-configuration.version}" conf="default"/>
+	<dependency org="commons-cli" name="commons-cli" rev="${commons-cli.version}" />
+        <dependency org="org.codehaus.jackson" name="jackson-mapper-asl"
+          rev="${jackson.version}" conf="common->master"/>
+        <dependency org="org.codehaus.jackson" name="jackson-core-asl"
+          rev="${jackson.version}" conf="common->master"/>
+        <dependency org="org.slf4j" name="slf4j-api" rev="${slf4j.version}"
+          conf="common->master"/>
+        <dependency org="org.slf4j" name="slf4j-log4j12" rev="${slf4j.version}"
+          conf="common->master"/>
+        <dependency org="log4j" name="log4j" rev="${log4j.version}"
+          conf="common->master"/>
+        <dependency org="javax.jdo" name="jdo2-api" rev="${jdo.version}"
+          conf="default"/>
+        <dependency org="org.datanucleus" name="datanucleus-core"
+          rev="${datanucleus-core.version}" conf="default"/>
+        <dependency org="org.datanucleus" name="datanucleus-connectionpool"
+          rev="${datanucleus-connectionpool.version}" conf="default"/>
+        <dependency org="org.datanucleus" name="datanucleus-enhancer"
+          rev="${datanucleus-enhancer.version}" conf="default"/>
+        <dependency org="org.datanucleus" name="datanucleus-rdbms"
+          rev="${datanucleus-rdbms.version}" conf="default"/>
+        <dependency org="commons-dbcp" name="commons-dbcp" rev="${commons-dbcp.version}"
+            conf="common->master">
+          <exclude module="commons-pool" />
+          <exclude org="org.apache.geronimo.specs" module="geronimo-jta_1.1_spec"/>
+        </dependency> 
+        <dependency org="commons-pool" name="commons-pool" rev="${commons-pool.version}"
+          conf="default"/> 
+        <dependency org="org.apache.derby" name="derby" rev="${derby.version}"
+          conf="default"/>
+        <dependency org="commons-configuration" name="commons-configuration"
+          rev="${commons-configuration.version}" conf="default"/>
+        <dependency org="commons-httpclient" name="commons-httpclient"
+          rev="${commons-httpclient.version}" conf="default"/>
+        <dependency org="org.apache.hive" name="hive-builtins"
+          rev="${hive.version}" conf="common->master"/>
+        <dependency org="org.apache.thrift" name="libfb303" rev="${fb303.version}"
+          conf="common->master"/>
+        <dependency org="org.antlr" name="antlr-runtime" rev="${antlr.version}"
+          conf="common->master" />
+    </dependencies>
+</ivy-module>

Added: incubator/hcatalog/branches/branch-0.4/webhcat/java-client/src/main/java/org/apache/hcatalog/api/ConnectionFailureException.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/webhcat/java-client/src/main/java/org/apache/hcatalog/api/ConnectionFailureException.java?rev=1371536&view=auto
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/webhcat/java-client/src/main/java/org/apache/hcatalog/api/ConnectionFailureException.java (added)
+++ incubator/hcatalog/branches/branch-0.4/webhcat/java-client/src/main/java/org/apache/hcatalog/api/ConnectionFailureException.java Fri Aug 10 00:33:08 2012
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hcatalog.api;
+
+import org.apache.hcatalog.common.HCatException;
+/**
+ * Class representing exceptions resulting from connection problems
+ * between HCat client and server.
+ */
+public class ConnectionFailureException extends HCatException {
+
+    private static final long serialVersionUID = 1L;
+
+    /**
+     * @param message
+     * @param cause
+     */
+    public ConnectionFailureException(String message, Throwable cause) {
+        super(message, cause);
+    }
+
+}

Added: incubator/hcatalog/branches/branch-0.4/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatAddPartitionDesc.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatAddPartitionDesc.java?rev=1371536&view=auto
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatAddPartitionDesc.java (added)
+++ incubator/hcatalog/branches/branch-0.4/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatAddPartitionDesc.java Fri Aug 10 00:33:08 2012
@@ -0,0 +1,184 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hcatalog.api;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.metastore.Warehouse;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hcatalog.common.HCatException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * The Class HCatAddPartitionDesc helps users in defining partition attributes.
+ */
+public class HCatAddPartitionDesc {
+
+    private static final Logger LOG = LoggerFactory.getLogger(HCatAddPartitionDesc.class);
+    private String tableName;
+    private String dbName;
+    private String location;
+    private Map<String,String> partSpec;
+
+     private HCatAddPartitionDesc(String dbName, String tbl, String loc, Map<String,String> spec){
+         this.dbName = dbName;
+         this.tableName = tbl;
+         this.location = loc;
+         this.partSpec = spec;
+     }
+
+    /**
+     * Gets the location.
+     *
+     * @return the location
+     */
+    public String getLocation() {
+        return this.location;
+    }
+
+
+    /**
+     * Gets the partition spec.
+     *
+     * @return the partition spec
+     */
+    public Map<String, String> getPartitionSpec() {
+        return this.partSpec;
+    }
+
+    /**
+     * Gets the table name.
+     *
+     * @return the table name
+     */
+    public String getTableName() {
+        return this.tableName;
+    }
+
+    /**
+     * Gets the database name.
+     *
+     * @return the database name
+     */
+    public String getDatabaseName() {
+        return this.dbName;
+    }
+
+     @Override
+    public String toString() {
+        return "HCatAddPartitionDesc ["
+                + (tableName != null ? "tableName=" + tableName + ", " : "tableName=null")
+                + (dbName != null ? "dbName=" + dbName + ", " : "dbName=null")
+                + (location != null ? "location=" + location + ", " : "location=null")
+                + (partSpec != null ? "partSpec=" + partSpec : "partSpec=null") + "]";
+    }
+
+    /**
+      * Creates the builder for specifying attributes.
+      *
+      * @param dbName the db name
+      * @param tableName the table name
+      * @param location the location
+      * @param partSpec the part spec
+      * @return the builder
+      * @throws HCatException
+      */
+     public static Builder create(String dbName, String tableName, String location,
+             Map<String,String> partSpec) throws HCatException {
+         return new Builder(dbName, tableName, location, partSpec);
+     }
+
+     Partition toHivePartition(Table hiveTable) throws HCatException{
+         Partition hivePtn = new Partition();
+         hivePtn.setDbName(this.dbName);
+         hivePtn.setTableName(this.tableName);
+
+         List<String> pvals = new ArrayList<String>();
+         for (FieldSchema field : hiveTable.getPartitionKeys()) {
+           String val = partSpec.get(field.getName());
+           if (val == null || val.length() == 0) {
+               throw new HCatException("create partition: Value for key "
+                   + field.getName() + " is null or empty");
+             }
+           pvals.add(val);
+         }
+
+         hivePtn.setValues(pvals);
+         StorageDescriptor sd = new StorageDescriptor(hiveTable.getSd());
+         hivePtn.setSd(sd);
+         hivePtn.setParameters(hiveTable.getParameters());
+         if (this.location != null) {
+             hivePtn.getSd().setLocation(this.location);
+         } else {
+             String partName;
+            try {
+                partName = Warehouse.makePartName(
+                         hiveTable.getPartitionKeys(), pvals);
+                LOG.info("Setting partition location to :" + partName);
+            } catch (MetaException e) {
+                throw new HCatException("Exception while creating partition name.", e);
+            }
+             Path partPath = new Path(hiveTable.getSd().getLocation(), partName);
+             hivePtn.getSd().setLocation(partPath.toString());
+         }
+         hivePtn.setCreateTime((int) (System.currentTimeMillis() / 1000));
+         hivePtn.setLastAccessTimeIsSet(false);
+         return hivePtn;
+     }
+
+     public static class Builder {
+
+         private String tableName;
+         private String location;
+         private Map<String,String> values;
+         private String dbName;
+
+         private Builder(String dbName, String tableName, String location, Map<String,String> values){
+             this.dbName = dbName;
+             this.tableName = tableName;
+             this.location = location;
+             this.values = values;
+         }
+
+         /**
+          * Builds the HCatAddPartitionDesc.
+          *
+          * @return the h cat add partition desc
+          * @throws HCatException
+          */
+         public HCatAddPartitionDesc build() throws HCatException {
+             if(this.dbName == null){
+                 this.dbName = MetaStoreUtils.DEFAULT_DATABASE_NAME;
+             }
+             HCatAddPartitionDesc desc = new HCatAddPartitionDesc(
+                     this.dbName, this.tableName, this.location,
+                     this.values);
+             return desc;
+         }
+     }
+
+}

Added: incubator/hcatalog/branches/branch-0.4/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatClient.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatClient.java?rev=1371536&view=auto
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatClient.java (added)
+++ incubator/hcatalog/branches/branch-0.4/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatClient.java Fri Aug 10 00:33:08 2012
@@ -0,0 +1,317 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hcatalog.api;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.JavaUtils;
+import org.apache.hadoop.hive.metastore.api.PartitionEventType;
+import org.apache.hcatalog.common.HCatException;
+
+/**
+ * The abstract class HCatClient containing APIs for HCatalog DDL commands.
+ */
+public abstract class HCatClient {
+
+    public enum DROP_DB_MODE { RESTRICT, CASCADE };
+    public static final String HCAT_CLIENT_IMPL_CLASS = "hcat.client.impl.class";
+    /**
+     * Creates an instance of HCatClient.
+     *
+     * @param conf An instance of configuration.
+     * @return An instance of HCatClient.
+     * @throws HCatException,ConnectionFailureException
+     */
+    public static HCatClient create(Configuration conf) throws HCatException,
+            ConnectionFailureException {
+        HCatClient client = null;
+        String className = conf.get(HCAT_CLIENT_IMPL_CLASS,
+                HCatClientHMSImpl.class.getName());
+        try {
+            Class<? extends HCatClient> clientClass = Class.forName(className,
+                    true, JavaUtils.getClassLoader()).asSubclass(
+                    HCatClient.class);
+            client = (HCatClient) clientClass.newInstance();
+        } catch (ClassNotFoundException e) {
+            throw new HCatException(
+                    "ClassNotFoundException while creating client class.", e);
+        } catch (InstantiationException e) {
+            throw new HCatException(
+                    "InstantiationException while creating client class.", e);
+        } catch (IllegalAccessException e) {
+            throw new HCatException(
+                    "IllegalAccessException while creating client class.", e);
+        }
+        if(client != null){
+            client.initialize(conf);
+        }
+        return client;
+    }
+
+    abstract void initialize(Configuration conf) throws HCatException,ConnectionFailureException;
+
+    /**
+     * Get all existing databases that match the given
+     * pattern. The matching occurs as per Java regular expressions
+     *
+     * @param databasePattern  java re pattern
+     * @return list of database names
+     * @throws HCatException,ConnectionFailureException
+     */
+    public abstract List<String> listDatabaseNamesByPattern(String pattern)
+            throws HCatException, ConnectionFailureException;
+
+    /**
+     * Gets the database.
+     *
+     * @param dbName The name of the database.
+     * @return An instance of HCatDatabaseInfo.
+     * @throws HCatException,ConnectionFailureException
+     */
+    public abstract HCatDatabase getDatabase(String dbName) throws HCatException,ConnectionFailureException;
+
+    /**
+     * Creates the database.
+     *
+     * @param dbInfo An instance of HCatCreateDBDesc.
+     * @throws HCatException,ConnectionFailureException
+     */
+    public abstract void createDatabase(HCatCreateDBDesc dbInfo)
+            throws HCatException,ConnectionFailureException;
+
+    /**
+     * Drops a database.
+     *
+     * @param dbName The name of the database to delete.
+     * @param ifExists Hive returns an error if the database specified does not exist,
+     *                 unless ifExists is set to true.
+     * @param mode This is set to either "restrict" or "cascade". Restrict will
+     *             remove the schema if all the tables are empty. Cascade removes
+     *             everything including data and definitions.
+     * @throws HCatException,ConnectionFailureException
+     */
+    public abstract void dropDatabase(String dbName, boolean ifExists,
+            DROP_DB_MODE mode) throws HCatException, ConnectionFailureException;
+
+    /**
+     * Returns all existing tables from the specified database which match the given
+     * pattern. The matching occurs as per Java regular expressions.
+     * @param dbName
+     * @param tablePattern
+     * @return list of table names
+     * @throws HCatException,ConnectionFailureException
+     */
+    public abstract List<String> listTableNamesByPattern(String dbName, String tablePattern)
+            throws HCatException,ConnectionFailureException;
+
+    /**
+     * Gets the table.
+     *
+     * @param dbName The name of the database.
+     * @param tableName The name of the table.
+     * @return An instance of HCatTableInfo.
+     * @throws HCatException,ConnectionFailureException
+     */
+    public abstract HCatTable getTable(String dbName, String tableName)
+            throws HCatException,ConnectionFailureException;
+
+    /**
+     * Creates the table.
+     *
+     * @param createTableDesc An instance of HCatCreateTableDesc class.
+     * @throws HCatException,ConnectionFailureException
+     */
+    public abstract void createTable(HCatCreateTableDesc createTableDesc)
+            throws HCatException,ConnectionFailureException;
+
+    /**
+     * Creates the table like an existing table.
+     *
+     * @param dbName The name of the database.
+     * @param existingTblName The name of the existing table.
+     * @param newTableName The name of the new table.
+     * @param ifNotExists If true, then error related to already table existing is skipped.
+     * @param isExternal Set to "true", if table has be created at a different
+     *                   location other than default.
+     * @param location The location for the table.
+     * @throws HCatException,ConnectionFailureException
+     */
+    public abstract void createTableLike(String dbName, String existingTblName,
+            String newTableName, boolean ifNotExists, boolean isExternal,
+            String location) throws HCatException,ConnectionFailureException;
+
+    /**
+     * Drop table.
+     *
+     * @param dbName The name of the database.
+     * @param tableName The name of the table.
+     * @param ifExists Hive returns an error if the database specified does not exist,
+     *                 unless ifExists is set to true.
+     * @throws HCatException,ConnectionFailureException
+     */
+    public abstract void dropTable(String dbName, String tableName,
+            boolean ifExists) throws HCatException,ConnectionFailureException;
+
+    /**
+     * Renames a table.
+     *
+     * @param dbName The name of the database.
+     * @param oldName The name of the table to be renamed.
+     * @param newName The new name of the table.
+     * @throws HCatException,ConnectionFailureException
+     */
+    public abstract void renameTable(String dbName, String oldName,
+            String newName) throws HCatException, ConnectionFailureException;
+
+    /**
+     * Gets all the partitions.
+     *
+     * @param dbName The name of the database.
+     * @param tblName The name of the table.
+     * @return A list of partitions.
+     * @throws HCatException,ConnectionFailureException
+     */
+    public abstract List<HCatPartition> getPartitions(String dbName, String tblName)
+            throws HCatException,ConnectionFailureException;
+
+    /**
+     * Gets the partition.
+     *
+     * @param dbName The database name.
+     * @param tableName The table name.
+     * @param partitionSpec The partition specification, {[col_name,value],[col_name2,value2]}.
+     * @return An instance of HCatPartitionInfo.
+     * @throws HCatException,ConnectionFailureException
+     */
+    public abstract HCatPartition getPartition(String dbName, String tableName,
+            Map<String,String> partitionSpec) throws HCatException,ConnectionFailureException;
+
+    /**
+     * Adds the partition.
+     *
+     * @param partInfo An instance of HCatAddPartitionDesc.
+     * @throws HCatException,ConnectionFailureException
+     */
+    public abstract void addPartition(HCatAddPartitionDesc partInfo)
+            throws HCatException, ConnectionFailureException;
+
+    /**
+     * Adds a list of partitions.
+     *
+     * @param partInfoList A list of HCatAddPartitionDesc.
+     * @return
+     * @throws HCatException,ConnectionFailureException
+     */
+    public abstract int addPartitions(List<HCatAddPartitionDesc> partInfoList)
+            throws HCatException, ConnectionFailureException;
+
+    /**
+     * Drops partition.
+     *
+     * @param dbName The database name.
+     * @param tableName The table name.
+     * @param partitionSpec The partition specification, {[col_name,value],[col_name2,value2]}.
+     * @param ifExists Hive returns an error if the partition specified does not exist, unless ifExists is set to true.
+     * @throws HCatException,ConnectionFailureException
+     */
+    public abstract void dropPartition(String dbName, String tableName,
+            Map<String, String> partitionSpec, boolean ifExists)
+            throws HCatException, ConnectionFailureException;
+
+    /**
+     * List partitions by filter.
+     *
+     * @param dbName The database name.
+     * @param tblName The table name.
+     * @param filter The filter string,
+     *    for example "part1 = \"p1_abc\" and part2 <= "\p2_test\"". Filtering can
+     *    be done only on string partition keys.
+     * @return list of partitions
+     * @throws HCatException,ConnectionFailureException
+     */
+    public abstract List<HCatPartition> listPartitionsByFilter(String dbName, String tblName,
+            String filter) throws HCatException,ConnectionFailureException;
+
+    /**
+     * Mark partition for event.
+     *
+     * @param dbName The database name.
+     * @param tblName The table name.
+     * @param partKVs the key-values associated with the partition.
+     * @param eventType the event type
+     * @throws HCatException,ConnectionFailureException
+     */
+    public abstract void markPartitionForEvent(String dbName, String tblName,
+            Map<String, String> partKVs, PartitionEventType eventType)
+            throws HCatException,ConnectionFailureException;
+
+    /**
+     * Checks if a partition is marked for event.
+     *
+     * @param dbName the db name
+     * @param tblName the table name
+     * @param partKVs the key-values associated with the partition.
+     * @param eventType the event type
+     * @return true, if is partition marked for event
+     * @throws HCatException,ConnectionFailureException
+     */
+    public abstract boolean isPartitionMarkedForEvent(String dbName, String tblName,
+            Map<String, String> partKVs, PartitionEventType eventType)
+            throws HCatException,ConnectionFailureException;
+
+    /**
+     * Gets the delegation token.
+     *
+     * @param owner the owner
+     * @param renewerKerberosPrincipalName the renewer kerberos principal name
+     * @return the delegation token
+     * @throws HCatException,ConnectionFailureException
+     */
+    public abstract String getDelegationToken(String owner,
+            String renewerKerberosPrincipalName) throws HCatException,
+            ConnectionFailureException;
+
+    /**
+     * Renew delegation token.
+     *
+     * @param tokenStrForm the token string
+     * @return the new expiration time
+     * @throws HCatException,ConnectionFailureException
+     */
+    public abstract long renewDelegationToken(String tokenStrForm)
+            throws HCatException, ConnectionFailureException;
+
+    /**
+     * Cancel delegation token.
+     *
+     * @param tokenStrForm the token string
+     * @throws HCatException,ConnectionFailureException
+     */
+    public abstract void cancelDelegationToken(String tokenStrForm)
+            throws HCatException, ConnectionFailureException;
+
+    /**
+     * Close the hcatalog client.
+     *
+     * @throws HCatException
+     */
+    public abstract void close() throws HCatException;
+}

Added: incubator/hcatalog/branches/branch-0.4/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatClientHMSImpl.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatClientHMSImpl.java?rev=1371536&view=auto
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatClientHMSImpl.java (added)
+++ incubator/hcatalog/branches/branch-0.4/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatClientHMSImpl.java Fri Aug 10 00:33:08 2012
@@ -0,0 +1,663 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hcatalog.api;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.metastore.TableType;
+import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
+import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
+import org.apache.hadoop.hive.metastore.api.InvalidPartitionException;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
+import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.metastore.api.PartitionEventType;
+import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.metastore.api.UnknownDBException;
+import org.apache.hadoop.hive.metastore.api.UnknownPartitionException;
+import org.apache.hadoop.hive.metastore.api.UnknownTableException;
+import org.apache.hcatalog.common.HCatException;
+import org.apache.hcatalog.common.HCatUtil;
+import org.apache.thrift.TException;
+
+/**
+ * The HCatClientHMSImpl is the Hive Metastore client based implementation of
+ * HCatClient.
+ */
+public class HCatClientHMSImpl extends HCatClient {
+
+    private HiveMetaStoreClient hmsClient;
+    private Configuration  config;
+    private HiveConf hiveConfig;
+
+    @Override
+    public List<String> listDatabaseNamesByPattern(String pattern)
+            throws HCatException, ConnectionFailureException {
+        List<String> dbNames = null;
+        try {
+            dbNames = hmsClient.getDatabases(pattern);
+        } catch (MetaException exp) {
+            throw new HCatException("MetaException while listing db names", exp);
+        }
+        return dbNames;
+    }
+
+    @Override
+    public HCatDatabase getDatabase(String dbName) throws HCatException,
+            ConnectionFailureException {
+        HCatDatabase db = null;
+        try {
+            Database hiveDB = hmsClient.getDatabase(checkDB(dbName));
+            if (hiveDB != null) {
+                db = new HCatDatabase(hiveDB);
+            }
+        } catch (NoSuchObjectException exp) {
+            throw new HCatException(
+                    "NoSuchObjectException while fetching database", exp);
+        } catch (MetaException exp) {
+            throw new HCatException("MetaException while fetching database",
+                    exp);
+        } catch (TException exp) {
+            throw new ConnectionFailureException(
+                    "TException while fetching database", exp);
+        }
+        return db;
+    }
+
+    @Override
+    public void createDatabase(HCatCreateDBDesc dbInfo) throws HCatException,
+            ConnectionFailureException {
+        try {
+            hmsClient.createDatabase(dbInfo.toHiveDb());
+        } catch (AlreadyExistsException exp) {
+            if (!dbInfo.getIfNotExists()) {
+                throw new HCatException(
+                        "AlreadyExistsException while creating database", exp);
+            }
+        } catch (InvalidObjectException exp) {
+            throw new HCatException(
+                    "InvalidObjectException while creating database", exp);
+        } catch (MetaException exp) {
+            throw new HCatException("MetaException while creating database",
+                    exp);
+        } catch (TException exp) {
+            throw new ConnectionFailureException(
+                    "TException while creating database", exp);
+        }
+    }
+
+    @Override
+    public void dropDatabase(String dbName, boolean ifExists, DROP_DB_MODE mode)
+            throws HCatException, ConnectionFailureException {
+        boolean isCascade;
+        if (mode.toString().equalsIgnoreCase("cascade")) {
+            isCascade = true;
+        } else {
+            isCascade = false;
+        }
+        try {
+            hmsClient.dropDatabase(checkDB(dbName), true, ifExists, isCascade);
+        } catch (NoSuchObjectException e) {
+            if (!ifExists) {
+                throw new HCatException(
+                        "NoSuchObjectException while dropping db.", e);
+            }
+        } catch (InvalidOperationException e) {
+            throw new HCatException(
+                    "InvalidOperationException while dropping db.", e);
+        } catch (MetaException e) {
+            throw new HCatException("MetaException while dropping db.", e);
+        } catch (TException e) {
+            throw new ConnectionFailureException("TException while dropping db.",
+                    e);
+        }
+    }
+
+    @Override
+    public List<String> listTableNamesByPattern(String dbName,
+            String tablePattern) throws HCatException, ConnectionFailureException {
+        List<String> tableNames = null;
+        try {
+            tableNames = hmsClient.getTables(checkDB(dbName), tablePattern);
+        } catch (MetaException e) {
+            throw new HCatException(
+                    "MetaException while fetching table names.", e);
+        }
+        return tableNames;
+    }
+
+    @Override
+    public HCatTable getTable(String dbName, String tableName)
+            throws HCatException, ConnectionFailureException {
+        HCatTable table = null;
+        try {
+            Table hiveTable = hmsClient.getTable(checkDB(dbName), tableName);
+            if (hiveTable != null) {
+                table = new HCatTable(hiveTable);
+            }
+        } catch (MetaException e) {
+            throw new HCatException("MetaException while fetching table.", e);
+        } catch (TException e) {
+            throw new ConnectionFailureException(
+                    "TException while fetching table.", e);
+        } catch (NoSuchObjectException e) {
+            throw new HCatException(
+                    "NoSuchObjectException while fetching table.", e);
+        }
+        return table;
+    }
+
+    @Override
+    public void createTable(HCatCreateTableDesc createTableDesc)
+            throws HCatException, ConnectionFailureException {
+        try {
+            hmsClient.createTable(createTableDesc.toHiveTable(hiveConfig));
+        } catch (AlreadyExistsException e) {
+            if (createTableDesc.getIfNotExists() == false) {
+                throw new HCatException(
+                        "AlreadyExistsException while creating table.", e);
+            }
+        } catch (InvalidObjectException e) {
+            throw new HCatException(
+                    "InvalidObjectException while creating table.", e);
+        } catch (MetaException e) {
+            throw new HCatException("MetaException while creating table.", e);
+        } catch (NoSuchObjectException e) {
+            throw new HCatException(
+                    "NoSuchObjectException while creating table.", e);
+        } catch (TException e) {
+            throw new ConnectionFailureException(
+                    "TException while creating table.", e);
+        } catch (IOException e) {
+            throw new HCatException("IOException while creating hive conf.", e);
+        }
+
+    }
+
+    @Override
+    public void createTableLike(String dbName, String existingTblName,
+            String newTableName, boolean ifNotExists, boolean isExternal,
+            String location) throws HCatException, ConnectionFailureException {
+
+        Table hiveTable = getHiveTableLike(checkDB(dbName), existingTblName,
+                newTableName, ifNotExists, location);
+        if (hiveTable != null) {
+            try {
+                hmsClient.createTable(hiveTable);
+            } catch (AlreadyExistsException e) {
+                if (!ifNotExists) {
+                    throw new HCatException(
+                            "A table already exists with the name "
+                                    + newTableName, e);
+                }
+            } catch (InvalidObjectException e) {
+                throw new HCatException(
+                        "InvalidObjectException in create table like command.",
+                        e);
+            } catch (MetaException e) {
+                throw new HCatException(
+                        "MetaException in create table like command.", e);
+            } catch (NoSuchObjectException e) {
+                throw new HCatException(
+                        "NoSuchObjectException in create table like command.",
+                        e);
+            } catch (TException e) {
+                throw new ConnectionFailureException(
+                        "TException in create table like command.", e);
+            }
+        }
+    }
+
+    @Override
+    public void dropTable(String dbName, String tableName, boolean ifExists)
+            throws HCatException, ConnectionFailureException {
+        try {
+            hmsClient.dropTable(checkDB(dbName), tableName);
+        } catch (NoSuchObjectException e) {
+            if (!ifExists) {
+                throw new HCatException(
+                        "NoSuchObjectException while dropping table.", e);
+            }
+        } catch (MetaException e) {
+            throw new HCatException("MetaException while dropping table.", e);
+        } catch (TException e) {
+            throw new ConnectionFailureException(
+                    "TException while dropping table.", e);
+        }
+    }
+
+    /**
+     * Renames a table by fetching the existing definition, rewriting its name
+     * and issuing an alter_table against the metastore.
+     *
+     * @param dbName  database containing the table; empty/null falls back to
+     *                the default database (see checkDB)
+     * @param oldName current table name
+     * @param newName desired table name
+     * @throws HCatException on metastore-level failures, or when the table is
+     *         a non-native (storage-handler backed) table
+     * @throws ConnectionFailureException on Thrift transport failures
+     */
+    @Override
+    public void renameTable(String dbName, String oldName, String newName)
+            throws HCatException, ConnectionFailureException {
+        Table tbl;
+        try {
+            Table oldtbl = hmsClient.getTable(checkDB(dbName), oldName);
+            if (oldtbl != null) {
+                // TODO : Should be moved out.
+                // Tables backed by a storage handler carry the
+                // META_TABLE_STORAGE parameter; renaming them via the
+                // metastore alone is rejected here.
+                if (oldtbl
+                        .getParameters()
+                        .get(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_STORAGE) != null) {
+                    throw new HCatException(
+                            "Cannot use rename command on a non-native table");
+                }
+                // Copy the definition, change only the name, then alter.
+                tbl = new Table(oldtbl);
+                tbl.setTableName(newName);
+                hmsClient.alter_table(checkDB(dbName), oldName, tbl);
+            }
+        } catch (MetaException e) {
+            throw new HCatException("MetaException while renaming table", e);
+        } catch (TException e) {
+            throw new ConnectionFailureException(
+                    "TException while renaming table", e);
+        } catch (NoSuchObjectException e) {
+            throw new HCatException(
+                    "NoSuchObjectException while renaming table", e);
+        } catch (InvalidOperationException e) {
+            throw new HCatException(
+                    "InvalidOperationException while renaming table", e);
+        }
+    }
+
+    /**
+     * Returns every partition of the given table, wrapped as HCatPartition
+     * objects.
+     *
+     * @param dbName  database name; empty/null falls back to the default
+     *                database (see checkDB)
+     * @param tblName table whose partitions are listed
+     * @return the (possibly empty) list of partitions
+     * @throws HCatException on metastore-level failures
+     * @throws ConnectionFailureException on Thrift transport failures
+     */
+    @Override
+    public List<HCatPartition> getPartitions(String dbName, String tblName)
+            throws HCatException, ConnectionFailureException {
+        List<HCatPartition> partitions = new ArrayList<HCatPartition>();
+        try {
+            // -1 asks the metastore for all partitions (no limit).
+            List<Partition> hivePartitions = hmsClient.listPartitions(
+                    checkDB(dbName), tblName, (short) -1);
+            for (Partition hivePartition : hivePartitions) {
+                partitions.add(new HCatPartition(hivePartition));
+            }
+        } catch (NoSuchObjectException e) {
+            throw new HCatException(
+                    "NoSuchObjectException while retrieving partition.", e);
+        } catch (MetaException e) {
+            throw new HCatException(
+                    "MetaException while retrieving partition.", e);
+        } catch (TException e) {
+            throw new ConnectionFailureException(
+                    "TException while retrieving partition.", e);
+        }
+        return partitions;
+    }
+
+    /**
+     * Fetches a single partition of a table, identified by the values of the
+     * supplied partition specification.
+     *
+     * @param dbName        database name; empty/null falls back to the default
+     *                      database (see checkDB)
+     * @param tableName     table containing the partition
+     * @param partitionSpec partition key/value pairs; only the values are sent
+     *                      to the metastore
+     * @return the partition, or null when the metastore returned none
+     * @throws HCatException on metastore-level failures
+     * @throws ConnectionFailureException on Thrift transport failures
+     */
+    @Override
+    public HCatPartition getPartition(String dbName, String tableName,
+            Map<String, String> partitionSpec) throws HCatException,
+            ConnectionFailureException {
+        HCatPartition result = null;
+        try {
+            List<String> values = new ArrayList<String>(partitionSpec.values());
+            Partition hivePtn = hmsClient.getPartition(checkDB(dbName),
+                    tableName, values);
+            if (hivePtn != null) {
+                result = new HCatPartition(hivePtn);
+            }
+        } catch (MetaException e) {
+            throw new HCatException(
+                    "MetaException while retrieving partition.", e);
+        } catch (TException e) {
+            throw new ConnectionFailureException(
+                    "TException while retrieving partition.", e);
+        } catch (NoSuchObjectException e) {
+            throw new HCatException(
+                    "NoSuchObjectException while retrieving partition.", e);
+        }
+        return result;
+    }
+
+    /**
+     * Adds a single partition to a table.
+     *
+     * @param partInfo descriptor naming the database, table and partition
+     * @throws HCatException when the table is unpartitioned, cannot be found,
+     *         or the metastore reports an error
+     * @throws ConnectionFailureException on Thrift transport failures
+     */
+    @Override
+    public void addPartition(HCatAddPartitionDesc partInfo)
+            throws HCatException, ConnectionFailureException {
+        Table tbl = null;
+        try {
+            tbl = hmsClient.getTable(partInfo.getDatabaseName(),
+                    partInfo.getTableName());
+            // TODO: Should be moved out.
+            if (tbl.getPartitionKeysSize() == 0) {
+                throw new HCatException("The table " + partInfo.getTableName()
+                        + " is not partitioned.");
+            }
+
+            hmsClient.add_partition(partInfo.toHivePartition(tbl));
+        } catch (InvalidObjectException e) {
+            throw new HCatException(
+                    "InvalidObjectException while adding partition.", e);
+        } catch (AlreadyExistsException e) {
+            throw new HCatException(
+                    "AlreadyExistsException while adding partition.", e);
+        } catch (MetaException e) {
+            throw new HCatException("MetaException while adding partition.", e);
+        } catch (TException e) {
+            throw new ConnectionFailureException(
+                    "TException while adding partition.", e);
+        } catch (NoSuchObjectException e) {
+            // Fixed message grammar: was "is could not be found".
+            throw new HCatException("The table " + partInfo.getTableName()
+                    + " could not be found.", e);
+        }
+    }
+
+    /**
+     * Drops a partition of the given table.
+     *
+     * @param dbName        database name; empty/null falls back to the default
+     *                      database (see checkDB)
+     * @param tableName     table containing the partition
+     * @param partitionSpec partition key/value pairs; only the values are sent
+     *                      to the metastore
+     * @param ifExists      when true, a missing partition is silently ignored
+     * @throws HCatException on metastore-level failures
+     * @throws ConnectionFailureException on Thrift transport failures
+     */
+    @Override
+    public void dropPartition(String dbName, String tableName,
+            Map<String, String> partitionSpec, boolean ifExists)
+            throws HCatException, ConnectionFailureException {
+        try {
+            // NOTE(review): relies on partitionSpec.values() iterating in
+            // partition-key order; a plain HashMap argument could yield the
+            // wrong value ordering -- confirm callers pass an ordered map.
+            List<String> ptnValues = new ArrayList<String>();
+            ptnValues.addAll(partitionSpec.values());
+            // NOTE(review): the boolean passed to hmsClient.dropPartition
+            // looks like its deleteData flag, not an if-exists flag -- verify
+            // against the metastore client API.
+            hmsClient.dropPartition(checkDB(dbName), tableName, ptnValues,
+                    ifExists);
+        } catch (NoSuchObjectException e) {
+            if (!ifExists) {
+                throw new HCatException(
+                        "NoSuchObjectException while dropping partition.", e);
+            }
+        } catch (MetaException e) {
+            throw new HCatException("MetaException while dropping partition.",
+                    e);
+        } catch (TException e) {
+            throw new ConnectionFailureException(
+                    "TException while dropping partition.", e);
+        }
+    }
+
+    /**
+     * Lists the partitions of a table that satisfy the given metastore filter
+     * expression.
+     *
+     * @param dbName  database name; empty/null falls back to the default
+     *                database (see checkDB)
+     * @param tblName table whose partitions are filtered
+     * @param filter  metastore filter expression
+     * @return the (possibly empty) list of matching partitions
+     * @throws HCatException on metastore-level failures
+     * @throws ConnectionFailureException on Thrift transport failures
+     */
+    @Override
+    public List<HCatPartition> listPartitionsByFilter(String dbName,
+            String tblName, String filter) throws HCatException,
+            ConnectionFailureException {
+        List<HCatPartition> result = new ArrayList<HCatPartition>();
+        try {
+            // -1 asks the metastore for all matches (no limit).
+            List<Partition> matches = hmsClient.listPartitionsByFilter(
+                    checkDB(dbName), tblName, filter, (short) -1);
+            for (Partition match : matches) {
+                result.add(new HCatPartition(match));
+            }
+        } catch (MetaException e) {
+            throw new HCatException("MetaException while fetching partitions.",
+                    e);
+        } catch (NoSuchObjectException e) {
+            throw new HCatException(
+                    "NoSuchObjectException while fetching partitions.", e);
+        } catch (TException e) {
+            throw new ConnectionFailureException(
+                    "TException while fetching partitions.", e);
+        }
+        return result;
+    }
+
+    /**
+     * Marks the identified partition for the supplied event type in the
+     * metastore.  Each checked metastore exception is rethrown as an
+     * HCatException naming the failure; transport failures become
+     * ConnectionFailureException.
+     *
+     * @param dbName    database name; empty/null falls back to the default
+     *                  database (see checkDB)
+     * @param tblName   table whose partition is marked
+     * @param partKVs   partition key/value pairs identifying the partition
+     * @param eventType the event type to record
+     * @throws HCatException on metastore-level failures
+     * @throws ConnectionFailureException on Thrift transport failures
+     */
+    @Override
+    public void markPartitionForEvent(String dbName, String tblName,
+            Map<String, String> partKVs, PartitionEventType eventType)
+            throws HCatException, ConnectionFailureException {
+        try {
+            hmsClient.markPartitionForEvent(checkDB(dbName), tblName, partKVs,
+                    eventType);
+        } catch (MetaException e) {
+            throw new HCatException(
+                    "MetaException while marking partition for event.", e);
+        } catch (NoSuchObjectException e) {
+            throw new HCatException(
+                    "NoSuchObjectException while marking partition for event.",
+                    e);
+        } catch (UnknownTableException e) {
+            throw new HCatException(
+                    "UnknownTableException while marking partition for event.",
+                    e);
+        } catch (UnknownDBException e) {
+            throw new HCatException(
+                    "UnknownDBException while marking partition for event.", e);
+        } catch (TException e) {
+            throw new ConnectionFailureException(
+                    "TException while marking partition for event.", e);
+        } catch (InvalidPartitionException e) {
+            throw new HCatException(
+                    "InvalidPartitionException while marking partition for event.",
+                    e);
+        } catch (UnknownPartitionException e) {
+            throw new HCatException(
+                    "UnknownPartitionException while marking partition for event.",
+                    e);
+        }
+    }
+
+    /**
+     * Checks whether the identified partition has been marked for the supplied
+     * event type.  Mirrors the exception mapping of markPartitionForEvent.
+     *
+     * @param dbName    database name; empty/null falls back to the default
+     *                  database (see checkDB)
+     * @param tblName   table whose partition is checked
+     * @param partKVs   partition key/value pairs identifying the partition
+     * @param eventType the event type to check for
+     * @return true when the partition is marked for the event
+     * @throws HCatException on metastore-level failures
+     * @throws ConnectionFailureException on Thrift transport failures
+     */
+    @Override
+    public boolean isPartitionMarkedForEvent(String dbName, String tblName,
+            Map<String, String> partKVs, PartitionEventType eventType)
+            throws HCatException, ConnectionFailureException {
+        boolean isMarked = false;
+        try {
+            isMarked = hmsClient.isPartitionMarkedForEvent(checkDB(dbName),
+                    tblName, partKVs, eventType);
+        } catch (MetaException e) {
+            throw new HCatException(
+                    "MetaException while checking partition for event.", e);
+        } catch (NoSuchObjectException e) {
+            throw new HCatException(
+                    "NoSuchObjectException while checking partition for event.",
+                    e);
+        } catch (UnknownTableException e) {
+            throw new HCatException(
+                    "UnknownTableException while checking partition for event.",
+                    e);
+        } catch (UnknownDBException e) {
+            throw new HCatException(
+                    "UnknownDBException while checking partition for event.", e);
+        } catch (TException e) {
+            throw new ConnectionFailureException(
+                    "TException while checking partition for event.", e);
+        } catch (InvalidPartitionException e) {
+            throw new HCatException(
+                    "InvalidPartitionException while checking partition for event.",
+                    e);
+        } catch (UnknownPartitionException e) {
+            throw new HCatException(
+                    "UnknownPartitionException while checking partition for event.",
+                    e);
+        }
+        return isMarked;
+    }
+
+    /**
+     * Obtains a metastore delegation token for the given owner.
+     *
+     * @param owner the token owner
+     * @param renewerKerberosPrincipalName Kerberos principal allowed to renew
+     *        the token
+     * @return the delegation token in string form
+     * @throws HCatException on metastore-level failures
+     * @throws ConnectionFailureException on Thrift transport failures
+     */
+    @Override
+    public String getDelegationToken(String owner,
+            String renewerKerberosPrincipalName) throws HCatException,
+            ConnectionFailureException {
+        final String token;
+        try {
+            token = hmsClient.getDelegationToken(owner,
+                    renewerKerberosPrincipalName);
+        } catch (MetaException e) {
+            throw new HCatException(
+                    "MetaException while getting delegation token.", e);
+        } catch (TException e) {
+            throw new ConnectionFailureException(
+                    "TException while getting delegation token.", e);
+        }
+        return token;
+    }
+
+    /**
+     * Renews a previously obtained delegation token.
+     *
+     * @param tokenStrForm the token in string form
+     * @return the value reported by the metastore for the renewal
+     * @throws HCatException on metastore-level failures
+     * @throws ConnectionFailureException on Thrift transport failures
+     */
+    @Override
+    public long renewDelegationToken(String tokenStrForm) throws HCatException,
+            ConnectionFailureException {
+        final long renewedTime;
+        try {
+            renewedTime = hmsClient.renewDelegationToken(tokenStrForm);
+        } catch (MetaException e) {
+            throw new HCatException(
+                    "MetaException while renewing delegation token.", e);
+        } catch (TException e) {
+            throw new ConnectionFailureException(
+                    "TException while renewing delegation token.", e);
+        }
+        return renewedTime;
+    }
+
+    /**
+     * Cancels a previously obtained delegation token.
+     *
+     * @param tokenStrForm the token in string form
+     * @throws HCatException on metastore-level failures
+     * @throws ConnectionFailureException on Thrift transport failures
+     */
+    @Override
+    public void cancelDelegationToken(String tokenStrForm)
+            throws HCatException, ConnectionFailureException {
+        try {
+            hmsClient.cancelDelegationToken(tokenStrForm);
+        } catch (MetaException e) {
+            throw new HCatException(
+                    "MetaException while canceling delegation token.", e);
+        } catch (TException e) {
+            throw new ConnectionFailureException(
+                    "TException while canceling delegation token.", e);
+        }
+    }
+
+    /*
+     * Stores the supplied configuration and builds the Hive metastore client
+     * used by every other method of this class.  (Repaired the previously
+     * malformed comment, which opened a nested block comment.)
+     *
+     * @param conf the Hadoop configuration the HiveConf is derived from
+     *
+     * @throws HCatException, ConnectionFailureException
+     *
+     * @see
+     * org.apache.hcatalog.api.HCatClient#initialize(org.apache.hadoop.conf.
+     * Configuration)
+     */
+    @Override
+    void initialize(Configuration conf) throws HCatException,
+            ConnectionFailureException {
+        this.config = conf;
+        try {
+            hiveConfig = HCatUtil.getHiveConf(config);
+            hmsClient = HCatUtil.createHiveClient(hiveConfig);
+        } catch (MetaException exp) {
+            throw new HCatException("MetaException while creating HMS client",
+                    exp);
+        } catch (IOException exp) {
+            throw new HCatException("IOException while creating HMS client",
+                    exp);
+        }
+
+    }
+
+    /**
+     * Builds a new (not yet created) Hive Table object modelled on an existing
+     * table: same storage descriptor and parameters, new name and optionally a
+     * new location and external flag.
+     *
+     * @param dbName          database of the existing table; empty/null falls
+     *                        back to the default database (see checkDB)
+     * @param existingTblName the table to copy from
+     * @param newTableName    name for the new table
+     * @param isExternal      when true the copy is marked EXTERNAL
+     * @param location        storage location for the copy; null keeps the
+     *                        source table's location
+     * @return the new Table definition, or null if the lookup returned null
+     * @throws HCatException on metastore-level failures
+     * @throws ConnectionFailureException on Thrift transport failures
+     */
+    private Table getHiveTableLike(String dbName, String existingTblName,
+            String newTableName, boolean isExternal, String location)
+            throws HCatException, ConnectionFailureException {
+        Table oldtbl = null;
+        Table newTable = null;
+        try {
+            oldtbl = hmsClient.getTable(checkDB(dbName), existingTblName);
+        } catch (MetaException e1) {
+            throw new HCatException(
+                    "MetaException while retrieving existing table.", e1);
+        } catch (TException e1) {
+            throw new ConnectionFailureException(
+                    "TException while retrieving existing table.", e1);
+        } catch (NoSuchObjectException e1) {
+            throw new HCatException(
+                    "NoSuchObjectException while retrieving existing table.",
+                    e1);
+        }
+        if (oldtbl != null) {
+            newTable = new Table();
+            newTable.setTableName(newTableName);
+            newTable.setDbName(dbName);
+            StorageDescriptor sd = new StorageDescriptor(oldtbl.getSd());
+            newTable.setSd(sd);
+            // NOTE(review): this shares the parameters map with oldtbl, so the
+            // putToParameters/remove calls below also mutate oldtbl's map --
+            // harmless if oldtbl is discarded afterwards, but confirm.
+            newTable.setParameters(oldtbl.getParameters());
+            if (location == null) {
+                newTable.getSd().setLocation(oldtbl.getSd().getLocation());
+            } else {
+                newTable.getSd().setLocation(location);
+            }
+            if (isExternal) {
+                newTable.putToParameters("EXTERNAL", "TRUE");
+                newTable.setTableType(TableType.EXTERNAL_TABLE.toString());
+            } else {
+                newTable.getParameters().remove("EXTERNAL");
+            }
+            // set create time
+            newTable.setCreateTime((int) (System.currentTimeMillis() / 1000));
+            newTable.setLastAccessTimeIsSet(false);
+        }
+        return newTable;
+    }
+
+    /*
+     * Closes the underlying Hive metastore client connection.
+     *
+     * @throws HCatException
+     *
+     * @see org.apache.hcatalog.api.HCatClient#closeClient()
+     */
+    @Override
+    public void close() throws HCatException {
+        // NOTE(review): assumes initialize() was called first; otherwise
+        // hmsClient is null and this throws NPE -- confirm callers.
+        hmsClient.close();
+    }
+
+    /**
+     * Normalizes a database name: null/empty maps to the metastore's default
+     * database, anything else is returned unchanged.
+     */
+    private String checkDB(String name) {
+        return StringUtils.isEmpty(name)
+                ? MetaStoreUtils.DEFAULT_DATABASE_NAME : name;
+    }
+
+    /*
+     * Adds a batch of partitions in one metastore call.  The database and
+     * table are taken from the first descriptor in the list; every descriptor
+     * is converted against that table's definition.
+     *
+     * @param partInfoList
+     *  @return The size of the list of partitions.
+     * @throws HCatException,ConnectionFailureException
+     * @see org.apache.hcatalog.api.HCatClient#addPartitions(java.util.List)
+     */
+    @Override
+    public int addPartitions(List<HCatAddPartitionDesc> partInfoList)
+            throws HCatException, ConnectionFailureException {
+        int numPartitions = -1;
+        if ((partInfoList == null) || (partInfoList.size() == 0)) {
+            throw new HCatException("The partition list is null or empty.");
+        }
+
+        Table tbl = null;
+        try {
+            tbl = hmsClient.getTable(partInfoList.get(0).getDatabaseName(),
+                    partInfoList.get(0).getTableName());
+            ArrayList<Partition> ptnList = new ArrayList<Partition>();
+            for (HCatAddPartitionDesc desc : partInfoList) {
+                ptnList.add(desc.toHivePartition(tbl));
+            }
+            numPartitions = hmsClient.add_partitions(ptnList);
+        } catch (InvalidObjectException e) {
+            throw new HCatException(
+                    "InvalidObjectException while adding partition.", e);
+        } catch (AlreadyExistsException e) {
+            throw new HCatException(
+                    "AlreadyExistsException while adding partition.", e);
+        } catch (MetaException e) {
+            throw new HCatException("MetaException while adding partition.", e);
+        } catch (TException e) {
+            throw new ConnectionFailureException(
+                    "TException while adding partition.", e);
+        } catch (NoSuchObjectException e) {
+            // Fixed message grammar: was "is could not be found".
+            throw new HCatException("The table "
+                    + partInfoList.get(0).getTableName()
+                    + " could not be found.", e);
+        }
+        return numPartitions;
+    }
+
+}

Added: incubator/hcatalog/branches/branch-0.4/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatCreateDBDesc.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatCreateDBDesc.java?rev=1371536&view=auto
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatCreateDBDesc.java (added)
+++ incubator/hcatalog/branches/branch-0.4/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatCreateDBDesc.java Fri Aug 10 00:33:08 2012
@@ -0,0 +1,193 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hcatalog.api;
+
+import java.util.Map;
+
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hcatalog.common.HCatException;
+
+/**
+ * The Class HCatCreateDBDesc for defining database attributes.
+ */
+public class HCatCreateDBDesc {
+
+    private String dbName;
+    private String locationUri;
+    private String comment;
+    private Map<String, String> dbProperties;
+    // Renamed from "ifNotExits" (typo fix); also reflected in toString().
+    private boolean ifNotExists = false;
+
+    /**
+     * Gets the database properties.
+     *
+     * @return the database properties
+     */
+    public Map<String, String> getDatabaseProperties() {
+        return this.dbProperties;
+    }
+
+    /**
+     * Gets the if-not-exists flag.
+     *
+     * @return true when database creation should not fail if a database with
+     *         the same name already exists
+     */
+    public boolean getIfNotExists(){
+        return this.ifNotExists;
+    }
+
+    /**
+     * Gets the comments.
+     *
+     * @return the comments
+     */
+    public String getComments() {
+        return this.comment;
+    }
+
+    /**
+     * Gets the location.
+     *
+     * @return the location
+     */
+    public String getLocation() {
+        return this.locationUri;
+    }
+
+    /**
+     * Gets the database name.
+     *
+     * @return the database name
+     */
+    public String getDatabaseName() {
+        return this.dbName;
+    }
+
+    // Instances are built exclusively through the Builder.
+    private HCatCreateDBDesc(String dbName){
+       this.dbName = dbName;
+    }
+
+    @Override
+    public String toString() {
+        return "HCatCreateDBDesc ["
+                + (dbName != null ? "dbName=" + dbName + ", " : "dbName=null")
+                + (locationUri != null ? "location=" + locationUri + ", "
+                        : "location=null")
+                + (comment != null ? "comment=" + comment + ", " : "comment=null")
+                + (dbProperties != null ? "dbProperties=" + dbProperties + ", "
+                        : "dbProperties=null") + "ifNotExists=" + ifNotExists + "]";
+    }
+
+    /**
+     * Creates the builder for defining attributes.
+     *
+     * @param dbName the db name
+     * @return the builder
+     */
+    public static Builder create(String dbName){
+        return new Builder(dbName);
+    }
+
+    /**
+     * Converts this descriptor into the Hive metastore Database object.
+     */
+    Database toHiveDb(){
+        Database hiveDB = new Database();
+        hiveDB.setDescription(this.comment);
+        hiveDB.setLocationUri(this.locationUri);
+        hiveDB.setName(this.dbName);
+        hiveDB.setParameters(this.dbProperties);
+        return hiveDB;
+    }
+
+    public static class Builder {
+
+        private String innerLoc;
+        private String innerComment;
+        private Map<String, String> innerDBProps;
+        private String dbName;
+        private boolean ifNotExists = false;
+
+        private Builder(String dbName){
+            this.dbName = dbName;
+        }
+
+        /**
+         * Location.
+         *
+         * @param value the location of the database.
+         * @return the builder
+         */
+        public Builder location(String value){
+            this.innerLoc = value;
+            return this;
+        }
+
+        /**
+         * Comment.
+         *
+         * @param value comments.
+         * @return the builder
+         */
+        public Builder comment(String value){
+            this.innerComment = value;
+            return this;
+        }
+
+        /**
+         * If not exists.
+         * @param ifNotExists If set to true, hive will not throw exception, if a
+         * database with the same name already exists.
+         * @return the builder
+         */
+        public Builder ifNotExists(boolean ifNotExists){
+            this.ifNotExists = ifNotExists;
+            return this;
+        }
+
+        /**
+         * Database properties.
+         *
+         * @param dbProps the database properties
+         * @return the builder
+         */
+        public Builder databaseProperties(Map<String, String> dbProps) {
+            this.innerDBProps = dbProps;
+            return this;
+        }
+
+        /**
+         * Builds the create database descriptor.
+         *
+         * @return An instance of HCatCreateDBDesc
+         * @throws HCatException when the database name is null
+         */
+        public HCatCreateDBDesc build() throws HCatException {
+            if(this.dbName == null){
+                throw new HCatException("Database name cannot be null.");
+            }
+            HCatCreateDBDesc desc = new HCatCreateDBDesc(this.dbName);
+            desc.comment = this.innerComment;
+            desc.locationUri = this.innerLoc;
+            desc.dbProperties = this.innerDBProps;
+            desc.ifNotExists = this.ifNotExists;
+            return desc;
+        }
+
+    }
+
+}



Mime
View raw message