hive-commits mailing list archives

From: br...@apache.org
Subject: svn commit: r1537576 [3/23] - in /hive/trunk: ./ ant/ ant/src/org/apache/hadoop/hive/ant/ beeline/ beeline/src/java/org/apache/hive/beeline/ beeline/src/test/org/apache/hive/beeline/src/test/ cli/ common/ common/src/java/conf/ common/src/scripts/ commo...
Date: Thu, 31 Oct 2013 18:27:46 GMT
Modified: hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitionPublish.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitionPublish.java?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitionPublish.java (original)
+++ hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitionPublish.java Thu Oct 31 18:27:31 2013
@@ -74,15 +74,15 @@ public class TestHCatPartitionPublish {
   private static HiveConf hcatConf;
   private static HiveMetaStoreClient msc;
   private static SecurityManager securityManager;
+  private static Configuration conf = new Configuration(true);
 
   @BeforeClass
   public static void setup() throws Exception {
-    String testDir = System.getProperty("test.data.dir", "./");
+    String testDir = System.getProperty("test.tmp.dir", "./");
     testDir = testDir + "/test_hcat_partitionpublish_" + Math.abs(new Random().nextLong()) + "/";
     File workDir = new File(new File(testDir).getCanonicalPath());
     FileUtil.fullyDelete(workDir);
     workDir.mkdirs();
-    Configuration conf = new Configuration(true);
     conf.set("yarn.scheduler.capacity.root.queues", "default");
     conf.set("yarn.scheduler.capacity.root.default.capacity", "100");
 
@@ -158,8 +158,9 @@ public class TestHCatPartitionPublish {
     // In Windows, we cannot remove the output directory when job fail. See
     // FileOutputCommitterContainer.abortJob
     if (!Shell.WINDOWS) {
-      Assert.assertFalse(fs.exists(new Path(table.getSd().getLocation()
-          + "/part1=p1value1/part0=p0value1")));
+      Path path = new Path(table.getSd().getLocation()
+          + "/part1=p1value1/part0=p0value1");
+      Assert.assertFalse(path.getFileSystem(conf).exists(path));
     }
   }
 

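The hunk above stops asserting against the test's shared fs handle and instead resolves the FileSystem from the partition Path itself, so the check holds regardless of which scheme the table location uses. A minimal sketch of that pattern in isolation (the location string and class name are illustrative, not taken from the commit):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class PathExistsCheck {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration(true);
        // Illustrative location; in the test it comes from table.getSd().getLocation().
        Path partitionPath = new Path("file:///tmp/warehouse/t/part1=p1value1/part0=p0value1");
        // Ask the Path which FileSystem owns it; the scheme of the table location
        // need not match whatever handle the test cached earlier.
        FileSystem fs = partitionPath.getFileSystem(conf);
        System.out.println("exists: " + fs.exists(partitionPath));
      }
    }
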
Modified: hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestMultiOutputFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestMultiOutputFormat.java?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestMultiOutputFormat.java (original)
+++ hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestMultiOutputFormat.java Thu Oct 31 18:27:31 2013
@@ -85,7 +85,7 @@ public class TestMultiOutputFormat {
   }
 
   private static void createWorkDir() throws IOException {
-    String testDir = System.getProperty("test.data.dir", "./");
+    String testDir = System.getProperty("test.tmp.dir", "./");
     testDir = testDir + "/test_multiout_" + Math.abs(new Random().nextLong()) + "/";
     workDir = new File(new File(testDir).getCanonicalPath());
     FileUtil.fullyDelete(workDir);

Modified: hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestSequenceFileReadWrite.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestSequenceFileReadWrite.java?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestSequenceFileReadWrite.java (original)
+++ hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestSequenceFileReadWrite.java Thu Oct 31 18:27:31 2013
@@ -66,7 +66,7 @@ public class TestSequenceFileReadWrite {
 
   @Before
   public void setup() throws Exception {
-    dataDir = new File(System.getProperty("java.io.tmpdir") + File.separator + 
+    dataDir = new File(System.getProperty("java.io.tmpdir") + File.separator +
         TestSequenceFileReadWrite.class.getCanonicalName() + "-" + System.currentTimeMillis());
     hiveConf = new HiveConf(this.getClass());
     warehouseDir = new File(dataDir, "warehouse").getAbsolutePath();

Modified: hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/rcfile/TestRCFileMapReduceInputFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/rcfile/TestRCFileMapReduceInputFormat.java?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/rcfile/TestRCFileMapReduceInputFormat.java (original)
+++ hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/rcfile/TestRCFileMapReduceInputFormat.java Thu Oct 31 18:27:31 2013
@@ -68,7 +68,7 @@ public class TestRCFileMapReduceInputFor
   static {
     try {
       fs = FileSystem.getLocal(conf);
-      Path dir = new Path(System.getProperty("test.data.dir", ".") + "/mapred");
+      Path dir = new Path(System.getProperty("test.tmp.dir", ".") + "/mapred");
       file = new Path(dir, "test_rcfile");
       fs.delete(dir, true);
       // the SerDe part is from TestLazySimpleSerDe
@@ -203,7 +203,7 @@ public class TestRCFileMapReduceInputFor
   private void writeThenReadByRecordReader(int intervalRecordCount,
                        int writeCount, int splitNumber, long maxSplitSize, CompressionCodec codec)
     throws IOException, InterruptedException {
-    Path testDir = new Path(System.getProperty("test.data.dir", ".")
+    Path testDir = new Path(System.getProperty("test.tmp.dir", ".")
       + "/mapred/testsmallfirstsplit");
     Path testFile = new Path(testDir, "test_rcfile");
     fs.delete(testFile, true);

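Each of the test files above makes the same switch from the test.data.dir system property to test.tmp.dir when picking its scratch directory. A minimal sketch of that lookup on its own, assuming the build sets the property (the directory prefix is illustrative):

    import java.io.File;
    import java.util.Random;

    import org.apache.hadoop.fs.FileUtil;

    public class ScratchDir {
      public static void main(String[] args) throws Exception {
        // Resolve the per-build temp root, falling back to the working directory.
        String testDir = System.getProperty("test.tmp.dir", "./");
        testDir = testDir + "/test_scratch_" + Math.abs(new Random().nextLong()) + "/";
        File workDir = new File(new File(testDir).getCanonicalPath());
        FileUtil.fullyDelete(workDir);            // start from a clean directory
        System.out.println("created: " + workDir.mkdirs() + " -> " + workDir);
      }
    }
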
Added: hive/trunk/hcatalog/hcatalog-pig-adapter/pom-new.xml
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/hcatalog-pig-adapter/pom-new.xml?rev=1537576&view=auto
==============================================================================
--- hive/trunk/hcatalog/hcatalog-pig-adapter/pom-new.xml (added)
+++ hive/trunk/hcatalog/hcatalog-pig-adapter/pom-new.xml Thu Oct 31 18:27:31 2013
@@ -0,0 +1,100 @@
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied.  See the License for the
+  specific language governing permissions and limitations
+  under the License.
+-->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+     xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+     xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.hive.hcatalog</groupId>
+    <artifactId>hive-hcatalog</artifactId>
+    <version>0.13.0-SNAPSHOT</version>
+    <relativePath>../pom.xml</relativePath>
+  </parent>
+
+  <artifactId>hive-hcatalog-pig-adapter</artifactId>
+  <packaging>jar</packaging>
+  <name>Hive HCatalog Pig Adapter</name>
+
+  <properties>
+    <hive.path.to.root>../..</hive.path.to.root>
+  </properties>
+
+  <dependencies>
+    <!-- intra-project -->
+    <dependency>
+      <groupId>org.apache.hive.hcatalog</groupId>
+      <artifactId>hive-hcatalog-core</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <!-- test intra-project -->
+    <dependency>
+      <groupId>org.apache.hive.hcatalog</groupId>
+      <artifactId>hive-hcatalog-core</artifactId>
+      <version>${project.version}</version>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+
+
+  <profiles>
+    <profile>
+      <id>hadoop-1</id>
+      <activation>
+        <activeByDefault>true</activeByDefault>
+      </activation>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-core</artifactId>
+          <version>${hadoop-20S.version}</version>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.pig</groupId>
+          <artifactId>pig</artifactId>
+          <version>${pig.version}</version>
+        </dependency>
+      </dependencies>
+    </profile>
+   <profile>
+      <id>hadoop-2</id>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-common</artifactId>
+          <version>${hadoop-23.version}</version>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-mapreduce-client-core</artifactId>
+          <version>${hadoop-23.version}</version>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.pig</groupId>
+          <artifactId>pig</artifactId>
+          <version>${pig.version}</version>
+          <classifier>h2</classifier>
+        </dependency>
+      </dependencies>
+    </profile>
+  </profiles>
+
+</project>

Added: hive/trunk/hcatalog/pom-new.xml
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/pom-new.xml?rev=1537576&view=auto
==============================================================================
--- hive/trunk/hcatalog/pom-new.xml (added)
+++ hive/trunk/hcatalog/pom-new.xml Thu Oct 31 18:27:31 2013
@@ -0,0 +1,97 @@
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied.  See the License for the
+  specific language governing permissions and limitations
+  under the License.
+-->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+     xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+     xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.hive</groupId>
+    <artifactId>hive</artifactId>
+    <version>0.13.0-SNAPSHOT</version>
+    <relativePath>../pom.xml</relativePath>
+  </parent>
+
+  <groupId>org.apache.hive.hcatalog</groupId>
+  <artifactId>hive-hcatalog</artifactId>
+  <packaging>pom</packaging>
+  <name>Hive HCatalog</name>
+
+  <properties>
+    <hive.path.to.root>..</hive.path.to.root>
+  </properties>
+
+  <modules>
+    <module>core</module>
+    <module>hcatalog-pig-adapter</module>
+    <module>server-extensions</module>
+    <module>webhcat/java-client</module>
+    <module>webhcat/svr</module>
+    <module>storage-handlers/hbase</module>
+  </modules>
+
+  <profiles>
+    <profile>
+      <id>hadoop-1</id>
+      <activation>
+        <activeByDefault>true</activeByDefault>
+      </activation>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-core</artifactId>
+          <version>${hadoop-20S.version}</version>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.pig</groupId>
+          <artifactId>pig</artifactId>
+          <version>${pig.version}</version>
+          <scope>test</scope>
+        </dependency>
+      </dependencies>
+    </profile>
+   <profile>
+      <id>hadoop-2</id>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-common</artifactId>
+          <version>${hadoop-23.version}</version>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-mapreduce-client-core</artifactId>
+          <version>${hadoop-23.version}</version>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.pig</groupId>
+          <artifactId>pig</artifactId>
+          <version>${pig.version}</version>
+          <classifier>h2</classifier>
+          <scope>test</scope>
+        </dependency>
+      </dependencies>
+    </profile>
+  </profiles>
+
+
+</project>

Added: hive/trunk/hcatalog/server-extensions/pom-new.xml
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/server-extensions/pom-new.xml?rev=1537576&view=auto
==============================================================================
--- hive/trunk/hcatalog/server-extensions/pom-new.xml (added)
+++ hive/trunk/hcatalog/server-extensions/pom-new.xml Thu Oct 31 18:27:31 2013
@@ -0,0 +1,125 @@
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied.  See the License for the
+  specific language governing permissions and limitations
+  under the License.
+-->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+     xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+     xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.hive.hcatalog</groupId>
+    <artifactId>hive-hcatalog</artifactId>
+    <version>0.13.0-SNAPSHOT</version>
+    <relativePath>../pom.xml</relativePath>
+  </parent>
+
+  <artifactId>hive-hcatalog-server-extensions</artifactId>
+  <packaging>jar</packaging>
+  <name>Hive HCatalog Server Extensions</name>
+
+  <properties>
+    <hive.path.to.root>../..</hive.path.to.root>
+  </properties>
+
+  <dependencies>
+    <!-- intra-project -->
+    <dependency>
+      <groupId>org.apache.hive.hcatalog</groupId>
+      <artifactId>hive-hcatalog-core</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <!-- inter-project -->
+    <dependency>
+      <groupId>javax.jms</groupId>
+      <artifactId>jms</artifactId>
+      <version>${jms.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.codehaus.jackson</groupId>
+      <artifactId>jackson-mapper-asl</artifactId>
+      <version>${jackson.version}</version>
+    </dependency>
+    <!-- test intra-project -->
+    <dependency>
+      <groupId>org.apache.hive.hcatalog</groupId>
+      <artifactId>hive-hcatalog-core</artifactId>
+      <version>${project.version}</version>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+    </dependency>
+    <!-- test inter-project -->
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <version>${junit.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.activemq</groupId>
+      <artifactId>activemq-core</artifactId>
+      <version>${activemq.version}</version>
+      <scope>test</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>org.springframework</groupId>
+          <artifactId>spring-context</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.activemq</groupId>
+      <artifactId>kahadb</artifactId>
+      <version>${activemq.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.pig</groupId>
+      <artifactId>pig</artifactId>
+      <version>${pig.version}</version>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+
+  <profiles>
+    <profile>
+      <id>hadoop-1</id>
+      <activation>
+        <activeByDefault>true</activeByDefault>
+      </activation>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-core</artifactId>
+          <version>${hadoop-20S.version}</version>
+        </dependency>
+      </dependencies>
+    </profile>
+   <profile>
+      <id>hadoop-2</id>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-common</artifactId>
+          <version>${hadoop-23.version}</version>
+        </dependency>
+      </dependencies>
+    </profile>
+  </profiles>
+
+</project>

Added: hive/trunk/hcatalog/storage-handlers/hbase/pom-new.xml
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/storage-handlers/hbase/pom-new.xml?rev=1537576&view=auto
==============================================================================
--- hive/trunk/hcatalog/storage-handlers/hbase/pom-new.xml (added)
+++ hive/trunk/hcatalog/storage-handlers/hbase/pom-new.xml Thu Oct 31 18:27:31 2013
@@ -0,0 +1,202 @@
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied.  See the License for the
+  specific language governing permissions and limitations
+  under the License.
+-->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+     xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+     xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+  <groupId>org.apache.hive.hcatalog</groupId>
+  <artifactId>hive-hcatalog</artifactId>
+  <version>0.13.0-SNAPSHOT</version>
+  <relativePath>../../pom.xml</relativePath>
+  </parent>
+
+  <artifactId>hive-hbase-storage-handler</artifactId>
+  <packaging>jar</packaging>
+  <name>Hive HCatalog HBase Storage Handler</name>
+
+  <properties>
+    <hive.path.to.root>../../..</hive.path.to.root>
+  </properties>
+
+  <dependencies>
+    <!-- intra-project -->
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-hbase-handler</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive.hcatalog</groupId>
+      <artifactId>hive-hcatalog-core</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <!-- inter-project -->
+    <dependency>
+      <groupId>com.google.guava</groupId>
+      <artifactId>guava</artifactId>
+      <version>${guava.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase</artifactId>
+      <version>${hbase.version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>org.apache.thrift</groupId>
+          <artifactId>libthrift</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.thrift</groupId>
+      <artifactId>libthrift</artifactId>
+      <version>${libthrift.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.zookeeper</groupId>
+      <artifactId>zookeeper</artifactId>
+      <version>${zookeeper.version}</version>
+    </dependency>
+    <!-- test inter-project -->
+    <dependency>
+      <groupId>commons-io</groupId>
+      <artifactId>commons-io</artifactId>
+      <version>${commons-io.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase</artifactId>
+      <version>${hbase.version}</version>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>com.google.guava</groupId>
+          <artifactId>guava</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.zookeeper</groupId>
+      <artifactId>zookeeper</artifactId>
+      <version>${zookeeper.version}</version>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+
+  <profiles>
+    <profile>
+      <id>hadoop-1</id>
+      <activation>
+        <activeByDefault>true</activeByDefault>
+      </activation>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-core</artifactId>
+          <version>${hadoop-20S.version}</version>
+        </dependency>
+        <!-- test -->
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-test</artifactId>
+          <version>${hadoop-20S.version}</version>
+          <scope>test</scope>
+        </dependency>
+      </dependencies>
+    </profile>
+   <profile>
+      <id>hadoop-2</id>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-common</artifactId>
+          <version>${hadoop-23.version}</version>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-mapreduce-client-core</artifactId>
+          <version>${hadoop-23.version}</version>
+        </dependency>
+        <!-- test -->
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
+          <version>${hadoop-23.version}</version>
+          <classifier>tests</classifier>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-hdfs</artifactId>
+          <version>${hadoop-23.version}</version>
+          <classifier>tests</classifier>
+          <scope>test</scope>
+        </dependency>
+      </dependencies>
+    </profile>
+  </profiles>
+
+
+  <build>
+    <sourceDirectory>${basedir}/src/java</sourceDirectory>
+    <testSourceDirectory>${basedir}/src/test</testSourceDirectory>
+    <resources>
+      <resource>
+        <directory>${basedir}/src/resources</directory>
+      </resource>
+    </resources>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-jar-plugin</artifactId>
+        <executions>
+          <execution>
+            <goals>
+              <goal>test-jar</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>build-helper-maven-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>add-source</id>
+            <phase>generate-sources</phase>
+            <goals>
+              <goal>add-source</goal>
+            </goals>
+            <configuration>
+              <sources>
+                <source>src/gen-java</source>
+              </sources>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+</project>

Modified: hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/SkeletonHBaseTest.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/SkeletonHBaseTest.java?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/SkeletonHBaseTest.java (original)
+++ hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/SkeletonHBaseTest.java Thu Oct 31 18:27:31 2013
@@ -36,14 +36,13 @@ import org.apache.hadoop.hbase.HTableDes
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.junit.AfterClass;
-import org.junit.BeforeClass;
 
 /**
  * Base class for HBase Tests which need a mini cluster instance
  */
 public abstract class SkeletonHBaseTest {
 
-  protected static String TEST_DIR = "/tmp/build/test/data/";
+  protected static String TEST_DIR = System.getProperty("test.tmp.dir", "target/tmp/");
 
   protected final static String DEFAULT_CONTEXT_HANDLE = "default";
 
@@ -56,20 +55,15 @@ public abstract class SkeletonHBaseTest 
    */
   protected static Configuration testConf = null;
 
-  protected void createTable(String tableName, String[] families) {
-    try {
-      HBaseAdmin admin = new HBaseAdmin(getHbaseConf());
-      HTableDescriptor tableDesc = new HTableDescriptor(tableName);
-      for (String family : families) {
-        HColumnDescriptor columnDescriptor = new HColumnDescriptor(family);
-        tableDesc.addFamily(columnDescriptor);
-      }
-      admin.createTable(tableDesc);
-    } catch (Exception e) {
-      e.printStackTrace();
-      throw new IllegalStateException(e);
+  protected void createTable(String tableName, String[] families) throws IOException {
+    HBaseAdmin admin = new HBaseAdmin(getHbaseConf());
+    HTableDescriptor tableDesc = new HTableDescriptor(tableName);
+    for (String family : families) {
+      HColumnDescriptor columnDescriptor = new HColumnDescriptor(family);
+      tableDesc.addFamily(columnDescriptor);
     }
-
+    admin.createTable(tableDesc);
+    admin.close();
   }
 
   protected String newTableName(String prefix) {
@@ -78,21 +72,20 @@ public abstract class SkeletonHBaseTest 
     do {
       name = prefix + "_" + Math.abs(new Random().nextLong());
     } while (tableNames.contains(name) && --tries > 0);
-    if (tableNames.contains(name))
+    if (tableNames.contains(name)) {
       throw new IllegalStateException("Couldn't find a unique table name, tableNames size: " + tableNames.size());
+    }
     tableNames.add(name);
     return name;
   }
 
-
   /**
    * startup an hbase cluster instance before a test suite runs
    */
-  @BeforeClass
-  public static void setup() {
-    if (!contextMap.containsKey(getContextHandle()))
+  public static void setupSkeletonHBaseTest() {
+    if (!contextMap.containsKey(getContextHandle())) {
       contextMap.put(getContextHandle(), new Context(getContextHandle()));
-
+    }
     contextMap.get(getContextHandle()).start();
   }
 

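SkeletonHBaseTest no longer starts the mini cluster from an inherited @BeforeClass; instead it exposes setupSkeletonHBaseTest(), which each subclass calls from its own @BeforeClass. That lets a test adjust the protected testConf before the cluster comes up, as TestRevisionManagerEndpoint does further down. A sketch of the subclass wiring with a hypothetical test class (the real examples follow in the hunks below):

    import org.junit.BeforeClass;
    import org.junit.Test;

    // Hypothetical subclass; mirrors the wiring added to the real tests below.
    public class TestSomeHBaseFeature extends SkeletonHBaseTest {

      @BeforeClass
      public static void setup() throws Throwable {
        // Optionally tweak the protected static testConf here, then start the
        // shared mini cluster for this test class.
        setupSkeletonHBaseTest();
      }

      @Test
      public void testSomething() throws Exception {
        // Test body would use getHbaseConf() / getHiveConf() from the base class.
      }
    }
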
Modified: hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseBulkOutputFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseBulkOutputFormat.java?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseBulkOutputFormat.java (original)
+++ hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseBulkOutputFormat.java Thu Oct 31 18:27:31 2013
@@ -66,6 +66,7 @@ import org.apache.hcatalog.mapreduce.HCa
 import org.apache.hcatalog.mapreduce.HCatOutputFormat;
 import org.apache.hcatalog.mapreduce.OutputJobInfo;
 
+import org.junit.BeforeClass;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -89,6 +90,11 @@ public class TestHBaseBulkOutputFormat e
   private final HiveConf allConf;
   private final HCatDriver hcatDriver;
 
+  @BeforeClass
+  public static void setup() throws Throwable {
+    setupSkeletonHBaseTest();
+  }
+
   public TestHBaseBulkOutputFormat() {
     allConf = getHiveConf();
     allConf.set(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK.varname,

Modified: hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseDirectOutputFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseDirectOutputFormat.java?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseDirectOutputFormat.java (original)
+++ hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseDirectOutputFormat.java Thu Oct 31 18:27:31 2013
@@ -63,6 +63,7 @@ import org.apache.hcatalog.hbase.snapsho
 import org.apache.hcatalog.mapreduce.HCatInputFormat;
 import org.apache.hcatalog.mapreduce.HCatOutputFormat;
 import org.apache.hcatalog.mapreduce.OutputJobInfo;
+import org.junit.BeforeClass;
 import org.junit.Test;
 
 import java.io.IOException;
@@ -83,6 +84,11 @@ public class TestHBaseDirectOutputFormat
   private final HiveConf allConf;
   private final HCatDriver hcatDriver;
 
+  @BeforeClass
+  public static void setup() throws Throwable {
+    setupSkeletonHBaseTest();
+  }
+
   public TestHBaseDirectOutputFormat() {
     allConf = getHiveConf();
     allConf.set(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK.varname,

Modified: hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseHCatStorageHandler.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseHCatStorageHandler.java?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseHCatStorageHandler.java (original)
+++ hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseHCatStorageHandler.java Thu Oct 31 18:27:31 2013
@@ -41,6 +41,7 @@ import org.apache.hcatalog.cli.SemanticA
 import org.apache.hcatalog.hbase.snapshot.RevisionManager;
 import org.apache.hcatalog.hbase.snapshot.RevisionManagerConfiguration;
 import org.apache.zookeeper.KeeperException.NoNodeException;
+import org.junit.BeforeClass;
 import org.junit.Test;
 
 public class TestHBaseHCatStorageHandler extends SkeletonHBaseTest {
@@ -49,6 +50,11 @@ public class TestHBaseHCatStorageHandler
   private static HCatDriver hcatDriver;
   private static Warehouse  wh;
 
+  @BeforeClass
+  public static void setup() throws Throwable {
+    setupSkeletonHBaseTest();
+  }
+
   public void Initialize() throws Exception {
 
     hcatConf = getHiveConf();

Modified: hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHCatHBaseInputFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHCatHBaseInputFormat.java?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHCatHBaseInputFormat.java (original)
+++ hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHCatHBaseInputFormat.java Thu Oct 31 18:27:31 2013
@@ -73,6 +73,7 @@ import org.apache.hcatalog.hbase.snapsho
 import org.apache.hcatalog.mapreduce.HCatInputFormat;
 import org.apache.hcatalog.mapreduce.InputJobInfo;
 import org.apache.hcatalog.mapreduce.PartInfo;
+import org.junit.BeforeClass;
 import org.junit.Test;
 
 public class TestHCatHBaseInputFormat extends SkeletonHBaseTest {
@@ -83,6 +84,11 @@ public class TestHCatHBaseInputFormat ex
   private final byte[] QUALIFIER1 = Bytes.toBytes("testQualifier1");
   private final byte[] QUALIFIER2 = Bytes.toBytes("testQualifier2");
 
+  @BeforeClass
+  public static void setup() throws Throwable {
+    setupSkeletonHBaseTest();
+  }
+
   public TestHCatHBaseInputFormat() throws Exception {
     hcatConf = getHiveConf();
     hcatConf.set(ConfVars.SEMANTIC_ANALYZER_HOOK.varname,

Modified: hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestSnapshots.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestSnapshots.java?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestSnapshots.java (original)
+++ hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestSnapshots.java Thu Oct 31 18:27:31 2013
@@ -41,12 +41,18 @@ import org.apache.hcatalog.common.HCatUt
 import org.apache.hcatalog.hbase.snapshot.TableSnapshot;
 import org.apache.hcatalog.mapreduce.HCatInputFormat;
 import org.apache.hcatalog.mapreduce.InputJobInfo;
+import org.junit.BeforeClass;
 import org.junit.Test;
 
 public class TestSnapshots extends SkeletonHBaseTest {
   private static HiveConf hcatConf;
   private static HCatDriver hcatDriver;
 
+  @BeforeClass
+  public static void setup() throws Throwable {
+    setupSkeletonHBaseTest();
+  }
+
   public void Initialize() throws Exception {
     hcatConf = getHiveConf();
     hcatConf.set(ConfVars.SEMANTIC_ANALYZER_HOOK.varname,

Modified: hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestIDGenerator.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestIDGenerator.java?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestIDGenerator.java (original)
+++ hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestIDGenerator.java Thu Oct 31 18:27:31 2013
@@ -26,10 +26,16 @@ import java.util.HashMap;
 
 import org.apache.hcatalog.hbase.SkeletonHBaseTest;
 import org.junit.Assert;
+import org.junit.BeforeClass;
 import org.junit.Test;
 
 public class TestIDGenerator extends SkeletonHBaseTest {
 
+  @BeforeClass
+  public static void setup() throws Throwable {
+    setupSkeletonHBaseTest();
+  }
+
   @Test
   public void testIDGeneration() throws Exception {
 

Modified: hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestRevisionManager.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestRevisionManager.java?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestRevisionManager.java (original)
+++ hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestRevisionManager.java Thu Oct 31 18:27:31 2013
@@ -32,10 +32,16 @@ import org.apache.hcatalog.hbase.snapsho
 import org.apache.zookeeper.KeeperException;
 import org.apache.zookeeper.ZooKeeper;
 import org.apache.zookeeper.data.Stat;
+import org.junit.BeforeClass;
 import org.junit.Test;
 
 public class TestRevisionManager extends SkeletonHBaseTest {
 
+  @BeforeClass
+  public static void setup() throws Throwable {
+    setupSkeletonHBaseTest();
+  }
+
   @Test
   public void testBasicZNodeCreation() throws IOException, KeeperException, InterruptedException {
 

Modified: hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestRevisionManagerEndpoint.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestRevisionManagerEndpoint.java?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestRevisionManagerEndpoint.java (original)
+++ hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestRevisionManagerEndpoint.java Thu Oct 31 18:27:31 2013
@@ -31,17 +31,20 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
 import org.apache.hcatalog.hbase.SkeletonHBaseTest;
 import org.junit.Assert;
+import org.junit.BeforeClass;
 import org.junit.Test;
 
 public class TestRevisionManagerEndpoint extends SkeletonHBaseTest {
 
-  static {
+  @BeforeClass
+  public static void setup() throws Throwable {
     // test case specific mini cluster settings
     testConf = new Configuration(false);
     testConf.setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,
       "org.apache.hcatalog.hbase.snapshot.RevisionManagerEndpoint",
       "org.apache.hadoop.hbase.coprocessor.GenericEndpoint");
     testConf.set(RMConstants.REVISION_MGR_ENDPOINT_IMPL_CLASS, MockRM.class.getName());
+    setupSkeletonHBaseTest();
   }
 
   /**

Modified: hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestZNodeSetUp.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestZNodeSetUp.java?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestZNodeSetUp.java (original)
+++ hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestZNodeSetUp.java Thu Oct 31 18:27:31 2013
@@ -38,6 +38,7 @@ import org.apache.hcatalog.cli.SemanticA
 import org.apache.hcatalog.hbase.SkeletonHBaseTest;
 import org.apache.zookeeper.ZooKeeper;
 import org.apache.zookeeper.data.Stat;
+import org.junit.BeforeClass;
 import org.junit.Test;
 
 
@@ -46,6 +47,11 @@ public class TestZNodeSetUp extends Skel
   private static HiveConf hcatConf;
   private static HCatDriver hcatDriver;
 
+  @BeforeClass
+  public static void setup() throws Throwable {
+    setupSkeletonHBaseTest();
+  }
+
   public void Initialize() throws Exception {
 
     hcatConf = getHiveConf();

Modified: hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/TestPigHBaseStorageHandler.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/TestPigHBaseStorageHandler.java?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/TestPigHBaseStorageHandler.java (original)
+++ hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/TestPigHBaseStorageHandler.java Thu Oct 31 18:27:31 2013
@@ -73,6 +73,7 @@ public class TestPigHBaseStorageHandler 
     URI fsuri = getFileSystem().getUri();
     Path whPath = new Path(fsuri.getScheme(), fsuri.getAuthority(),
         getTestDir());
+    hcatConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
     hcatConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
     hcatConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
     hcatConf.set(ConfVars.METASTOREWAREHOUSE.varname, whPath.toString());

Added: hive/trunk/hcatalog/webhcat/java-client/pom-new.xml
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/webhcat/java-client/pom-new.xml?rev=1537576&view=auto
==============================================================================
--- hive/trunk/hcatalog/webhcat/java-client/pom-new.xml (added)
+++ hive/trunk/hcatalog/webhcat/java-client/pom-new.xml Thu Oct 31 18:27:31 2013
@@ -0,0 +1,88 @@
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied.  See the License for the
+  specific language governing permissions and limitations
+  under the License.
+-->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+     xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+     xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.hive.hcatalog</groupId>
+    <artifactId>hive-hcatalog</artifactId>
+    <version>0.13.0-SNAPSHOT</version>
+    <relativePath>../../pom.xml</relativePath>
+  </parent>
+
+  <artifactId>hive-webhcat-java-client</artifactId>
+  <packaging>jar</packaging>
+  <name>Hive HCatalog Webhcat Java Client</name>
+
+  <properties>
+    <hive.path.to.root>../../..</hive.path.to.root>
+  </properties>
+
+  <dependencies>
+    <!-- intra-project -->
+    <dependency>
+      <groupId>org.apache.hive.hcatalog</groupId>
+      <artifactId>hive-hcatalog-core</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <!-- test intra-project -->
+    <dependency>
+      <groupId>org.apache.hive.hcatalog</groupId>
+      <artifactId>hive-hcatalog-core</artifactId>
+      <version>${project.version}</version>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+
+  <profiles>
+    <profile>
+      <id>hadoop-1</id>
+      <activation>
+        <activeByDefault>true</activeByDefault>
+      </activation>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-core</artifactId>
+          <version>${hadoop-20S.version}</version>
+        </dependency>
+      </dependencies>
+    </profile>
+   <profile>
+      <id>hadoop-2</id>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-common</artifactId>
+          <version>${hadoop-23.version}</version>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-mapreduce-client-core</artifactId>
+          <version>${hadoop-23.version}</version>
+        </dependency>
+      </dependencies>
+    </profile>
+  </profiles>
+
+</project>

Modified: hive/trunk/hcatalog/webhcat/java-client/src/test/java/org/apache/hcatalog/api/TestHCatClient.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/webhcat/java-client/src/test/java/org/apache/hcatalog/api/TestHCatClient.java?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/hcatalog/webhcat/java-client/src/test/java/org/apache/hcatalog/api/TestHCatClient.java (original)
+++ hive/trunk/hcatalog/webhcat/java-client/src/test/java/org/apache/hcatalog/api/TestHCatClient.java Thu Oct 31 18:27:31 2013
@@ -36,7 +36,6 @@ import org.apache.hadoop.hive.ql.io.RCFi
 import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe;
 import org.apache.hadoop.mapred.TextInputFormat;
 import org.apache.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
-import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hcatalog.common.HCatConstants;
 import org.apache.hcatalog.common.HCatException;
 import org.apache.hcatalog.data.schema.HCatFieldSchema;
@@ -52,6 +51,7 @@ import static org.junit.Assert.assertEqu
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.fail;
 
 /**
  * @deprecated Use/modify {@link org.apache.hive.hcatalog.api.TestHCatClient} instead
@@ -123,9 +123,9 @@ public class TestHCatClient {
     assertTrue(testDb.getComment() == null);
     assertTrue(testDb.getProperties().size() == 0);
     String warehouseDir = System
-      .getProperty(ConfVars.METASTOREWAREHOUSE.varname, "/user/hive/warehouse");
-    assertTrue(testDb.getLocation().equals(
-      "file:" + warehouseDir + "/" + db + ".db"));
+      .getProperty("test.warehouse.dir", "/user/hive/warehouse");
+    String expectedDir = warehouseDir.replaceAll("\\\\", "/").replaceFirst("pfile:///", "pfile:/");
+    assertEquals(expectedDir + "/" + db + ".db", testDb.getLocation());
     ArrayList<HCatFieldSchema> cols = new ArrayList<HCatFieldSchema>();
     cols.add(new HCatFieldSchema("id", Type.INT, "id comment"));
     cols.add(new HCatFieldSchema("value", Type.STRING, "value comment"));
@@ -145,6 +145,7 @@ public class TestHCatClient {
     // will result in an exception.
     try {
       client.createTable(tableDesc);
+      fail("Expected exception");
     } catch (HCatException e) {
       assertTrue(e.getMessage().contains(
         "AlreadyExistsException while creating table."));
@@ -159,8 +160,7 @@ public class TestHCatClient {
       TextInputFormat.class.getName()));
     assertTrue(table2.getOutputFileFormat().equalsIgnoreCase(
       IgnoreKeyTextOutputFormat.class.getName()));
-    assertTrue(table2.getLocation().equalsIgnoreCase(
-      "file:" + warehouseDir + "/" + db + ".db/" + tableTwo));
+    assertEquals((expectedDir + "/" + db + ".db/" + tableTwo).toLowerCase(), table2.getLocation().toLowerCase());
     client.close();
   }
 

Modified: hive/trunk/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java (original)
+++ hive/trunk/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java Thu Oct 31 18:27:31 2013
@@ -36,7 +36,6 @@ import org.apache.hadoop.hive.ql.io.RCFi
 import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe;
 import org.apache.hadoop.mapred.TextInputFormat;
 import org.apache.hive.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
-import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hive.hcatalog.common.HCatConstants;
 import org.apache.hive.hcatalog.common.HCatException;
 import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
@@ -48,6 +47,7 @@ import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.junit.Assert.fail;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
@@ -120,13 +120,9 @@ public class TestHCatClient {
     assertTrue(testDb.getComment() == null);
     assertTrue(testDb.getProperties().size() == 0);
     String warehouseDir = System
-      .getProperty(ConfVars.METASTOREWAREHOUSE.varname, "/user/hive/warehouse");
-    String expectedDir = warehouseDir.replaceAll("\\\\", "/");
-    if (!expectedDir.startsWith("/")) {
-      expectedDir = "/" + expectedDir;
-    }
-    assertTrue(testDb.getLocation().equals(
-      "file:" + expectedDir + "/" + db + ".db"));
+      .getProperty("test.warehouse.dir", "/user/hive/warehouse");
+    String expectedDir = warehouseDir.replaceAll("\\\\", "/").replaceFirst("pfile:///", "pfile:/");
+    assertEquals(expectedDir + "/" + db + ".db", testDb.getLocation());
     ArrayList<HCatFieldSchema> cols = new ArrayList<HCatFieldSchema>();
     cols.add(new HCatFieldSchema("id", Type.INT, "id comment"));
     cols.add(new HCatFieldSchema("value", Type.STRING, "value comment"));
@@ -146,6 +142,7 @@ public class TestHCatClient {
     // will result in an exception.
     try {
       client.createTable(tableDesc);
+      fail("Expected exception");
     } catch (HCatException e) {
       assertTrue(e.getMessage().contains(
         "AlreadyExistsException while creating table."));
@@ -160,8 +157,7 @@ public class TestHCatClient {
       TextInputFormat.class.getName()));
     assertTrue(table2.getOutputFileFormat().equalsIgnoreCase(
       IgnoreKeyTextOutputFormat.class.getName()));
-    assertTrue(table2.getLocation().equalsIgnoreCase(
-      "file:" + expectedDir + "/" + db + ".db/" + tableTwo));
+    assertEquals((expectedDir + "/" + db + ".db/" + tableTwo).toLowerCase(), table2.getLocation().toLowerCase());
     client.close();
   }
 

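Both copies of TestHCatClient now derive the expected database location from the test.warehouse.dir property and normalize it before comparing: backslashes become forward slashes and the pfile:/// prefix is collapsed to the single-slash form the assertions compare against. A small sketch of that normalization on an illustrative Windows-style value:

    public class WarehousePathNormalize {
      public static void main(String[] args) {
        // Illustrative raw value; the tests read it via -Dtest.warehouse.dir.
        String warehouseDir = "pfile:///C:\\hive\\build\\warehouse";
        String expectedDir = warehouseDir
            .replaceAll("\\\\", "/")               // backslashes -> forward slashes
            .replaceFirst("pfile:///", "pfile:/"); // collapse to single-slash URI form
        System.out.println(expectedDir);           // prints pfile:/C:/hive/build/warehouse
      }
    }
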
Added: hive/trunk/hcatalog/webhcat/svr/pom-new.xml
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/webhcat/svr/pom-new.xml?rev=1537576&view=auto
==============================================================================
--- hive/trunk/hcatalog/webhcat/svr/pom-new.xml (added)
+++ hive/trunk/hcatalog/webhcat/svr/pom-new.xml Thu Oct 31 18:27:31 2013
@@ -0,0 +1,184 @@
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied.  See the License for the
+  specific language governing permissions and limitations
+  under the License.
+-->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+     xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+     xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.hive.hcatalog</groupId>
+    <artifactId>hive-hcatalog</artifactId>
+    <version>0.13.0-SNAPSHOT</version>
+    <relativePath>../../pom.xml</relativePath>
+  </parent>
+
+  <artifactId>hive-webhcat</artifactId>
+  <packaging>jar</packaging>
+  <name>Hive HCatalog Webhcat</name>
+
+  <properties>
+    <hive.path.to.root>../../..</hive.path.to.root>
+  </properties>
+
+  <dependencies>
+
+    <!-- intra-project -->
+    <dependency>
+      <groupId>org.apache.hive.hcatalog</groupId>
+      <artifactId>hive-hcatalog-core</artifactId>
+      <version>${project.version}</version>
+      <scope>provided</scope>
+    </dependency>
+    <!-- inter-project -->
+    <dependency>
+      <groupId>com.sun.jersey</groupId>
+      <artifactId>jersey-json</artifactId>
+      <version>${jersey.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>com.sun.jersey</groupId>
+      <artifactId>jersey-servlet</artifactId>
+      <version>${jersey.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>com.sun.jersey.contribs</groupId>
+      <artifactId>wadl-resourcedoc-doclet</artifactId>
+      <version>${wadl-resourcedoc-doclet.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-exec</artifactId>
+      <version>${commons-exec.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.zookeeper</groupId>
+      <artifactId>zookeeper</artifactId>
+      <version>${zookeeper.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.codehaus.jackson</groupId>
+      <artifactId>jackson-core-asl</artifactId>
+      <version>${jackson.version}</version>
+    </dependency>
+     <dependency>
+      <groupId>org.codehaus.jackson</groupId>
+      <artifactId>jackson-mapper-asl</artifactId>
+      <version>${jackson.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.eclipse.jetty.aggregate</groupId>
+      <artifactId>jetty-all-server</artifactId>
+      <version>${jetty.webhcat.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>jul-to-slf4j</artifactId>
+      <version>${slf4j.version}</version>
+    </dependency>
+  </dependencies>
+
+  <profiles>
+    <profile>
+      <id>hadoop-1</id>
+      <activation>
+        <activeByDefault>true</activeByDefault>
+      </activation>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-core</artifactId>
+          <version>${hadoop-20S.version}</version>
+        </dependency>
+      </dependencies>
+    </profile>
+   <profile>
+      <id>hadoop-2</id>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-auth</artifactId>
+          <version>${hadoop-23.version}</version>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-common</artifactId>
+          <version>${hadoop-23.version}</version>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-hdfs</artifactId>
+          <version>${hadoop-23.version}</version>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-mapreduce-client-core</artifactId>
+          <version>${hadoop-23.version}</version>
+        </dependency>
+      </dependencies>
+    </profile>
+  </profiles>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-javadoc-plugin</artifactId>
+        <version>${maven-javadoc-plugin.version}</version>
+        <executions>
+          <execution>
+            <id>resourcesdoc.xml</id>
+            <goals>
+              <goal>javadoc</goal>
+            </goals>
+            <phase>compile</phase>
+            <configuration>
+              <encoding>${project.build.sourceEncoding}</encoding>
+              <verbose>true</verbose>
+              <show>public</show>
+              <doclet>com.sun.jersey.wadl.resourcedoc.ResourceDoclet</doclet>
+              <docletArtifacts>
+                <docletArtifact>
+                  <groupId>com.sun.jersey.contribs</groupId>
+                  <artifactId>wadl-resourcedoc-doclet</artifactId>
+                  <version>${wadl-resourcedoc-doclet.version}</version>
+                </docletArtifact>
+                <!--
+                    Also specify jersey and xerces as doclet artifacts as the ResourceDoclet
+                    uses classes provided by them to generate the resourcedoc.
+                 -->
+                <docletArtifact>
+                  <groupId>com.sun.jersey</groupId>
+                  <artifactId>jersey-server</artifactId>
+                  <version>${jersey.version}</version>
+                </docletArtifact>
+                <docletArtifact>
+                  <groupId>xerces</groupId>
+                  <artifactId>xercesImpl</artifactId>
+                  <version>${xerces.version}</version>
+                </docletArtifact>
+              </docletArtifacts>
+              <additionalparam>-output ${project.build.outputDirectory}/resourcedoc.xml</additionalparam>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+</project>
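
The doclet execution above writes resourcedoc.xml into ${project.build.outputDirectory}, so it ends up inside the webhcat-svr jar and can be read back off the classpath. A minimal sketch of checking that, using only JDK XML APIs (the class name here is illustrative; only the /resourcedoc.xml resource name comes from the -output setting above):

    import java.io.InputStream;
    import javax.xml.parsers.DocumentBuilderFactory;
    import org.w3c.dom.Document;

    public class ResourceDocCheck {
      public static void main(String[] args) throws Exception {
        // The doclet run configured above places resourcedoc.xml at the root
        // of the build output directory, i.e. on the jar's classpath.
        InputStream in = ResourceDocCheck.class.getResourceAsStream("/resourcedoc.xml");
        if (in == null) {
          System.err.println("resourcedoc.xml not found on the classpath");
          return;
        }
        Document doc = DocumentBuilderFactory.newInstance()
            .newDocumentBuilder().parse(in);
        System.out.println("root element: " + doc.getDocumentElement().getNodeName());
      }
    }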

Modified: hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HcatDelegator.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HcatDelegator.java?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HcatDelegator.java (original)
+++ hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HcatDelegator.java Thu Oct 31 18:27:31 2013
@@ -107,12 +107,12 @@ public class HcatDelegator extends Launc
     }
     LOG.info("Main.getAppConfigInstance().get(AppConfig.UNIT_TEST_MODE)=" +
         Main.getAppConfigInstance().get(AppConfig.UNIT_TEST_MODE));
-    if(System.getProperty("hive.metastore.warehouse.dir") != null) {
+    if(System.getProperty("test.warehouse.dir") != null) {
       /*when running in unit test mode, pass this property to HCat,
       which will in turn pass it to Hive to make sure that Hive
       tries to write to a directory that exists.*/
       args.add("-D");
-      args.add("hive.metastore.warehouse.dir=" + System.getProperty("hive.metastore.warehouse.dir"));
+      args.add("hive.metastore.warehouse.dir=" + System.getProperty("test.warehouse.dir"));
     }
     return args;
   }
@@ -645,7 +645,7 @@ public class HcatDelegator extends Launc
     } catch (HcatException e) {
       if (e.execBean.stderr.contains("SemanticException") &&
         e.execBean.stderr.contains("Partition not found")) {
-        String emsg = "Partition " + partition + " for table " 
+        String emsg = "Partition " + partition + " for table "
           + table + " does not exist" + db + "." + table + " does not exist";
         return JsonBuilder.create()
           .put("error", emsg)
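
The first hunk above makes the delegator forward test.warehouse.dir to HCat as hive.metastore.warehouse.dir when running in unit-test mode. A minimal sketch, under the assumption that a test harness sets the property before the server starts (the scratch directory name is made up for this example):

    import java.io.File;

    public class WarehouseSetup {
      public static void main(String[] args) {
        // Create a scratch warehouse directory and expose it through the
        // property read by HcatDelegator in unit-test mode.
        File warehouse = new File(System.getProperty("java.io.tmpdir"), "hive_test_warehouse");
        if (!warehouse.mkdirs() && !warehouse.isDirectory()) {
          throw new IllegalStateException("could not create " + warehouse);
        }
        System.setProperty("test.warehouse.dir", warehouse.getAbsolutePath());
        System.out.println("test.warehouse.dir=" + System.getProperty("test.warehouse.dir"));
      }
    }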

Modified: hive/trunk/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/TestWebHCatE2e.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/TestWebHCatE2e.java?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/TestWebHCatE2e.java (original)
+++ hive/trunk/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/TestWebHCatE2e.java Thu Oct 31 18:27:31 2013
@@ -44,7 +44,7 @@ import java.util.Map;
 /**
  * A set of tests exercising e2e WebHCat DDL APIs.  These tests are somewhat
 * between WebHCat e2e (hcatalog/src/tests/e2e/templeton) tests and simple
- * 
+ *
  * unit tests.  This will start a WebHCat server and make REST calls to it.
  * It doesn't need Hadoop or (standalone) metastore to be running.
  * Running this is much simpler than e2e tests.
@@ -75,7 +75,7 @@ public class TestWebHCatE2e {
       LOG.warn("Unable to find free port; using default: " + webhcatPort);
     }
     templetonBaseUrl = templetonBaseUrl.replace("50111", Integer.toString(webhcatPort));
-    templetonServer = new Main(new String[] {"-D" + 
+    templetonServer = new Main(new String[] {"-D" +
             AppConfig.UNIT_TEST_MODE + "=true", "-D" + AppConfig.PORT + "=" + webhcatPort});
     LOG.info("Starting Main; WebHCat using port: " + webhcatPort);
     templetonServer.run();
@@ -156,7 +156,7 @@ public class TestWebHCatE2e {
   public void createDataBase() throws IOException {
     Map<String, Object> props = new HashMap<String, Object>();
     props.put("comment", "Hello, there");
-    props.put("location", "file://" + System.getProperty("hive.metastore.warehouse.dir"));
+    props.put("location", System.getProperty("test.warehouse.dir"));
     Map<String, String> props2 = new HashMap<String, String>();
     props2.put("prop", "val");
     props.put("properties", props2);

Modified: hive/trunk/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestJobIDParser.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestJobIDParser.java?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestJobIDParser.java (original)
+++ hive/trunk/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestJobIDParser.java Thu Oct 31 18:27:31 2013
@@ -29,7 +29,7 @@ import junit.framework.Assert;
 public class TestJobIDParser {
   @Test
   public void testParsePig() throws IOException {
-    String errFileName = "../../src/test/data/status/pig";
+    String errFileName = "src/test/data/status/pig";
     PigJobIDParser pigJobIDParser = new PigJobIDParser(errFileName, new Configuration());
     List<String> jobs = pigJobIDParser.parseJobID();
     Assert.assertEquals(jobs.size(), 1);
@@ -37,7 +37,7 @@ public class TestJobIDParser {
 
   @Test
   public void testParseHive() throws IOException {
-    String errFileName = "../../src/test/data/status/hive";
+    String errFileName = "src/test/data/status/hive";
     HiveJobIDParser hiveJobIDParser = new HiveJobIDParser(errFileName, new Configuration());
     List<String> jobs = hiveJobIDParser.parseJobID();
     Assert.assertEquals(jobs.size(), 1);
@@ -45,7 +45,7 @@ public class TestJobIDParser {
 
   @Test
   public void testParseJar() throws IOException {
-    String errFileName = "../../src/test/data/status/jar";
+    String errFileName = "src/test/data/status/jar";
     JarJobIDParser jarJobIDParser = new JarJobIDParser(errFileName, new Configuration());
     List<String> jobs = jarJobIDParser.parseJobID();
     Assert.assertEquals(jobs.size(), 1);
@@ -53,7 +53,7 @@ public class TestJobIDParser {
 
   @Test
   public void testParseStreaming() throws IOException {
-    String errFileName = "../../src/test/data/status/streaming";
+    String errFileName = "src/test/data/status/streaming";
     JarJobIDParser jarJobIDParser = new JarJobIDParser(errFileName, new Configuration());
     List<String> jobs = jarJobIDParser.parseJobID();
     Assert.assertEquals(jobs.size(), 1);
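
The ../../ prefixes are dropped above because under the Maven layout surefire's default working directory is the module basedir, so fixtures can be addressed module-relative. A small sketch of that resolution (the file name comes from the hunks above; everything else is illustrative):

    import java.io.File;

    public class FixturePath {
      public static void main(String[] args) {
        // user.dir is the JVM working directory, which surefire sets to the
        // module basedir by default, so the module-relative path resolves.
        File status = new File(System.getProperty("user.dir"), "src/test/data/status/pig");
        System.out.println(status.getAbsolutePath()
            + (status.exists() ? " (found)" : " (missing)"));
      }
    }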

Modified: hive/trunk/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java (original)
+++ hive/trunk/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java Thu Oct 31 18:27:31 2013
@@ -35,7 +35,7 @@ public class TestTempletonUtils {
     "2011-12-15 18:12:21,758 [main] INFO  org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MapReduceLauncher - More information at: http://localhost:50030/jobdetails.jsp?jobid=job_201112140012_0047",
     "2011-12-15 18:12:46,907 [main] INFO  org.apache.pig.tools.pigstats.SimplePigStats - Script Statistics: "
   };
-  public static final String testDataDir = System.getProperty("test.data.dir");
+  public static final String testDataDir = System.getProperty("test.tmp.dir");
   File tmpFile;
   File usrFile;
 

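Here test.data.dir becomes test.tmp.dir as well, matching the property used by the other tests in this change. A minimal sketch of the scratch-directory pattern those tests follow; the "./" fallback and the subdirectory name are assumptions for illustration:

    import java.io.File;

    public class ScratchDir {
      public static void main(String[] args) {
        // Fall back to the current directory when the build does not set
        // test.tmp.dir, then create a per-test scratch subdirectory.
        String base = System.getProperty("test.tmp.dir", "./");
        File dir = new File(base, "templeton_utils_test");
        if (!dir.mkdirs() && !dir.isDirectory()) {
          throw new IllegalStateException("could not create " + dir);
        }
        System.out.println("scratch dir: " + dir.getAbsolutePath());
      }
    }
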
Added: hive/trunk/hwi/pom.xml
URL: http://svn.apache.org/viewvc/hive/trunk/hwi/pom.xml?rev=1537576&view=auto
==============================================================================
--- hive/trunk/hwi/pom.xml (added)
+++ hive/trunk/hwi/pom.xml Thu Oct 31 18:27:31 2013
@@ -0,0 +1,134 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.hive</groupId>
+    <artifactId>hive</artifactId>
+    <version>0.13.0-SNAPSHOT</version>
+    <relativePath>../pom.xml</relativePath>
+  </parent>
+
+  <artifactId>hive-hwi</artifactId>
+  <packaging>jar</packaging>
+  <name>Hive HWI</name>
+
+  <properties>
+    <hive.path.to.root>..</hive.path.to.root>
+  </properties>
+
+  <dependencies>
+    <!-- intra-project -->
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-cli</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-shims</artifactId>
+      <version>${project.version}</version>
+      <classifier>uberjar</classifier>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-exec</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <!-- inter-project -->
+    <dependency>
+      <groupId>commons-logging</groupId>
+      <artifactId>commons-logging</artifactId>
+      <version>${commons-logging.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.mortbay.jetty</groupId>
+      <artifactId>jetty</artifactId>
+      <version>${jetty.version}</version>
+    </dependency>
+    <!-- test intra-project -->
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-exec</artifactId>
+      <version>${project.version}</version>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+    </dependency>
+    <!-- test inter-project -->
+    <dependency>
+      <groupId>commons-httpclient</groupId>
+      <artifactId>commons-httpclient</artifactId>
+      <version>${commons-httpclient.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <version>${junit.version}</version>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+
+  <profiles>
+    <profile>
+      <id>hadoop-1</id>
+      <activation>
+        <activeByDefault>true</activeByDefault>
+      </activation>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-core</artifactId>
+          <version>${hadoop-20S.version}</version>
+          <optional>true</optional>
+        </dependency>
+      </dependencies>
+    </profile>
+   <profile>
+      <id>hadoop-2</id>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-common</artifactId>
+          <version>${hadoop-23.version}</version>
+          <optional>true</optional>
+        </dependency>
+      </dependencies>
+    </profile>
+  </profiles>
+
+  <build>
+    <sourceDirectory>${basedir}/src/java</sourceDirectory>
+    <testSourceDirectory>${basedir}/src/test</testSourceDirectory>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-war-plugin</artifactId>
+        <configuration>
+          <warSourceDirectory>${basedir}/web</warSourceDirectory>
+          <packagingExcludes>WEB-INF/lib/*</packagingExcludes>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+
+</project>

Added: hive/trunk/itests/custom-serde/pom.xml
URL: http://svn.apache.org/viewvc/hive/trunk/itests/custom-serde/pom.xml?rev=1537576&view=auto
==============================================================================
--- hive/trunk/itests/custom-serde/pom.xml (added)
+++ hive/trunk/itests/custom-serde/pom.xml Thu Oct 31 18:27:31 2013
@@ -0,0 +1,72 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.hive</groupId>
+    <artifactId>hive-it</artifactId>
+    <version>0.13.0-SNAPSHOT</version>
+    <relativePath>../pom.xml</relativePath>
+  </parent>
+
+  <artifactId>hive-it-custom-serde</artifactId>
+  <packaging>jar</packaging>
+  <name>Hive Integration - Custom Serde</name>
+
+  <properties>
+    <hive.path.to.root>../..</hive.path.to.root>
+  </properties>
+
+  <dependencies>
+    <!-- intra-project -->
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-exec</artifactId>
+      <version>${project.version}</version>
+      <optional>true</optional>
+    </dependency>
+  </dependencies>
+
+  <profiles>
+    <profile>
+      <id>hadoop-1</id>
+      <activation>
+        <activeByDefault>true</activeByDefault>
+      </activation>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-core</artifactId>
+          <version>${hadoop-20S.version}</version>
+         <optional>true</optional>
+        </dependency>
+      </dependencies>
+    </profile>
+   <profile>
+      <id>hadoop-2</id>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-common</artifactId>
+          <version>${hadoop-23.version}</version>
+          <optional>true</optional>
+        </dependency>
+      </dependencies>
+    </profile>
+  </profiles>
+
+</project>

Added: hive/trunk/itests/hcatalog-unit/pom.xml
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hcatalog-unit/pom.xml?rev=1537576&view=auto
==============================================================================
--- hive/trunk/itests/hcatalog-unit/pom.xml (added)
+++ hive/trunk/itests/hcatalog-unit/pom.xml Thu Oct 31 18:27:31 2013
@@ -0,0 +1,227 @@
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied.  See the License for the
+  specific language governing permissions and limitations
+  under the License.
+-->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+     xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+     xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.hive</groupId>
+    <artifactId>hive-it</artifactId>
+    <version>0.13.0-SNAPSHOT</version>
+    <relativePath>../pom.xml</relativePath>
+  </parent>
+
+  <artifactId>hive-hcatalog-it-unit</artifactId>
+  <packaging>jar</packaging>
+  <name>Hive Integration - HCatalog Unit Tests</name>
+
+  <properties>
+    <hive.path.to.root>../..</hive.path.to.root>
+  </properties>
+
+  <dependencies>
+    <!-- test intra-project -->
+    <dependency>
+      <groupId>org.apache.hive.hcatalog</groupId>
+      <artifactId>hive-hcatalog-core</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive.hcatalog</groupId>
+      <artifactId>hive-hcatalog-core</artifactId>
+      <version>${project.version}</version>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive.hcatalog</groupId>
+      <artifactId>hive-hbase-storage-handler</artifactId>
+      <version>${project.version}</version>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive.hcatalog</groupId>
+      <artifactId>hive-hcatalog-pig-adapter</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-cli</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-common</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-metastore</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-exec</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <!-- test inter-project -->
+    <dependency>
+      <groupId>commons-io</groupId>
+      <artifactId>commons-io</artifactId>
+      <version>${commons-io.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase</artifactId>
+      <version>${hbase.version}</version>
+      <scope>test</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>org.apache.thrift</groupId>
+          <artifactId>libthrift</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <version>${junit.version}</version>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+
+  <profiles>
+    <profile>
+      <id>hadoop-1</id>
+      <activation>
+        <activeByDefault>true</activeByDefault>
+      </activation>
+      <dependencies>
+        <!-- test -->
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-core</artifactId>
+          <version>${hadoop-20S.version}</version>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-tools</artifactId>
+          <version>${hadoop-20S.version}</version>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-test</artifactId>
+          <version>${hadoop-20S.version}</version>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.pig</groupId>
+          <artifactId>pig</artifactId>
+          <version>${pig.version}</version>
+          <scope>test</scope>
+        </dependency>
+      </dependencies>
+    </profile>
+   <profile>
+      <id>hadoop-2</id>
+      <dependencies>
+        <!-- test -->
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-annotations</artifactId>
+          <version>${hadoop-23.version}</version>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-archives</artifactId>
+          <version>${hadoop-23.version}</version>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-common</artifactId>
+          <version>${hadoop-23.version}</version>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-mapreduce-client-core</artifactId>
+          <version>${hadoop-23.version}</version>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-hdfs</artifactId>
+          <version>${hadoop-23.version}</version>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-hdfs</artifactId>
+          <version>${hadoop-23.version}</version>
+          <classifier>tests</classifier>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
+          <version>${hadoop-23.version}</version>
+          <classifier>tests</classifier>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.pig</groupId>
+          <artifactId>pig</artifactId>
+          <version>${pig.version}</version>
+          <classifier>h2</classifier>
+          <scope>test</scope>
+        </dependency>
+      </dependencies>
+    </profile>
+  </profiles>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-jar-plugin</artifactId>
+        <executions>
+          <execution>
+            <goals>
+              <goal>test-jar</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+</project>

Added: hive/trunk/itests/hive-unit/pom.xml
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hive-unit/pom.xml?rev=1537576&view=auto
==============================================================================
--- hive/trunk/itests/hive-unit/pom.xml (added)
+++ hive/trunk/itests/hive-unit/pom.xml Thu Oct 31 18:27:31 2013
@@ -0,0 +1,207 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.hive</groupId>
+    <artifactId>hive-it</artifactId>
+    <version>0.13.0-SNAPSHOT</version>
+    <relativePath>../pom.xml</relativePath>
+  </parent>
+
+  <artifactId>hive-it-unit</artifactId>
+  <packaging>jar</packaging>
+  <name>Hive Integration - Unit Tests</name>
+
+  <properties>
+    <hive.path.to.root>../..</hive.path.to.root>
+  </properties>
+
+  <dependencies>
+    <!-- test intra-project -->
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-common</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-beeline</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-cli</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-it-util</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-jdbc</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-metastore</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-metastore</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+      <classifier>tests</classifier>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-serde</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-service</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-service</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+      <classifier>tests</classifier>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-exec</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-exec</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+      <classifier>tests</classifier>
+    </dependency>
+    <!-- test inter-project -->
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <version>${junit.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-all</artifactId>
+      <version>${mockito-all.version}</version>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+
+  <profiles>
+    <profile>
+      <id>hadoop-1</id>
+      <activation>
+        <activeByDefault>true</activeByDefault>
+      </activation>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-core</artifactId>
+          <version>${hadoop-20S.version}</version>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-test</artifactId>
+          <version>${hadoop-20S.version}</version>
+          <scope>test</scope>
+        </dependency>
+      </dependencies>
+    </profile>
+   <profile>
+      <id>hadoop-2</id>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-common</artifactId>
+          <version>${hadoop-23.version}</version>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-hdfs</artifactId>
+          <version>${hadoop-23.version}</version>
+          <classifier>tests</classifier>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
+          <version>${hadoop-23.version}</version>
+          <classifier>tests</classifier>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-mapreduce-client-core</artifactId>
+          <version>${hadoop-23.version}</version>
+          <scope>test</scope>
+        </dependency>
+      </dependencies>
+    </profile>
+  </profiles>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-antrun-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>setup-metastore-scripts</id>
+            <phase>process-test-resources</phase>
+            <goals>
+              <goal>run</goal>
+            </goals>
+            <configuration>
+              <target>
+                <mkdir dir="${test.tmp.dir}/scripts/metastore" />
+                <copy todir="${test.tmp.dir}/scripts/metastore">
+                  <fileset dir="${basedir}/${hive.path.to.root}/metastore/scripts/"/>
+                </copy>
+              </target>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+
+
+</project>

Added: hive/trunk/itests/pom.xml
URL: http://svn.apache.org/viewvc/hive/trunk/itests/pom.xml?rev=1537576&view=auto
==============================================================================
--- hive/trunk/itests/pom.xml (added)
+++ hive/trunk/itests/pom.xml Thu Oct 31 18:27:31 2013
@@ -0,0 +1,43 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.hive</groupId>
+    <artifactId>hive</artifactId>
+    <version>0.13.0-SNAPSHOT</version>
+    <relativePath>../pom.xml</relativePath>
+  </parent>
+
+  <artifactId>hive-it</artifactId>
+  <packaging>pom</packaging>
+  <name>Hive Integration - Parent</name>
+
+  <properties>
+    <hive.path.to.root>..</hive.path.to.root>
+  </properties>
+
+  <modules>
+   <module>custom-serde</module>
+   <module>hive-unit</module>
+   <module>hcatalog-unit</module>
+   <module>util</module>
+   <module>test-serde</module>
+   <module>qtest</module>
+  </modules>
+
+</project>


