hive-commits mailing list archives

From br...@apache.org
Subject svn commit: r1537576 [2/23] - in /hive/trunk: ./ ant/ ant/src/org/apache/hadoop/hive/ant/ beeline/ beeline/src/java/org/apache/hive/beeline/ beeline/src/test/org/apache/hive/beeline/src/test/ cli/ common/ common/src/java/conf/ common/src/scripts/ commo...
Date Thu, 31 Oct 2013 18:27:46 GMT
Added: hive/trunk/common/pom.xml
URL: http://svn.apache.org/viewvc/hive/trunk/common/pom.xml?rev=1537576&view=auto
==============================================================================
--- hive/trunk/common/pom.xml (added)
+++ hive/trunk/common/pom.xml Thu Oct 31 18:27:31 2013
@@ -0,0 +1,164 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.hive</groupId>
+    <artifactId>hive</artifactId>
+    <version>0.13.0-SNAPSHOT</version>
+    <relativePath>../pom.xml</relativePath>
+  </parent>
+
+  <artifactId>hive-common</artifactId>
+  <packaging>jar</packaging>
+  <name>Hive Common</name>
+
+  <properties>
+    <hive.path.to.root>..</hive.path.to.root>
+  </properties>
+
+  <dependencies>
+    <!-- intra-project -->
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-shims</artifactId>
+      <version>${project.version}</version>
+      <classifier>uberjar</classifier>
+    </dependency>
+    <!-- inter-project -->
+    <dependency>
+      <groupId>commons-cli</groupId>
+      <artifactId>commons-cli</artifactId>
+      <version>${commons-cli.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>commons-lang</groupId>
+      <artifactId>commons-lang</artifactId>
+      <version>${commons-lang.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>commons-logging</groupId>
+      <artifactId>commons-logging</artifactId>
+      <version>${commons-logging.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>log4j</groupId>
+      <artifactId>log4j</artifactId>
+      <version>${log4j.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-compress</artifactId>
+      <version>${commons-compress.version}</version>
+    </dependency>
+    <!-- test inter-project -->
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <version>${junit.version}</version>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+
+  <profiles>
+    <profile>
+      <id>hadoop-1</id>
+      <activation>
+        <activeByDefault>true</activeByDefault>
+      </activation>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-core</artifactId>
+          <version>${hadoop-20S.version}</version>
+          <optional>true</optional>
+        </dependency>
+      </dependencies>
+    </profile>
+    <profile>
+      <id>hadoop-2</id>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-common</artifactId>
+          <version>${hadoop-23.version}</version>
+          <optional>true</optional>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-mapreduce-client-core</artifactId>
+          <version>${hadoop-23.version}</version>
+          <optional>true</optional>
+        </dependency>
+      </dependencies>
+    </profile>
+  </profiles>
+
+  <build>
+    <sourceDirectory>${basedir}/src/java</sourceDirectory>
+    <testSourceDirectory>${basedir}/src/test</testSourceDirectory>
+    <scriptSourceDirectory>${basedir}/src/scripts</scriptSourceDirectory>
+    <testResources>
+      <testResource>
+        <directory>${basedir}/src/test/resources</directory>
+      </testResource>
+    </testResources>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-antrun-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>generate-version-annotation</id>
+            <phase>generate-sources</phase>
+            <configuration>
+              <target>
+                <exec executable="bash" failonerror="true">
+                  <arg value="${basedir}/src/scripts/saveVersion.sh"/>
+                  <arg value="${project.version}"/>
+                  <arg value="${hive.version.shortname}"/>
+                  <arg value="${basedir}/src"/>
+                </exec>
+              </target>
+            </configuration>
+            <goals>
+              <goal>run</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>build-helper-maven-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>add-source</id>
+            <phase>generate-sources</phase>
+            <goals>
+              <goal>add-source</goal>
+            </goals>
+            <configuration>
+              <sources>
+                <source>src/gen</source>
+              </sources>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+</project>
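
Note on profile selection: hadoop-1 is marked activeByDefault, so a plain build resolves the Hadoop 1 dependency set, and explicitly activating another profile switches the default off. A minimal sketch of selecting either profile with Maven's standard -P switch (the goals shown are illustrative):

    # build against Hadoop 1 (the active-by-default profile)
    mvn clean install -DskipTests
    # build against Hadoop 2; activating hadoop-2 deactivates the default hadoop-1 profile
    mvn clean install -DskipTests -Phadoop-2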

Modified: hive/trunk/common/src/java/conf/hive-log4j.properties
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/java/conf/hive-log4j.properties?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/common/src/java/conf/hive-log4j.properties (original)
+++ hive/trunk/common/src/java/conf/hive-log4j.properties Thu Oct 31 18:27:31 2013
@@ -53,7 +53,7 @@ log4j.appender.DRFA.layout.ConversionPat
 
 #
 # console
-# Add "console" to rootlogger above if you want to use this 
+# Add "console" to rootlogger above if you want to use this
 #
 
 log4j.appender.console=org.apache.log4j.ConsoleAppender

Modified: hive/trunk/common/src/scripts/saveVersion.sh
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/scripts/saveVersion.sh?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/common/src/scripts/saveVersion.sh (original)
+++ hive/trunk/common/src/scripts/saveVersion.sh Thu Oct 31 18:27:31 2013
@@ -33,7 +33,7 @@ dir=`pwd`
 cwd=`dirname $dir`
 if [ "$revision" = "" ]; then
     if git rev-parse HEAD 2>/dev/null > /dev/null ; then
-        revision=`git log -1 --pretty=format:"%H" ../`
+        revision=`git log -1 --pretty=format:"%H"`
         hostname=`hostname`
         branch=`git branch | sed -n -e 's/^* //p'`
         url="git://${hostname}${cwd}"
@@ -57,7 +57,13 @@ if [ "$url" = "" ]; then
     url="file://$cwd"
 fi
 
-srcChecksum=`find ../ -name '*.java' | grep -v generated-sources | LC_ALL=C sort | xargs md5sum | md5sum | cut -d ' ' -f 1`
+if [ -x /sbin/md5 ]; then
+  md5="/sbin/md5"
+else
+  md5="md5sum"
+fi
+
+srcChecksum=`find ../ -name '*.java' | grep -v generated-sources | LC_ALL=C sort | xargs $md5 | $md5 | cut -d ' ' -f 1`
 
 mkdir -p $src_dir/gen/org/apache/hive/common
 

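Note on the md5 change: the detection keeps saveVersion.sh working on systems such as BSD/OS X that ship md5 instead of GNU md5sum. The final cut still isolates the digest either way, since md5sum prints the hash first and BSD md5 prints only the hash when reading stdin. A self-contained sketch of the same pattern, assuming a POSIX shell:

    #!/bin/sh
    # prefer BSD md5 when present, otherwise fall back to GNU md5sum
    if [ -x /sbin/md5 ]; then
      md5="/sbin/md5"
    else
      md5="md5sum"
    fi
    # checksum of checksums: hash every source file, then hash the sorted listing
    find . -name '*.java' | LC_ALL=C sort | xargs $md5 | $md5 | cut -d ' ' -f 1
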
Modified: hive/trunk/common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java (original)
+++ hive/trunk/common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java Thu Oct 31 18:27:31 2013
@@ -44,7 +44,7 @@ public class TestHiveLogging extends Tes
     process = null;
   }
 
-  private void configLog(String hiveLog4jTest, String hiveExecLog4jTest) 
+  private void configLog(String hiveLog4jTest, String hiveExecLog4jTest)
   throws Exception {
     String expectedLog4jTestPath = HiveTestUtils.getFileFromClasspath(hiveLog4jTest);
     String expectedLog4jExecPath = HiveTestUtils.getFileFromClasspath(hiveExecLog4jTest);

Added: hive/trunk/contrib/pom.xml
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/pom.xml?rev=1537576&view=auto
==============================================================================
--- hive/trunk/contrib/pom.xml (added)
+++ hive/trunk/contrib/pom.xml Thu Oct 31 18:27:31 2013
@@ -0,0 +1,111 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.hive</groupId>
+    <artifactId>hive</artifactId>
+    <version>0.13.0-SNAPSHOT</version>
+    <relativePath>../pom.xml</relativePath>
+  </parent>
+
+  <artifactId>hive-contrib</artifactId>
+  <packaging>jar</packaging>
+  <name>Hive Contrib</name>
+
+  <properties>
+    <hive.path.to.root>..</hive.path.to.root>
+  </properties>
+
+  <dependencies>
+    <!-- intra-project -->
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-exec</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-serde</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-shims</artifactId>
+      <version>${project.version}</version>
+      <classifier>uberjar</classifier>
+    </dependency>
+    <!-- inter-project -->
+    <dependency>
+      <groupId>commons-codec</groupId>
+      <artifactId>commons-codec</artifactId>
+      <version>${commons-codec.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>commons-logging</groupId>
+      <artifactId>commons-logging</artifactId>
+      <version>${commons-logging.version}</version>
+    </dependency>
+    <!-- test inter-project -->
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <version>${junit.version}</version>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+
+  <profiles>
+    <profile>
+      <id>hadoop-1</id>
+      <activation>
+        <activeByDefault>true</activeByDefault>
+      </activation>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-core</artifactId>
+          <version>${hadoop-20S.version}</version>
+          <optional>true</optional>
+        </dependency>
+      </dependencies>
+    </profile>
+    <profile>
+      <id>hadoop-2</id>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-common</artifactId>
+          <version>${hadoop-23.version}</version>
+          <optional>true</optional>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-mapreduce-client-core</artifactId>
+          <version>${hadoop-23.version}</version>
+          <optional>true</optional>
+        </dependency>
+      </dependencies>
+    </profile>
+  </profiles>
+
+  <build>
+    <sourceDirectory>${basedir}/src/java</sourceDirectory>
+    <testSourceDirectory>${basedir}/src/test</testSourceDirectory>
+  </build>
+
+</project>
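
Note that hive-contrib pulls in the shims artifact through its uberjar classifier rather than the default jar. For reference, classified artifacts can be addressed with the stock maven-dependency-plugin using groupId:artifactId:version:packaging:classifier coordinates; the command below only illustrates that syntax and assumes the snapshot is resolvable from a reachable repository:

    mvn dependency:get -Dartifact=org.apache.hive:hive-shims:0.13.0-SNAPSHOT:jar:uberjar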

Modified: hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/metastore/hooks/TestURLHook.java
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/metastore/hooks/TestURLHook.java?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/metastore/hooks/TestURLHook.java (original)
+++ hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/metastore/hooks/TestURLHook.java Thu Oct 31 18:27:31 2013
@@ -33,7 +33,7 @@ public class TestURLHook implements JDOC
   public String getJdoConnectionUrl(Configuration conf) throws Exception {
     if (originalUrl == null) {
       originalUrl = conf.get(HiveConf.ConfVars.METASTORECONNECTURLKEY.varname, "");
-      return "jdbc:derby:;databaseName=../build/test/junit_metastore_db_blank;create=true";
+      return "jdbc:derby:;databaseName=target/tmp/junit_metastore_db_blank;create=true";
     } else {
       return originalUrl;
     }

Modified: hive/trunk/contrib/src/test/queries/clientnegative/case_with_row_sequence.q
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientnegative/case_with_row_sequence.q?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/queries/clientnegative/case_with_row_sequence.q (original)
+++ hive/trunk/contrib/src/test/queries/clientnegative/case_with_row_sequence.q Thu Oct 31 18:27:31 2013
@@ -1,6 +1,6 @@
 drop temporary function row_sequence;
 
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
 create temporary function row_sequence as 
 'org.apache.hadoop.hive.contrib.udf.UDFRowSequence';
 

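Note on the add jar rewrites: the .q tests stop loading the contrib jar from the Ant build directory and instead resolve it out of the local Maven repository; the same substitution repeats in the remaining query files below. A sketch of how the new path expands, with hypothetical values for both variables (a default ~/.m2 layout and hive.version=0.13.0-SNAPSHOT):

    # hypothetical expansion of the rewritten add jar path
    maven_local_repository="$HOME/.m2/repository"
    hive_version="0.13.0-SNAPSHOT"
    echo "$maven_local_repository/org/apache/hive/hive-contrib/$hive_version/hive-contrib-$hive_version.jar"
    # -> ~/.m2/repository/org/apache/hive/hive-contrib/0.13.0-SNAPSHOT/hive-contrib-0.13.0-SNAPSHOT.jar
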
Modified: hive/trunk/contrib/src/test/queries/clientnegative/invalid_row_sequence.q
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientnegative/invalid_row_sequence.q?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/queries/clientnegative/invalid_row_sequence.q (original)
+++ hive/trunk/contrib/src/test/queries/clientnegative/invalid_row_sequence.q Thu Oct 31 18:27:31 2013
@@ -2,7 +2,7 @@
 
 drop temporary function row_sequence;
 
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
 
 create temporary function row_sequence as 
 'org.apache.hadoop.hive.contrib.udf.UDFRowSequence';

Modified: hive/trunk/contrib/src/test/queries/clientnegative/serde_regex.q
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientnegative/serde_regex.q?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/queries/clientnegative/serde_regex.q (original)
+++ hive/trunk/contrib/src/test/queries/clientnegative/serde_regex.q Thu Oct 31 18:27:31 2013
@@ -1,4 +1,4 @@
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
 
 USE default;
 

Modified: hive/trunk/contrib/src/test/queries/clientnegative/udtf_explode2.q
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientnegative/udtf_explode2.q?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/queries/clientnegative/udtf_explode2.q (original)
+++ hive/trunk/contrib/src/test/queries/clientnegative/udtf_explode2.q Thu Oct 31 18:27:31 2013
@@ -1,4 +1,4 @@
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
 
 CREATE TEMPORARY FUNCTION explode2 AS 'org.apache.hadoop.hive.contrib.udtf.example.GenericUDTFExplode2';
 

Modified: hive/trunk/contrib/src/test/queries/clientnegative/url_hook.q
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientnegative/url_hook.q?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/queries/clientnegative/url_hook.q (original)
+++ hive/trunk/contrib/src/test/queries/clientnegative/url_hook.q Thu Oct 31 18:27:31 2013
@@ -1,4 +1,4 @@
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
 set hive.metastore.force.reload.conf=true;
 SHOW TABLES 'src';
 set hive.metastore.ds.connection.url.hook=org.apache.hadoop.hive.contrib.metastore.hooks.TestURLHook;

Modified: hive/trunk/contrib/src/test/queries/clientpositive/dboutput.q
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientpositive/dboutput.q?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/queries/clientpositive/dboutput.q (original)
+++ hive/trunk/contrib/src/test/queries/clientpositive/dboutput.q Thu Oct 31 18:27:31 2013
@@ -1,4 +1,4 @@
-ADD JAR ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+ADD JAR ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
 
 CREATE TEMPORARY FUNCTION dboutput AS 'org.apache.hadoop.hive.contrib.genericudf.example.GenericUDFDBOutput';
 
@@ -7,7 +7,7 @@ set mapred.reduce.tasks.speculative.exec
 set mapred.map.tasks=1;
 set mapred.reduce.tasks=1;
 
-ADD JAR ${system:build.ivy.lib.dir}/default/derby-${system:derby.version}.jar;
+ADD JAR ${system:maven.local.repository}/org/apache/derby/derby/${system:derby.version}/derby-${system:derby.version}.jar;
 
 DESCRIBE FUNCTION dboutput;
 

Modified: hive/trunk/contrib/src/test/queries/clientpositive/fileformat_base64.q
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientpositive/fileformat_base64.q?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/queries/clientpositive/fileformat_base64.q (original)
+++ hive/trunk/contrib/src/test/queries/clientpositive/fileformat_base64.q Thu Oct 31 18:27:31 2013
@@ -1,4 +1,4 @@
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
 
 DROP TABLE base64_test;
 

Modified: hive/trunk/contrib/src/test/queries/clientpositive/java_mr_example.q
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientpositive/java_mr_example.q?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/queries/clientpositive/java_mr_example.q (original)
+++ hive/trunk/contrib/src/test/queries/clientpositive/java_mr_example.q Thu Oct 31 18:27:31 2013
@@ -1,10 +1,10 @@
 FROM (
   FROM src
    MAP value, key
- USING 'java -cp ${system:build.dir}/hive-contrib-${system:hive.version}.jar org.apache.hadoop.hive.contrib.mr.example.IdentityMapper'
+ USING 'java -cp ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar org.apache.hadoop.hive.contrib.mr.example.IdentityMapper'
     AS k, v
  CLUSTER BY k) map_output
   REDUCE k, v
-   USING 'java -cp ${system:build.dir}/hive-contrib-${system:hive.version}.jar org.apache.hadoop.hive.contrib.mr.example.WordCountReduce'
+   USING 'java -cp ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar org.apache.hadoop.hive.contrib.mr.example.WordCountReduce'
    AS k, v
 ;
\ No newline at end of file

Modified: hive/trunk/contrib/src/test/queries/clientpositive/lateral_view_explode2.q
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientpositive/lateral_view_explode2.q?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/queries/clientpositive/lateral_view_explode2.q (original)
+++ hive/trunk/contrib/src/test/queries/clientpositive/lateral_view_explode2.q Thu Oct 31 18:27:31 2013
@@ -1,4 +1,4 @@
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
 
 CREATE TEMPORARY FUNCTION explode2 AS 'org.apache.hadoop.hive.contrib.udtf.example.GenericUDTFExplode2';
 

Modified: hive/trunk/contrib/src/test/queries/clientpositive/serde_regex.q
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientpositive/serde_regex.q?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/queries/clientpositive/serde_regex.q (original)
+++ hive/trunk/contrib/src/test/queries/clientpositive/serde_regex.q Thu Oct 31 18:27:31 2013
@@ -1,4 +1,4 @@
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
 
 EXPLAIN
 CREATE TABLE serde_regex(
@@ -35,7 +35,7 @@ WITH SERDEPROPERTIES (
 )
 STORED AS TEXTFILE;
 
-LOAD DATA LOCAL INPATH "../data/files/apache.access.log" INTO TABLE serde_regex;
-LOAD DATA LOCAL INPATH "../data/files/apache.access.2.log" INTO TABLE serde_regex;
+LOAD DATA LOCAL INPATH "../../data/files/apache.access.log" INTO TABLE serde_regex;
+LOAD DATA LOCAL INPATH "../../data/files/apache.access.2.log" INTO TABLE serde_regex;
 
 SELECT * FROM serde_regex ORDER BY time;
\ No newline at end of file
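
Note on the ../ to ../../ rewrites: the LOAD DATA and script paths gain one directory level, consistent with the Maven test working directory sitting one level deeper in the tree than the old Ant build directory did; the matching .q.out files and symlink fixtures below change the same way. A sketch under that assumption, with a hypothetical layout:

    # assumed old working directory: $HIVE_ROOT/build          -> ../data/files/...
    # assumed new working directory: $HIVE_ROOT/contrib/target -> ../../data/files/...
    cd "$HIVE_ROOT/contrib/target" && ls ../../data/files/apache.access.log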

Modified: hive/trunk/contrib/src/test/queries/clientpositive/serde_s3.q
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientpositive/serde_s3.q?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/queries/clientpositive/serde_s3.q (original)
+++ hive/trunk/contrib/src/test/queries/clientpositive/serde_s3.q Thu Oct 31 18:27:31 2013
@@ -1,4 +1,4 @@
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
 
 DROP TABLE s3log;
 CREATE TABLE s3log
@@ -7,7 +7,7 @@ STORED AS TEXTFILE;
 
 DESCRIBE s3log;
 
-LOAD DATA LOCAL INPATH '../contrib/data/files/s3.log' INTO TABLE s3log;
+LOAD DATA LOCAL INPATH '../../contrib/data/files/s3.log' INTO TABLE s3log;
 
 SELECT a.* FROM s3log a;
 

Modified: hive/trunk/contrib/src/test/queries/clientpositive/serde_typedbytes.q
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientpositive/serde_typedbytes.q?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/queries/clientpositive/serde_typedbytes.q (original)
+++ hive/trunk/contrib/src/test/queries/clientpositive/serde_typedbytes.q Thu Oct 31 18:27:31 2013
@@ -1,4 +1,4 @@
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
 
 drop table dest1;
 CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE;

Modified: hive/trunk/contrib/src/test/queries/clientpositive/serde_typedbytes2.q
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientpositive/serde_typedbytes2.q?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/queries/clientpositive/serde_typedbytes2.q (original)
+++ hive/trunk/contrib/src/test/queries/clientpositive/serde_typedbytes2.q Thu Oct 31 18:27:31 2013
@@ -1,4 +1,4 @@
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
 
 drop table dest1;
 CREATE TABLE dest1(key SMALLINT, value STRING) STORED AS TEXTFILE;

Modified: hive/trunk/contrib/src/test/queries/clientpositive/serde_typedbytes3.q
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientpositive/serde_typedbytes3.q?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/queries/clientpositive/serde_typedbytes3.q (original)
+++ hive/trunk/contrib/src/test/queries/clientpositive/serde_typedbytes3.q Thu Oct 31 18:27:31 2013
@@ -1,4 +1,4 @@
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
 
 drop table dest1;
 CREATE TABLE dest1(key STRING, value STRING) STORED AS TEXTFILE;

Modified: hive/trunk/contrib/src/test/queries/clientpositive/serde_typedbytes4.q
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientpositive/serde_typedbytes4.q?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/queries/clientpositive/serde_typedbytes4.q (original)
+++ hive/trunk/contrib/src/test/queries/clientpositive/serde_typedbytes4.q Thu Oct 31 18:27:31 2013
@@ -1,4 +1,4 @@
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
 
 drop table dest1;
 CREATE TABLE dest1(key STRING, value STRING) STORED AS TEXTFILE;

Modified: hive/trunk/contrib/src/test/queries/clientpositive/serde_typedbytes5.q
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientpositive/serde_typedbytes5.q?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/queries/clientpositive/serde_typedbytes5.q (original)
+++ hive/trunk/contrib/src/test/queries/clientpositive/serde_typedbytes5.q Thu Oct 31 18:27:31 2013
@@ -1,4 +1,4 @@
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
 
 drop table dest1;
 CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE;
@@ -8,7 +8,7 @@ FROM (
   FROM src
   SELECT TRANSFORM(src.key, src.value) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe'
   RECORDWRITER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordWriter'
-  USING 'python ../data/scripts/cat.py'
+  USING 'python ../../data/scripts/cat.py'
   AS (tkey, tvalue) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe'
   RECORDREADER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordReader'
 ) tmap
@@ -18,7 +18,7 @@ FROM (
   FROM src
   SELECT TRANSFORM(src.key, src.value) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe'
   RECORDWRITER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordWriter'
-  USING 'python ../data/scripts/cat.py'
+  USING 'python ../../data/scripts/cat.py'
   AS (tkey, tvalue) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe'
   RECORDREADER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordReader'
 ) tmap

Modified: hive/trunk/contrib/src/test/queries/clientpositive/serde_typedbytes_null.q
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientpositive/serde_typedbytes_null.q?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/queries/clientpositive/serde_typedbytes_null.q (original)
+++ hive/trunk/contrib/src/test/queries/clientpositive/serde_typedbytes_null.q Thu Oct 31 18:27:31 2013
@@ -1,4 +1,4 @@
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
 
 DROP TABLE table1;
 

Modified: hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_avg.q
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_avg.q?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_avg.q (original)
+++ hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_avg.q Thu Oct 31 18:27:31 2013
@@ -1,4 +1,4 @@
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
 
 CREATE TEMPORARY FUNCTION example_avg AS 'org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleAvg';
 

Modified: hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_group_concat.q
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_group_concat.q?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_group_concat.q (original)
+++ hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_group_concat.q Thu Oct 31 18:27:31 2013
@@ -1,4 +1,4 @@
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
 
 CREATE TEMPORARY FUNCTION example_group_concat AS 'org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleGroupConcat';
 

Modified: hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_max.q
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_max.q?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_max.q (original)
+++ hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_max.q Thu Oct 31 18:27:31 2013
@@ -1,4 +1,4 @@
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
 
 CREATE TEMPORARY FUNCTION example_max AS 'org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleMax';
 

Modified: hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_max_n.q
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_max_n.q?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_max_n.q (original)
+++ hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_max_n.q Thu Oct 31 18:27:31 2013
@@ -1,4 +1,4 @@
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
 
 CREATE TEMPORARY FUNCTION example_max_n AS 'org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleMaxN';
 

Modified: hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_min.q
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_min.q?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_min.q (original)
+++ hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_min.q Thu Oct 31 18:27:31 2013
@@ -1,4 +1,4 @@
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
 
 CREATE TEMPORARY FUNCTION example_min AS 'org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleMin';
 

Modified: hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_min_n.q
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_min_n.q?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_min_n.q (original)
+++ hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_min_n.q Thu Oct 31 18:27:31 2013
@@ -1,4 +1,4 @@
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
 CREATE TEMPORARY FUNCTION example_min_n AS 'org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleMinN';
 
 EXPLAIN

Modified: hive/trunk/contrib/src/test/queries/clientpositive/udf_example_add.q
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientpositive/udf_example_add.q?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/queries/clientpositive/udf_example_add.q (original)
+++ hive/trunk/contrib/src/test/queries/clientpositive/udf_example_add.q Thu Oct 31 18:27:31 2013
@@ -1,4 +1,4 @@
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
 
 CREATE TEMPORARY FUNCTION example_add AS 'org.apache.hadoop.hive.contrib.udf.example.UDFExampleAdd';
 

Modified: hive/trunk/contrib/src/test/queries/clientpositive/udf_example_arraymapstruct.q
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientpositive/udf_example_arraymapstruct.q?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/queries/clientpositive/udf_example_arraymapstruct.q (original)
+++ hive/trunk/contrib/src/test/queries/clientpositive/udf_example_arraymapstruct.q Thu Oct 31 18:27:31 2013
@@ -1,4 +1,4 @@
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
 
 CREATE TEMPORARY FUNCTION example_arraysum    AS 'org.apache.hadoop.hive.contrib.udf.example.UDFExampleArraySum';
 CREATE TEMPORARY FUNCTION example_mapconcat   AS 'org.apache.hadoop.hive.contrib.udf.example.UDFExampleMapConcat';

Modified: hive/trunk/contrib/src/test/queries/clientpositive/udf_example_format.q
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientpositive/udf_example_format.q?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/queries/clientpositive/udf_example_format.q (original)
+++ hive/trunk/contrib/src/test/queries/clientpositive/udf_example_format.q Thu Oct 31 18:27:31 2013
@@ -1,4 +1,4 @@
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
 
 CREATE TEMPORARY FUNCTION example_format AS 'org.apache.hadoop.hive.contrib.udf.example.UDFExampleFormat';
 

Modified: hive/trunk/contrib/src/test/queries/clientpositive/udf_row_sequence.q
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientpositive/udf_row_sequence.q?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/queries/clientpositive/udf_row_sequence.q (original)
+++ hive/trunk/contrib/src/test/queries/clientpositive/udf_row_sequence.q Thu Oct 31 18:27:31 2013
@@ -4,7 +4,7 @@
 
 drop temporary function row_sequence;
 
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
 
 create temporary function row_sequence as 
 'org.apache.hadoop.hive.contrib.udf.UDFRowSequence';

Modified: hive/trunk/contrib/src/test/queries/clientpositive/udtf_explode2.q
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientpositive/udtf_explode2.q?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/queries/clientpositive/udtf_explode2.q (original)
+++ hive/trunk/contrib/src/test/queries/clientpositive/udtf_explode2.q Thu Oct 31 18:27:31 2013
@@ -1,4 +1,4 @@
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
 
 CREATE TEMPORARY FUNCTION explode2 AS 'org.apache.hadoop.hive.contrib.udtf.example.GenericUDTFExplode2';
 

Modified: hive/trunk/contrib/src/test/queries/clientpositive/udtf_output_on_close.q
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientpositive/udtf_output_on_close.q?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/queries/clientpositive/udtf_output_on_close.q (original)
+++ hive/trunk/contrib/src/test/queries/clientpositive/udtf_output_on_close.q Thu Oct 31 18:27:31 2013
@@ -1,4 +1,4 @@
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
 
 CREATE TEMPORARY FUNCTION udtfCount2 AS 'org.apache.hadoop.hive.contrib.udtf.example.GenericUDTFCount2';
 

Modified: hive/trunk/contrib/src/test/results/clientpositive/serde_regex.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/results/clientpositive/serde_regex.q.out?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/results/clientpositive/serde_regex.q.out (original)
+++ hive/trunk/contrib/src/test/results/clientpositive/serde_regex.q.out Thu Oct 31 18:27:31 2013
@@ -92,16 +92,16 @@ WITH SERDEPROPERTIES (
 STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@serde_regex
-PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/apache.access.log" INTO TABLE serde_regex
+PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/apache.access.log" INTO TABLE serde_regex
 PREHOOK: type: LOAD
 PREHOOK: Output: default@serde_regex
-POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/apache.access.log" INTO TABLE serde_regex
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/apache.access.log" INTO TABLE serde_regex
 POSTHOOK: type: LOAD
 POSTHOOK: Output: default@serde_regex
-PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/apache.access.2.log" INTO TABLE serde_regex
+PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/apache.access.2.log" INTO TABLE serde_regex
 PREHOOK: type: LOAD
 PREHOOK: Output: default@serde_regex
-POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/apache.access.2.log" INTO TABLE serde_regex
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/apache.access.2.log" INTO TABLE serde_regex
 POSTHOOK: type: LOAD
 POSTHOOK: Output: default@serde_regex
 PREHOOK: query: SELECT * FROM serde_regex ORDER BY time

Modified: hive/trunk/contrib/src/test/results/clientpositive/serde_s3.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/results/clientpositive/serde_s3.q.out?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/results/clientpositive/serde_s3.q.out (original)
+++ hive/trunk/contrib/src/test/results/clientpositive/serde_s3.q.out Thu Oct 31 18:27:31 2013
@@ -32,10 +32,10 @@ totaltime           	int                
 turnaroundtime      	int                 	from deserializer   
 referer             	string              	from deserializer   
 useragent           	string              	from deserializer   
-PREHOOK: query: LOAD DATA LOCAL INPATH '../contrib/data/files/s3.log' INTO TABLE s3log
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../contrib/data/files/s3.log' INTO TABLE s3log
 PREHOOK: type: LOAD
 PREHOOK: Output: default@s3log
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../contrib/data/files/s3.log' INTO TABLE s3log
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../contrib/data/files/s3.log' INTO TABLE s3log
 POSTHOOK: type: LOAD
 POSTHOOK: Output: default@s3log
 PREHOOK: query: SELECT a.* FROM s3log a

Modified: hive/trunk/contrib/src/test/results/clientpositive/serde_typedbytes5.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/results/clientpositive/serde_typedbytes5.q.out?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/results/clientpositive/serde_typedbytes5.q.out (original)
+++ hive/trunk/contrib/src/test/results/clientpositive/serde_typedbytes5.q.out Thu Oct 31 18:27:31 2013
@@ -12,7 +12,7 @@ FROM (
   FROM src
   SELECT TRANSFORM(src.key, src.value) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe'
   RECORDWRITER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordWriter'
-  USING 'python ../data/scripts/cat.py'
+  USING 'python ../../data/scripts/cat.py'
   AS (tkey, tvalue) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe'
   RECORDREADER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordReader'
 ) tmap
@@ -23,14 +23,14 @@ FROM (
   FROM src
   SELECT TRANSFORM(src.key, src.value) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe'
   RECORDWRITER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordWriter'
-  USING 'python ../data/scripts/cat.py'
+  USING 'python ../../data/scripts/cat.py'
   AS (tkey, tvalue) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe'
   RECORDREADER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordReader'
 ) tmap
 INSERT OVERWRITE TABLE dest1 SELECT tkey, tvalue
 POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value)) (TOK_SERDE (TOK_SERDENAME 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe')) (TOK_RECORDWRITER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordWriter') 'python ../data/scripts/cat.py' (TOK_SERDE (TOK_SERDENAME 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe')) (TOK_RECORDREADER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordReader') (TOK_ALIASLIST tkey tvalue)))))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME dest1))) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL tkey)) (TOK_SELEXPR (TOK_TABLE_OR_COL tvalue)))))
+  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value)) (TOK_SERDE (TOK_SERDENAME 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe')) (TOK_RECORDWRITER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordWriter') 'python ../../data/scripts/cat.py' (TOK_SERDE (TOK_SERDENAME 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe')) (TOK_RECORDREADER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordReader') (TOK_ALIASLIST tkey tvalue)))))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME dest1))) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL tkey)) (TOK_SELEXPR (TOK_TABLE_OR_COL tvalue)))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -57,7 +57,7 @@ STAGE PLANS:
                     type: string
               outputColumnNames: _col0, _col1
               Transform Operator
-                command: python ../data/scripts/cat.py
+                command: python ../../data/scripts/cat.py
                 output info:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -139,7 +139,7 @@ PREHOOK: query: FROM (
   FROM src
   SELECT TRANSFORM(src.key, src.value) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe'
   RECORDWRITER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordWriter'
-  USING 'python ../data/scripts/cat.py'
+  USING 'python ../../data/scripts/cat.py'
   AS (tkey, tvalue) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe'
   RECORDREADER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordReader'
 ) tmap
@@ -151,7 +151,7 @@ POSTHOOK: query: FROM (
   FROM src
   SELECT TRANSFORM(src.key, src.value) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe'
   RECORDWRITER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordWriter'
-  USING 'python ../data/scripts/cat.py'
+  USING 'python ../../data/scripts/cat.py'
   AS (tkey, tvalue) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe'
   RECORDREADER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordReader'
 ) tmap

Added: hive/trunk/data/conf/hive-log4j-new.properties
URL: http://svn.apache.org/viewvc/hive/trunk/data/conf/hive-log4j-new.properties?rev=1537576&view=auto
==============================================================================
--- hive/trunk/data/conf/hive-log4j-new.properties (added)
+++ hive/trunk/data/conf/hive-log4j-new.properties Thu Oct 31 18:27:31 2013
@@ -0,0 +1,78 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Define some default values that can be overridden by system properties
+hive.root.logger=DEBUG,DRFA
+hive.log.dir=${test.tmp.dir}/log/
+hive.log.file=hive.log
+
+# Define the root logger to the system property "hive.root.logger".
+log4j.rootLogger=${hive.root.logger}, EventCounter
+
+# Logging Threshold
+log4j.threshhold=WARN
+
+#
+# Daily Rolling File Appender
+#
+
+log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.DRFA.File=${hive.log.dir}/${hive.log.file}
+
+# Rollover at midnight
+log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
+
+# 30-day backup
+#log4j.appender.DRFA.MaxBackupIndex=30
+log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
+
+# Pattern format: Date LogLevel LoggerName LogMessage
+#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+# Debugging Pattern format
+log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
+
+
+#
+# console
+# Add "console" to rootlogger above if you want to use this
+#
+
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.target=System.err
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
+
+#custom logging levels
+#log4j.logger.xxx=DEBUG
+
+#
+# Event Counter Appender
+# Sends counts of logging messages at different severity levels to Hadoop Metrics.
+#
+log4j.appender.EventCounter=org.apache.hadoop.hive.shims.HiveEventCounter
+
+
+log4j.category.DataNucleus=ERROR,DRFA
+log4j.category.Datastore=ERROR,DRFA
+log4j.category.Datastore.Schema=ERROR,DRFA
+log4j.category.JPOX.Datastore=ERROR,DRFA
+log4j.category.JPOX.Plugin=ERROR,DRFA
+log4j.category.JPOX.MetaData=ERROR,DRFA
+log4j.category.JPOX.Query=ERROR,DRFA
+log4j.category.JPOX.General=ERROR,DRFA
+log4j.category.JPOX.Enhancer=ERROR,DRFA
+log4j.logger.org.apache.hadoop.conf.Configuration=ERROR,DRFA
+

Modified: hive/trunk/data/conf/hive-log4j.properties
URL: http://svn.apache.org/viewvc/hive/trunk/data/conf/hive-log4j.properties?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/data/conf/hive-log4j.properties (original)
+++ hive/trunk/data/conf/hive-log4j.properties Thu Oct 31 18:27:31 2013
@@ -47,7 +47,7 @@ log4j.appender.DRFA.layout.ConversionPat
 
 #
 # console
-# Add "console" to rootlogger above if you want to use this 
+# Add "console" to rootlogger above if you want to use this
 #
 
 log4j.appender.console=org.apache.log4j.ConsoleAppender

Added: hive/trunk/data/conf/hive-site-new.xml
URL: http://svn.apache.org/viewvc/hive/trunk/data/conf/hive-site-new.xml?rev=1537576&view=auto
==============================================================================
--- hive/trunk/data/conf/hive-site-new.xml (added)
+++ hive/trunk/data/conf/hive-site-new.xml Thu Oct 31 18:27:31 2013
@@ -0,0 +1,189 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<configuration>
+
+<!-- Hive Configuration can either be stored in this file or in the hadoop configuration files  -->
+<!-- that are implied by Hadoop setup variables.                                                -->
+<!-- Aside from Hadoop setup variables - this file is provided as a convenience so that Hive    -->
+<!-- users do not have to edit hadoop configuration files (that may be managed as a centralized -->
+<!-- resource).                                                                                 -->
+
+<!-- Hive Execution Parameters -->
+<property>
+  <name>hadoop.tmp.dir</name>
+  <value>${test.tmp.dir}/hadoop-tmp</value>
+  <description>A base for other temporary directories.</description>
+</property>
+
+<!--
+<property>
+  <name>hive.exec.reducers.max</name>
+  <value>1</value>
+  <description>maximum number of reducers</description>
+</property>
+-->
+
+<property>
+  <name>hive.exec.scratchdir</name>
+  <value>${test.tmp.dir}/scratchdir</value>
+  <description>Scratch space for Hive jobs</description>
+</property>
+
+<property>
+  <name>hive.exec.local.scratchdir</name>
+  <value>${test.tmp.dir}/localscratchdir/</value>
+  <description>Local scratch space for Hive jobs</description>
+</property>
+
+<property>
+  <name>javax.jdo.option.ConnectionURL</name>
+  <value>jdbc:derby:;databaseName=${test.tmp.dir}/junit_metastore_db;create=true</value>
+</property>
+
+<property>
+  <name>hive.stats.dbconnectionstring</name>
+  <value>jdbc:derby:;databaseName=${test.tmp.dir}/TempStatsStore;create=true</value>
+</property>
+
+
+<property>
+  <name>javax.jdo.option.ConnectionDriverName</name>
+  <value>org.apache.derby.jdbc.EmbeddedDriver</value>
+</property>
+
+<property>
+  <name>javax.jdo.option.ConnectionUserName</name>
+  <value>APP</value>
+</property>
+
+<property>
+  <name>javax.jdo.option.ConnectionPassword</name>
+  <value>mine</value>
+</property>
+
+<property>
+  <!--  this should eventually be deprecated since the metastore should supply this -->
+  <name>hive.metastore.warehouse.dir</name>
+  <value>${test.warehouse.dir}</value>
+  <description></description>
+</property>
+
+<property>
+  <name>hive.metastore.metadb.dir</name>
+  <value>file://${test.tmp.dir}/metadb/</value>
+  <description>
+  Required by metastore server or if the uris argument below is not supplied
+  </description>
+</property>
+
+<property>
+  <name>test.log.dir</name>
+  <value>${test.tmp.dir}/log/</value>
+  <description></description>
+</property>
+
+<property>
+  <name>test.data.files</name>
+  <value>${hive.root}/data/files</value>
+  <description></description>
+</property>
+
+<property>
+  <name>hive.jar.path</name>
+  <value>${maven.local.repository}/org/apache/hive/hive-exec/${hive.version}/hive-exec-${hive.version}.jar</value>
+  <description></description>
+</property>
+
+<property>
+  <name>hive.metastore.rawstore.impl</name>
+  <value>org.apache.hadoop.hive.metastore.ObjectStore</value>
+  <description>Name of the class that implements the org.apache.hadoop.hive.metastore.rawstore interface. This class is used to store and retrieve raw metadata objects such as tables and databases</description>
+</property>
+
+<property>
+  <name>hive.querylog.location</name>
+  <value>${test.tmp.dir}/tmp</value>
+  <description>Location of the structured hive logs</description>
+</property>
+
+<property>
+  <name>hive.exec.pre.hooks</name>
+  <value>org.apache.hadoop.hive.ql.hooks.PreExecutePrinter, org.apache.hadoop.hive.ql.hooks.EnforceReadOnlyTables</value>
+  <description>Pre Execute Hook for Tests</description>
+</property>
+
+<property>
+  <name>hive.exec.post.hooks</name>
+  <value>org.apache.hadoop.hive.ql.hooks.PostExecutePrinter</value>
+  <description>Post Execute Hook for Tests</description>
+</property>
+
+<property>
+  <name>hive.task.progress</name>
+  <value>false</value>
+  <description>Track progress of a task</description>
+</property>
+
+<property>
+  <name>hive.support.concurrency</name>
+  <value>true</value>
+  <description>Whether Hive supports concurrency. A ZooKeeper instance must be up and running for the default Hive lock manager to support read-write locks.</description>
+</property>
+
+<property>
+  <name>fs.pfile.impl</name>
+  <value>org.apache.hadoop.fs.ProxyLocalFileSystem</value>
+  <description>A proxy for the local file system, used for cross-filesystem testing</description>
+</property>
+
+<property>
+  <name>hive.exec.mode.local.auto</name>
+  <value>false</value>
+  <description>
+    Let Hive determine automatically whether to run in local mode.
+    Disabled for tests so that minimr runs are not affected.
+  </description>
+</property>
+
+<property>
+  <name>hive.auto.convert.join</name>
+  <value>false</value>
+  <description>Whether Hive enables the optimization that converts a common join into a mapjoin based on the input file size.</description>
+</property>
+
+<property>
+  <name>hive.ignore.mapjoin.hint</name>
+  <value>false</value>
+  <description>Whether Hive ignores the mapjoin hint</description>
+</property>
+
+<property>
+  <name>hive.input.format</name>
+  <value>org.apache.hadoop.hive.ql.io.CombineHiveInputFormat</value>
+  <description>The default input format. If it is not specified, the system assigns one: HiveInputFormat for Hadoop versions 17, 18, and 19, and CombineHiveInputFormat for Hadoop 20. The user can always override it; if there is a bug in CombineHiveInputFormat, it can be set back to HiveInputFormat manually.</description>
+</property>
+
+<property>
+  <name>hive.default.rcfile.serde</name>
+  <value>org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe</value>
+  <description>The default SerDe Hive will use for the RCFile format.</description>
+</property>
+
+</configuration>
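
For context, a minimal sketch of how a test could read one of the properties
above through HiveConf; references such as ${test.tmp.dir} are expanded by
the Hadoop configuration layer at read time. The class name is hypothetical
and not part of this commit.

  import org.apache.hadoop.hive.conf.HiveConf;

  // Hypothetical illustration, not from this commit.
  public class ScratchDirCheck {
    public static void main(String[] args) {
      HiveConf conf = new HiveConf();
      // get() returns values with ${...} references already substituted,
      // e.g. <test.tmp.dir>/scratchdir for hive.exec.scratchdir above.
      System.out.println(conf.get("hive.exec.scratchdir"));
      System.out.println(conf.get("javax.jdo.option.ConnectionURL"));
    }
  }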

Modified: hive/trunk/data/files/symlink1.txt
URL: http://svn.apache.org/viewvc/hive/trunk/data/files/symlink1.txt?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/data/files/symlink1.txt (original)
+++ hive/trunk/data/files/symlink1.txt Thu Oct 31 18:27:31 2013
@@ -1,2 +1,2 @@
-../data/files/T1.txt
-../data/files/T3.txt
+../../data/files/T1.txt
+../../data/files/T3.txt

Modified: hive/trunk/data/files/symlink2.txt
URL: http://svn.apache.org/viewvc/hive/trunk/data/files/symlink2.txt?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/data/files/symlink2.txt (original)
+++ hive/trunk/data/files/symlink2.txt Thu Oct 31 18:27:31 2013
@@ -1 +1 @@
-../data/files/T2.txt
+../../data/files/T2.txt

Added: hive/trunk/hbase-handler/pom.xml
URL: http://svn.apache.org/viewvc/hive/trunk/hbase-handler/pom.xml?rev=1537576&view=auto
==============================================================================
--- hive/trunk/hbase-handler/pom.xml (added)
+++ hive/trunk/hbase-handler/pom.xml Thu Oct 31 18:27:31 2013
@@ -0,0 +1,143 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.hive</groupId>
+    <artifactId>hive</artifactId>
+    <version>0.13.0-SNAPSHOT</version>
+    <relativePath>../pom.xml</relativePath>
+  </parent>
+
+  <artifactId>hive-hbase-handler</artifactId>
+  <packaging>jar</packaging>
+  <name>Hive HBase Handler</name>
+
+  <properties>
+    <hive.path.to.root>..</hive.path.to.root>
+  </properties>
+
+  <dependencies>
+    <!-- intra-project -->
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-metastore</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-serde</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-service</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-exec</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-shims</artifactId>
+      <version>${project.version}</version>
+      <classifier>uberjar</classifier>
+    </dependency>
+    <!-- inter-project -->
+    <dependency>
+      <groupId>commons-lang</groupId>
+      <artifactId>commons-lang</artifactId>
+      <version>${commons-lang.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>commons-logging</groupId>
+      <artifactId>commons-logging</artifactId>
+      <version>${commons-logging.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase</artifactId>
+      <version>${hbase.version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>org.apache.thrift</groupId>
+          <artifactId>libthrift</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <!-- test inter-project -->
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <version>${junit.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase</artifactId>
+      <version>${hbase.version}</version>
+      <classifier>tests</classifier>
+    </dependency>
+  </dependencies>
+
+  <profiles>
+    <profile>
+      <id>hadoop-1</id>
+      <activation>
+        <activeByDefault>true</activeByDefault>
+      </activation>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-core</artifactId>
+          <version>${hadoop-20S.version}</version>
+          <optional>true</optional>
+        </dependency>
+      </dependencies>
+    </profile>
+    <profile>
+      <id>hadoop-2</id>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-common</artifactId>
+          <version>${hadoop-23.version}</version>
+          <optional>true</optional>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-mapreduce-client-core</artifactId>
+          <version>${hadoop-23.version}</version>
+          <optional>true</optional>
+        </dependency>
+      </dependencies>
+    </profile>
+  </profiles>
+
+  <build>
+    <sourceDirectory>${basedir}/src/java</sourceDirectory>
+    <testSourceDirectory>${basedir}/src/test</testSourceDirectory>
+  </build>
+
+</project>

Modified: hive/trunk/hbase-handler/src/test/org/apache/hadoop/hive/hbase/HBaseTestSetup.java
URL: http://svn.apache.org/viewvc/hive/trunk/hbase-handler/src/test/org/apache/hadoop/hive/hbase/HBaseTestSetup.java?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/hbase-handler/src/test/org/apache/hadoop/hive/hbase/HBaseTestSetup.java (original)
+++ hive/trunk/hbase-handler/src/test/org/apache/hadoop/hive/hbase/HBaseTestSetup.java Thu Oct 31 18:27:31 2013
@@ -58,9 +58,9 @@ public class HBaseTestSetup extends Test
   }
 
   void preTest(HiveConf conf) throws Exception {
-	
+
     setUpFixtures(conf);
-	
+
     conf.set("hbase.rootdir", hbaseRoot);
     conf.set("hbase.master", hbaseCluster.getMaster().getServerName().getHostAndPort());
     conf.set("hbase.zookeeper.property.clientPort", Integer.toString(zooKeeperPort));
@@ -73,15 +73,15 @@ public class HBaseTestSetup extends Test
   }
 
   private void setUpFixtures(HiveConf conf) throws Exception {
-    /* We are not starting zookeeper server here because 
+    /* We are not starting zookeeper server here because
      * QTestUtil already starts it.
      */
     int zkPort = conf.getInt("hive.zookeeper.client.port", -1);
     if ((zkPort == zooKeeperPort) && (hbaseCluster != null)) {
-    	return;
+      return;
     }
     zooKeeperPort = zkPort;
-    String tmpdir =  System.getProperty("user.dir")+"/../build/ql/tmp";
+    String tmpdir =  System.getProperty("test.tmp.dir");
     this.tearDown();
     conf.set("hbase.master", "local");
 

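The tmpdir change above shows the pattern applied throughout this commit: the
Maven build exports test.tmp.dir as a system property, so tests stop assuming
the old ant build/ directory layout. A hedged sketch of the lookup with a
defensive fallback (the fallback and class name are illustrative, not what
the committed code does):

  import java.io.File;

  // Illustrative helper, not from this commit: resolve the test temp dir,
  // falling back to the JVM temp dir when the build does not set it.
  public class TestDirs {
    static File testTmpDir() {
      String tmpdir = System.getProperty("test.tmp.dir",
          System.getProperty("java.io.tmpdir"));
      File dir = new File(tmpdir);
      dir.mkdirs();  // no-op if the directory already exists
      return dir;
    }
  }
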
Modified: hive/trunk/hbase-handler/src/test/queries/negative/cascade_dbdrop_hadoop20.q
URL: http://svn.apache.org/viewvc/hive/trunk/hbase-handler/src/test/queries/negative/cascade_dbdrop_hadoop20.q?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/hbase-handler/src/test/queries/negative/cascade_dbdrop_hadoop20.q (original)
+++ hive/trunk/hbase-handler/src/test/queries/negative/cascade_dbdrop_hadoop20.q Thu Oct 31 18:27:31 2013
@@ -16,11 +16,11 @@ STORED BY 'org.apache.hadoop.hive.hbase.
 WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,cf:string")
 TBLPROPERTIES ("hbase.table.name" = "hbase_table_0");
 
-dfs -ls ../build/ql/tmp/hbase/hbase_table_0;
+dfs -ls target/tmp/hbase/hbase_table_0;
 
 DROP DATABASE IF EXISTS hbaseDB CASCADE;
 
-dfs -ls ../build/ql/tmp/hbase/hbase_table_0;
+dfs -ls target/tmp/hbase/hbase_table_0;
 
 
 

Added: hive/trunk/hcatalog/core/.gitignore
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/.gitignore?rev=1537576&view=auto
==============================================================================
--- hive/trunk/hcatalog/core/.gitignore (added)
+++ hive/trunk/hcatalog/core/.gitignore Thu Oct 31 18:27:31 2013
@@ -0,0 +1 @@
+mapred

Added: hive/trunk/hcatalog/core/pom-new.xml
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/pom-new.xml?rev=1537576&view=auto
==============================================================================
--- hive/trunk/hcatalog/core/pom-new.xml (added)
+++ hive/trunk/hcatalog/core/pom-new.xml Thu Oct 31 18:27:31 2013
@@ -0,0 +1,177 @@
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied.  See the License for the
+  specific language governing permissions and limitations
+  under the License.
+-->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+     xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+     xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.hive.hcatalog</groupId>
+    <artifactId>hive-hcatalog</artifactId>
+    <version>0.13.0-SNAPSHOT</version>
+    <relativePath>../pom.xml</relativePath>
+  </parent>
+
+  <artifactId>hive-hcatalog-core</artifactId>
+  <packaging>jar</packaging>
+  <name>Hive HCatalog Core</name>
+
+  <properties>
+    <hive.path.to.root>../..</hive.path.to.root>
+  </properties>
+
+  <dependencies>
+    <!-- intra-project -->
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-cli</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-metastore</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-exec</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <!-- inter-project -->
+    <dependency>
+      <groupId>com.google.guava</groupId>
+      <artifactId>guava</artifactId>
+      <version>${guava.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.codehaus.jackson</groupId>
+      <artifactId>jackson-mapper-asl</artifactId>
+      <version>${jackson.version}</version>
+    </dependency>
+  </dependencies>
+
+  <profiles>
+    <profile>
+      <id>hadoop-1</id>
+      <activation>
+        <activeByDefault>true</activeByDefault>
+      </activation>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-core</artifactId>
+          <version>${hadoop-20S.version}</version>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-tools</artifactId>
+          <version>${hadoop-20S.version}</version>
+        </dependency>
+        <!-- test -->
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-test</artifactId>
+          <version>${hadoop-20S.version}</version>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.pig</groupId>
+          <artifactId>pig</artifactId>
+          <version>${pig.version}</version>
+          <scope>test</scope>
+        </dependency>
+      </dependencies>
+    </profile>
+    <profile>
+      <id>hadoop-2</id>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-annotations</artifactId>
+          <version>${hadoop-23.version}</version>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-archives</artifactId>
+          <version>${hadoop-23.version}</version>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-common</artifactId>
+          <version>${hadoop-23.version}</version>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-mapreduce-client-core</artifactId>
+          <version>${hadoop-23.version}</version>
+        </dependency>
+        <!-- test -->
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-hdfs</artifactId>
+          <version>${hadoop-23.version}</version>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-hdfs</artifactId>
+          <version>${hadoop-23.version}</version>
+          <classifier>tests</classifier>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
+          <version>${hadoop-23.version}</version>
+          <classifier>tests</classifier>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.pig</groupId>
+          <artifactId>pig</artifactId>
+          <version>${pig.version}</version>
+          <classifier>h2</classifier>
+          <scope>test</scope>
+        </dependency>
+      </dependencies>
+    </profile>
+  </profiles>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-jar-plugin</artifactId>
+        <executions>
+          <execution>
+            <goals>
+              <goal>test-jar</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+</project>

Modified: hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatMultiOutputFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatMultiOutputFormat.java?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatMultiOutputFormat.java (original)
+++ hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatMultiOutputFormat.java Thu Oct 31 18:27:31 2013
@@ -108,6 +108,7 @@ public class TestHCatMultiOutputFormat {
         HiveMetaStore.main(new String[]{"-v", "-p", msPort, "--hiveconf", warehouseConf});
       } catch (Throwable t) {
         System.err.println("Exiting. Got exception from metastore: " + t.getMessage());
+        t.printStackTrace();
       }
     }
 
@@ -163,13 +164,14 @@ public class TestHCatMultiOutputFormat {
 
   @BeforeClass
   public static void setup() throws Exception {
-    String testDir = System.getProperty("test.data.dir", "./");
+    System.clearProperty("mapred.job.tracker");
+    String testDir = System.getProperty("test.tmp.dir", "./");
     testDir = testDir + "/test_multitable_" + Math.abs(new Random().nextLong()) + "/";
     workDir = new File(new File(testDir).getCanonicalPath());
     FileUtil.fullyDelete(workDir);
     workDir.mkdirs();
 
-    warehousedir = new Path(workDir + "/warehouse");
+    warehousedir = new Path(System.getProperty("test.warehouse.dir"));
 
     // Run hive metastore server
     t = new Thread(new RunMS());
@@ -186,9 +188,10 @@ public class TestHCatMultiOutputFormat {
     mrCluster = new MiniMRCluster(1, fs.getUri().toString(), 1, null, null,
       new JobConf(conf));
     mrConf = mrCluster.createJobConf();
-    fs.mkdirs(warehousedir);
 
     initializeSetup();
+
+    warehousedir.getFileSystem(hiveConf).mkdirs(warehousedir);
   }
 
   private static void initializeSetup() throws Exception {
@@ -251,14 +254,15 @@ public class TestHCatMultiOutputFormat {
     tbl.setPartitionKeys(ColumnHolder.partitionCols);
 
     hmsc.createTable(tbl);
-    FileSystem fs = FileSystem.get(mrConf);
-    fs.setPermission(new Path(warehousedir, tableName), new FsPermission(tablePerm));
+    Path path = new Path(warehousedir, tableName);
+    FileSystem fs = path.getFileSystem(hiveConf);
+    fs.setPermission(path, new FsPermission(tablePerm));
   }
 
   @AfterClass
   public static void tearDown() throws IOException {
     FileUtil.fullyDelete(workDir);
-    FileSystem fs = FileSystem.get(mrConf);
+    FileSystem fs = warehousedir.getFileSystem(hiveConf);
     if (fs.exists(warehousedir)) {
       fs.delete(warehousedir, true);
     }
@@ -367,14 +371,14 @@ public class TestHCatMultiOutputFormat {
    * @throws Exception if any error occurs
    */
   private List<String> getTableData(String table, String database) throws Exception {
-    HiveConf conf = new HiveConf();
-    conf.addResource("hive-site.xml");
     ArrayList<String> results = new ArrayList<String>();
     ArrayList<String> temp = new ArrayList<String>();
-    Hive hive = Hive.get(conf);
+    Hive hive = Hive.get(hiveConf);
     org.apache.hadoop.hive.ql.metadata.Table tbl = hive.getTable(database, table);
     FetchWork work;
-    if (!tbl.getPartCols().isEmpty()) {
+    if (tbl.getPartCols().isEmpty()) {
+      work = new FetchWork(tbl.getDataLocation().toString(), Utilities.getTableDesc(tbl));
+    } else {
       List<Partition> partitions = hive.getPartitions(tbl);
       List<PartitionDesc> partDesc = new ArrayList<PartitionDesc>();
       List<String> partLocs = new ArrayList<String>();
@@ -384,12 +388,10 @@ public class TestHCatMultiOutputFormat {
       }
       work = new FetchWork(partLocs, partDesc, Utilities.getTableDesc(tbl));
       work.setLimit(100);
-    } else {
-      work = new FetchWork(tbl.getDataLocation().toString(), Utilities.getTableDesc(tbl));
     }
     FetchTask task = new FetchTask();
     task.setWork(work);
-    task.initialize(conf, null, null);
+    task.initialize(hiveConf, null, null);
     task.fetch(temp);
     for (String str : temp) {
       results.add(str.replace("\t", ","));

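The FileSystem.get(mrConf) to path.getFileSystem(hiveConf) rewrites above all
follow one rule: derive the FileSystem from the Path's own scheme rather than
from the default filesystem of a cached conf, so a warehouse directory on a
different filesystem than the MR cluster (for example the pfile proxy used in
tests) still resolves correctly. A minimal sketch of the idiom, with
illustrative names:

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;

  // Hypothetical illustration of the idiom used in this commit.
  public class WarehouseFs {
    public static void main(String[] args) throws Exception {
      Configuration conf = new Configuration();
      // The Path's scheme (hdfs://, file://, pfile://, ...) selects the
      // FileSystem implementation; the conf's default FS does not.
      Path warehouse = new Path(System.getProperty("test.warehouse.dir"));
      FileSystem fs = warehouse.getFileSystem(conf);
      fs.mkdirs(warehouse);
    }
  }
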
Modified: hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatPartitionPublish.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatPartitionPublish.java?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatPartitionPublish.java (original)
+++ hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatPartitionPublish.java Thu Oct 31 18:27:31 2013
@@ -51,6 +51,7 @@ import org.apache.hadoop.mapred.MiniMRCl
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
+import org.apache.hadoop.util.Shell;
 import org.apache.hcatalog.NoExitSecurityManager;
 import org.apache.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
 import org.apache.hcatalog.data.DefaultHCatRecord;
@@ -74,10 +75,10 @@ public class TestHCatPartitionPublish {
   private static HiveConf hcatConf;
   private static HiveMetaStoreClient msc;
   private static SecurityManager securityManager;
+  private static Configuration conf = new Configuration(true);
 
   @BeforeClass
   public static void setup() throws Exception {
-    Configuration conf = new Configuration(true);
     conf.set("yarn.scheduler.capacity.root.queues", "default");
     conf.set("yarn.scheduler.capacity.root.default.capacity", "100");
 
@@ -150,10 +151,13 @@ public class TestHCatPartitionPublish {
     Assert.assertEquals(0, ptns.size());
     Table table = msc.getTable(dbName, tableName);
     Assert.assertTrue(table != null);
-    // Also make sure that the directory has been deleted in the table
-    // location.
-    Assert.assertFalse(fs.exists(new Path(table.getSd().getLocation()
-        + "/part1=p1value1/part0=p0value1")));
+    // On Windows, we cannot remove the output directory when the job fails.
+    // See FileOutputCommitterContainer.abortJob.
+    if (!Shell.WINDOWS) {
+      Path path = new Path(table.getSd().getLocation()
+          + "/part1=p1value1/part0=p0value1");
+      Assert.assertFalse(path.getFileSystem(conf).exists(path));
+    }
   }
 
   void runMRCreateFail(

Modified: hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestMultiOutputFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestMultiOutputFormat.java?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestMultiOutputFormat.java (original)
+++ hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestMultiOutputFormat.java Thu Oct 31 18:27:31 2013
@@ -88,7 +88,7 @@ public class TestMultiOutputFormat {
   }
 
   private static void createWorkDir() throws IOException {
-    String testDir = System.getProperty("test.data.dir", "./");
+    String testDir = System.getProperty("test.tmp.dir", "./");
     testDir = testDir + "/test_multiout_" + Math.abs(new Random().nextLong()) + "/";
     workDir = new File(new File(testDir).getCanonicalPath());
     FileUtil.fullyDelete(workDir);

Modified: hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestSequenceFileReadWrite.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestSequenceFileReadWrite.java?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestSequenceFileReadWrite.java (original)
+++ hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestSequenceFileReadWrite.java Thu Oct 31 18:27:31 2013
@@ -69,7 +69,7 @@ public class TestSequenceFileReadWrite {
 
   @Before
   public void setup() throws Exception {
-    dataDir = new File(System.getProperty("java.io.tmpdir") + File.separator + 
+    dataDir = new File(System.getProperty("java.io.tmpdir") + File.separator +
         TestSequenceFileReadWrite.class.getCanonicalName() + "-" + System.currentTimeMillis());
     hiveConf = new HiveConf(this.getClass());
     warehouseDir = new File(dataDir, "warehouse").getAbsolutePath();
@@ -102,7 +102,7 @@ public class TestSequenceFileReadWrite {
       FileUtils.deleteDirectory(dataDir);
     }
   }
-  
+
   @Test
   public void testSequenceTableWriteRead() throws Exception {
     String createTable = "CREATE TABLE demo_table(a0 int, a1 String, a2 String) STORED AS SEQUENCEFILE";

Modified: hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/rcfile/TestRCFileMapReduceInputFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/rcfile/TestRCFileMapReduceInputFormat.java?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/rcfile/TestRCFileMapReduceInputFormat.java (original)
+++ hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/rcfile/TestRCFileMapReduceInputFormat.java Thu Oct 31 18:27:31 2013
@@ -68,7 +68,7 @@ public class TestRCFileMapReduceInputFor
   static {
     try {
       fs = FileSystem.getLocal(conf);
-      Path dir = new Path(System.getProperty("test.data.dir", ".") + "/mapred");
+      Path dir = new Path(System.getProperty("test.tmp.dir", ".") + "/mapred");
       file = new Path(dir, "test_rcfile");
       fs.delete(dir, true);
       // the SerDe part is from TestLazySimpleSerDe
@@ -203,7 +203,7 @@ public class TestRCFileMapReduceInputFor
   private void writeThenReadByRecordReader(int intervalRecordCount,
                        int writeCount, int splitNumber, long maxSplitSize, CompressionCodec codec)
     throws IOException, InterruptedException {
-    Path testDir = new Path(System.getProperty("test.data.dir", ".")
+    Path testDir = new Path(System.getProperty("test.tmp.dir", ".")
       + "/mapred/testsmallfirstsplit");
     Path testFile = new Path(testDir, "test_rcfile");
     fs.delete(testFile, true);

Modified: hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/MiniCluster.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/MiniCluster.java?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/MiniCluster.java (original)
+++ hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/MiniCluster.java Thu Oct 31 18:27:31 2013
@@ -62,7 +62,9 @@ public class MiniCluster {
       Configuration config = new Configuration();
 
       // Builds and starts the mini dfs and mapreduce clusters
-      System.setProperty("hadoop.log.dir", ".");
+      if(System.getProperty("hadoop.log.dir") == null) {
+        System.setProperty("hadoop.log.dir", "target/tmp/logs/");
+      }
       m_dfs = new MiniDFSCluster(config, dataNodes, true, null);
 
       m_fileSys = m_dfs.getFileSystem();

Modified: hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java (original)
+++ hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java Thu Oct 31 18:27:31 2013
@@ -105,6 +105,7 @@ public class TestHCatMultiOutputFormat {
         HiveMetaStore.main(new String[]{"-v", "-p", msPort, "--hiveconf", warehouseConf});
       } catch (Throwable t) {
         System.err.println("Exiting. Got exception from metastore: " + t.getMessage());
+        t.printStackTrace();
       }
     }
 
@@ -160,13 +161,14 @@ public class TestHCatMultiOutputFormat {
 
   @BeforeClass
   public static void setup() throws Exception {
-    String testDir = System.getProperty("test.data.dir", "./");
+    System.clearProperty("mapred.job.tracker");
+    String testDir = System.getProperty("test.tmp.dir", "./");
     testDir = testDir + "/test_multitable_" + Math.abs(new Random().nextLong()) + "/";
     workDir = new File(new File(testDir).getCanonicalPath());
     FileUtil.fullyDelete(workDir);
     workDir.mkdirs();
 
-    warehousedir = new Path(workDir + "/warehouse");
+    warehousedir = new Path(System.getProperty("test.warehouse.dir"));
 
     // Run hive metastore server
     t = new Thread(new RunMS());
@@ -183,9 +185,10 @@ public class TestHCatMultiOutputFormat {
     mrCluster = new MiniMRCluster(1, fs.getUri().toString(), 1, null, null,
       new JobConf(conf));
     mrConf = mrCluster.createJobConf();
-    fs.mkdirs(warehousedir);
 
     initializeSetup();
+
+    warehousedir.getFileSystem(conf).mkdirs(warehousedir);
   }
 
   private static void initializeSetup() throws Exception {
@@ -248,14 +251,15 @@ public class TestHCatMultiOutputFormat {
     tbl.setPartitionKeys(ColumnHolder.partitionCols);
 
     hmsc.createTable(tbl);
-    FileSystem fs = FileSystem.get(mrConf);
-    fs.setPermission(new Path(warehousedir, tableName), new FsPermission(tablePerm));
+    Path path = new Path(warehousedir, tableName);
+    FileSystem fs = path.getFileSystem(hiveConf);
+    fs.setPermission(path, new FsPermission(tablePerm));
   }
 
   @AfterClass
   public static void tearDown() throws IOException {
     FileUtil.fullyDelete(workDir);
-    FileSystem fs = FileSystem.get(mrConf);
+    FileSystem fs = warehousedir.getFileSystem(hiveConf);
     if (fs.exists(warehousedir)) {
       fs.delete(warehousedir, true);
     }
@@ -312,7 +316,8 @@ public class TestHCatMultiOutputFormat {
     Assert.assertEquals("Comparing output of table " +
       tableNames[0] + " is not correct", outputs.get(0), "a,a,1,ag");
     Assert.assertEquals("Comparing output of table " +
-      tableNames[1] + " is not correct", outputs.get(1), "a,1,ag");
+      tableNames[1] + " is not correct", outputs.get(1),
+      "a,1,ag");
     Assert.assertEquals("Comparing output of table " +
       tableNames[2] + " is not correct", outputs.get(2), "a,a,extra,1,ag");
 


