From: brock@apache.org
To: commits@hive.apache.org
Reply-To: hive-dev@hive.apache.org
Subject: svn commit: r1537576 [2/23] - in /hive/trunk: ./ ant/ ant/src/org/apache/hadoop/hive/ant/ beeline/ beeline/src/java/org/apache/hive/beeline/ beeline/src/test/org/apache/hive/beeline/src/test/ cli/ common/ common/src/java/conf/ common/src/scripts/ commo...
Date: Thu, 31 Oct 2013 18:27:46 -0000
Message-Id: <20131031182825.39B4E238896F@eris.apache.org>
X-Mailer: svnmailer-1.0.9

Added: hive/trunk/common/pom.xml
URL: http://svn.apache.org/viewvc/hive/trunk/common/pom.xml?rev=1537576&view=auto
==============================================================================
--- hive/trunk/common/pom.xml (added)
+++ hive/trunk/common/pom.xml Thu Oct 31 18:27:31 2013
@@ -0,0 +1,164 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- (Apache License 2.0 header elided by the archive) -->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.hive</groupId>
+    <artifactId>hive</artifactId>
+    <version>0.13.0-SNAPSHOT</version>
+    <relativePath>../pom.xml</relativePath>
+  </parent>
+
+  <artifactId>hive-common</artifactId>
+  <packaging>jar</packaging>
+  <name>Hive Common</name>
+
+  <properties>
+    <hive.path.to.root>..</hive.path.to.root>
+  </properties>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-shims</artifactId>
+      <version>${project.version}</version>
+      <classifier>uberjar</classifier>
+    </dependency>
+    <dependency>
+      <groupId>commons-cli</groupId>
+      <artifactId>commons-cli</artifactId>
+      <version>${commons-cli.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>commons-lang</groupId>
+      <artifactId>commons-lang</artifactId>
+      <version>${commons-lang.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>commons-logging</groupId>
+      <artifactId>commons-logging</artifactId>
+      <version>${commons-logging.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>log4j</groupId>
+      <artifactId>log4j</artifactId>
+      <version>${log4j.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-compress</artifactId>
+      <version>${commons-compress.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <version>${junit.version}</version>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+
+  <profiles>
+    <profile>
+      <id>hadoop-1</id>
+      <activation>
+        <activeByDefault>true</activeByDefault>
+      </activation>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-core</artifactId>
+          <version>${hadoop-20S.version}</version>
+          <optional>true</optional>
+        </dependency>
+      </dependencies>
+    </profile>
+    <profile>
+      <id>hadoop-2</id>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-common</artifactId>
+          <version>${hadoop-23.version}</version>
+          <optional>true</optional>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-mapreduce-client-core</artifactId>
+          <version>${hadoop-23.version}</version>
+          <optional>true</optional>
+        </dependency>
+      </dependencies>
+    </profile>
+  </profiles>
+
+  <build>
+    <sourceDirectory>${basedir}/src/java</sourceDirectory>
+    <testSourceDirectory>${basedir}/src/test</testSourceDirectory>
+    <scriptSourceDirectory>${basedir}/src/scripts</scriptSourceDirectory>
+    <testResources>
+      <testResource>
+        <directory>${basedir}/src/test/resources</directory>
+      </testResource>
+    </testResources>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-antrun-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>generate-version-annotation</id>
+            <phase>generate-sources</phase>
+            <configuration>
+              <!-- (ant target body elided by the archive) -->
+            </configuration>
+            <goals>
+              <goal>run</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>build-helper-maven-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>add-source</id>
+            <phase>generate-sources</phase>
+            <goals>
+              <goal>add-source</goal>
+            </goals>
+            <configuration>
+              <sources>
+                <source>src/gen</source>
+              </sources>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+</project>

Modified: hive/trunk/common/src/java/conf/hive-log4j.properties
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/java/conf/hive-log4j.properties?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/common/src/java/conf/hive-log4j.properties (original)
+++ hive/trunk/common/src/java/conf/hive-log4j.properties Thu Oct 31 18:27:31 2013
@@ -53,7 +53,7 @@ log4j.appender.DRFA.layout.ConversionPat
 
 #
 # console
-# Add "console" to rootlogger above if you want to use this 
+# Add "console" to rootlogger above if you want to use this
 #
 
 log4j.appender.console=org.apache.log4j.ConsoleAppender

Modified: hive/trunk/common/src/scripts/saveVersion.sh
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/scripts/saveVersion.sh?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/common/src/scripts/saveVersion.sh (original)
+++ hive/trunk/common/src/scripts/saveVersion.sh Thu Oct 31 18:27:31 2013
@@ -33,7 +33,7 @@ dir=`pwd`
 cwd=`dirname $dir`
 if [ "$revision" = "" ]; then
     if git rev-parse HEAD 2>/dev/null > /dev/null ; then
-        revision=`git log -1 --pretty=format:"%H" ../`
+        revision=`git log -1 --pretty=format:"%H"`
         hostname=`hostname`
         branch=`git branch | sed -n -e 's/^* //p'`
         url="git://${hostname}${cwd}"
@@ -57,7 +57,13 @@ if [ "$url" = "" ]; then
     url="file://$cwd"
 fi
 
-srcChecksum=`find ../ -name '*.java' | grep -v generated-sources | LC_ALL=C sort | xargs md5sum | md5sum | cut -d ' ' -f 1`
+if [ -x /sbin/md5 ]; then
+  md5="/sbin/md5"
+else
+  md5="md5sum"
+fi
+
+srcChecksum=`find ../ -name '*.java' | grep -v generated-sources | LC_ALL=C sort | xargs $md5 | $md5 | cut -d ' ' -f 1`
 
 mkdir -p $src_dir/gen/org/apache/hive/common
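The srcChecksum hunk above makes the checksum step portable: GNU userlands ship md5sum, while BSD and macOS ship /sbin/md5 instead. The same probe-then-use pattern as a standalone sketch; the hash_cmd name is illustrative and not part of the patch:

    #!/bin/sh
    # Pick whichever MD5 tool this platform provides, as saveVersion.sh now does.
    if [ -x /sbin/md5 ]; then
      hash_cmd="/sbin/md5"   # BSD/macOS
    else
      hash_cmd="md5sum"      # GNU coreutils
    fi

    # Fold every Java source file into one digest, mirroring the srcChecksum line.
    find . -name '*.java' | LC_ALL=C sort | xargs $hash_cmd | $hash_cmd | cut -d ' ' -f 1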
Modified: hive/trunk/common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java (original)
+++ hive/trunk/common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java Thu Oct 31 18:27:31 2013
@@ -44,7 +44,7 @@ public class TestHiveLogging extends Tes
     process = null;
   }
 
-  private void configLog(String hiveLog4jTest, String hiveExecLog4jTest) 
+  private void configLog(String hiveLog4jTest, String hiveExecLog4jTest)
       throws Exception {
     String expectedLog4jTestPath = HiveTestUtils.getFileFromClasspath(hiveLog4jTest);
     String expectedLog4jExecPath = HiveTestUtils.getFileFromClasspath(hiveExecLog4jTest);

Added: hive/trunk/contrib/pom.xml
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/pom.xml?rev=1537576&view=auto
==============================================================================
--- hive/trunk/contrib/pom.xml (added)
+++ hive/trunk/contrib/pom.xml Thu Oct 31 18:27:31 2013
@@ -0,0 +1,111 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- (Apache License 2.0 header elided by the archive) -->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.hive</groupId>
+    <artifactId>hive</artifactId>
+    <version>0.13.0-SNAPSHOT</version>
+    <relativePath>../pom.xml</relativePath>
+  </parent>
+
+  <artifactId>hive-contrib</artifactId>
+  <packaging>jar</packaging>
+  <name>Hive Contrib</name>
+
+  <properties>
+    <hive.path.to.root>..</hive.path.to.root>
+  </properties>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-exec</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-serde</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-shims</artifactId>
+      <version>${project.version}</version>
+      <classifier>uberjar</classifier>
+    </dependency>
+    <dependency>
+      <groupId>commons-codec</groupId>
+      <artifactId>commons-codec</artifactId>
+      <version>${commons-codec.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>commons-logging</groupId>
+      <artifactId>commons-logging</artifactId>
+      <version>${commons-logging.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <version>${junit.version}</version>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+
+  <profiles>
+    <profile>
+      <id>hadoop-1</id>
+      <activation>
+        <activeByDefault>true</activeByDefault>
+      </activation>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-core</artifactId>
+          <version>${hadoop-20S.version}</version>
+          <optional>true</optional>
+        </dependency>
+      </dependencies>
+    </profile>
+    <profile>
+      <id>hadoop-2</id>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-common</artifactId>
+          <version>${hadoop-23.version}</version>
+          <optional>true</optional>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-mapreduce-client-core</artifactId>
+          <version>${hadoop-23.version}</version>
+          <optional>true</optional>
+        </dependency>
+      </dependencies>
+    </profile>
+  </profiles>
+
+  <build>
+    <sourceDirectory>${basedir}/src/java</sourceDirectory>
+    <testSourceDirectory>${basedir}/src/test</testSourceDirectory>
+  </build>
+</project>

Modified: hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/metastore/hooks/TestURLHook.java
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/metastore/hooks/TestURLHook.java?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/metastore/hooks/TestURLHook.java (original)
+++ hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/metastore/hooks/TestURLHook.java Thu Oct 31 18:27:31 2013
@@ -33,7 +33,7 @@ public class TestURLHook implements JDOC
   public String getJdoConnectionUrl(Configuration conf) throws Exception {
     if (originalUrl == null) {
       originalUrl = conf.get(HiveConf.ConfVars.METASTORECONNECTURLKEY.varname, "");
-      return "jdbc:derby:;databaseName=../build/test/junit_metastore_db_blank;create=true";
+      return "jdbc:derby:;databaseName=target/tmp/junit_metastore_db_blank;create=true";
     } else {
       return originalUrl;
     }

Modified: hive/trunk/contrib/src/test/queries/clientnegative/case_with_row_sequence.q
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientnegative/case_with_row_sequence.q?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/queries/clientnegative/case_with_row_sequence.q (original)
+++ hive/trunk/contrib/src/test/queries/clientnegative/case_with_row_sequence.q Thu Oct 31 18:27:31 2013
@@ -1,6 +1,6 @@
 drop temporary function row_sequence;
 
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
 
 create temporary function row_sequence as 'org.apache.hadoop.hive.contrib.udf.UDFRowSequence';
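This and the following qfile changes all make the same substitution: the contrib tests stop loading hive-contrib from the Ant build directory (${system:build.dir}) and resolve it from the local Maven repository instead. A quick way to check which jar the tests will pick up, as a sketch: it assumes mvn is on the PATH, and the -q/-DforceStdout combination needs maven-help-plugin 3.1.0 or newer:

    # Resolve the repository root that ${system:maven.local.repository} points at.
    REPO=$(mvn -q help:evaluate -Dexpression=settings.localRepository -DforceStdout)

    # 0.13.0-SNAPSHOT matches the version in the new poms; adjust for other builds.
    HIVE_VERSION=0.13.0-SNAPSHOT
    ls "$REPO/org/apache/hive/hive-contrib/$HIVE_VERSION/hive-contrib-$HIVE_VERSION.jar"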
Modified: hive/trunk/contrib/src/test/queries/clientnegative/invalid_row_sequence.q
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientnegative/invalid_row_sequence.q?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/queries/clientnegative/invalid_row_sequence.q (original)
+++ hive/trunk/contrib/src/test/queries/clientnegative/invalid_row_sequence.q Thu Oct 31 18:27:31 2013
@@ -2,7 +2,7 @@
 
 drop temporary function row_sequence;
 
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
 
 create temporary function row_sequence as 'org.apache.hadoop.hive.contrib.udf.UDFRowSequence';

Modified: hive/trunk/contrib/src/test/queries/clientnegative/serde_regex.q
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientnegative/serde_regex.q?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/queries/clientnegative/serde_regex.q (original)
+++ hive/trunk/contrib/src/test/queries/clientnegative/serde_regex.q Thu Oct 31 18:27:31 2013
@@ -1,4 +1,4 @@
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
 
 USE default;

Modified: hive/trunk/contrib/src/test/queries/clientnegative/udtf_explode2.q
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientnegative/udtf_explode2.q?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/queries/clientnegative/udtf_explode2.q (original)
+++ hive/trunk/contrib/src/test/queries/clientnegative/udtf_explode2.q Thu Oct 31 18:27:31 2013
@@ -1,4 +1,4 @@
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
 
 CREATE TEMPORARY FUNCTION explode2 AS 'org.apache.hadoop.hive.contrib.udtf.example.GenericUDTFExplode2';

Modified: hive/trunk/contrib/src/test/queries/clientnegative/url_hook.q
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientnegative/url_hook.q?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/queries/clientnegative/url_hook.q (original)
+++ hive/trunk/contrib/src/test/queries/clientnegative/url_hook.q Thu Oct 31 18:27:31 2013
@@ -1,4 +1,4 @@
-add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
 set hive.metastore.force.reload.conf=true;
 SHOW TABLES 'src';
 set hive.metastore.ds.connection.url.hook=org.apache.hadoop.hive.contrib.metastore.hooks.TestURLHook;

Modified: hive/trunk/contrib/src/test/queries/clientpositive/dboutput.q
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientpositive/dboutput.q?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/queries/clientpositive/dboutput.q (original)
+++ hive/trunk/contrib/src/test/queries/clientpositive/dboutput.q Thu Oct 31 18:27:31 2013
@@ -1,4 +1,4 @@
-ADD JAR ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
+ADD JAR 
${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar; CREATE TEMPORARY FUNCTION dboutput AS 'org.apache.hadoop.hive.contrib.genericudf.example.GenericUDFDBOutput'; @@ -7,7 +7,7 @@ set mapred.reduce.tasks.speculative.exec set mapred.map.tasks=1; set mapred.reduce.tasks=1; -ADD JAR ${system:build.ivy.lib.dir}/default/derby-${system:derby.version}.jar; +ADD JAR ${system:maven.local.repository}/org/apache/derby/derby/${system:derby.version}/derby-${system:derby.version}.jar; DESCRIBE FUNCTION dboutput; Modified: hive/trunk/contrib/src/test/queries/clientpositive/fileformat_base64.q URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientpositive/fileformat_base64.q?rev=1537576&r1=1537575&r2=1537576&view=diff ============================================================================== --- hive/trunk/contrib/src/test/queries/clientpositive/fileformat_base64.q (original) +++ hive/trunk/contrib/src/test/queries/clientpositive/fileformat_base64.q Thu Oct 31 18:27:31 2013 @@ -1,4 +1,4 @@ -add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar; +add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar; DROP TABLE base64_test; Modified: hive/trunk/contrib/src/test/queries/clientpositive/java_mr_example.q URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientpositive/java_mr_example.q?rev=1537576&r1=1537575&r2=1537576&view=diff ============================================================================== --- hive/trunk/contrib/src/test/queries/clientpositive/java_mr_example.q (original) +++ hive/trunk/contrib/src/test/queries/clientpositive/java_mr_example.q Thu Oct 31 18:27:31 2013 @@ -1,10 +1,10 @@ FROM ( FROM src MAP value, key - USING 'java -cp ${system:build.dir}/hive-contrib-${system:hive.version}.jar org.apache.hadoop.hive.contrib.mr.example.IdentityMapper' + USING 'java -cp ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar org.apache.hadoop.hive.contrib.mr.example.IdentityMapper' AS k, v CLUSTER BY k) map_output REDUCE k, v - USING 'java -cp ${system:build.dir}/hive-contrib-${system:hive.version}.jar org.apache.hadoop.hive.contrib.mr.example.WordCountReduce' + USING 'java -cp ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar org.apache.hadoop.hive.contrib.mr.example.WordCountReduce' AS k, v ; \ No newline at end of file Modified: hive/trunk/contrib/src/test/queries/clientpositive/lateral_view_explode2.q URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientpositive/lateral_view_explode2.q?rev=1537576&r1=1537575&r2=1537576&view=diff ============================================================================== --- hive/trunk/contrib/src/test/queries/clientpositive/lateral_view_explode2.q (original) +++ hive/trunk/contrib/src/test/queries/clientpositive/lateral_view_explode2.q Thu Oct 31 18:27:31 2013 @@ -1,4 +1,4 @@ -add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar; +add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar; CREATE TEMPORARY FUNCTION explode2 AS 'org.apache.hadoop.hive.contrib.udtf.example.GenericUDTFExplode2'; Modified: hive/trunk/contrib/src/test/queries/clientpositive/serde_regex.q URL: 
http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientpositive/serde_regex.q?rev=1537576&r1=1537575&r2=1537576&view=diff ============================================================================== --- hive/trunk/contrib/src/test/queries/clientpositive/serde_regex.q (original) +++ hive/trunk/contrib/src/test/queries/clientpositive/serde_regex.q Thu Oct 31 18:27:31 2013 @@ -1,4 +1,4 @@ -add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar; +add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar; EXPLAIN CREATE TABLE serde_regex( @@ -35,7 +35,7 @@ WITH SERDEPROPERTIES ( ) STORED AS TEXTFILE; -LOAD DATA LOCAL INPATH "../data/files/apache.access.log" INTO TABLE serde_regex; -LOAD DATA LOCAL INPATH "../data/files/apache.access.2.log" INTO TABLE serde_regex; +LOAD DATA LOCAL INPATH "../../data/files/apache.access.log" INTO TABLE serde_regex; +LOAD DATA LOCAL INPATH "../../data/files/apache.access.2.log" INTO TABLE serde_regex; SELECT * FROM serde_regex ORDER BY time; \ No newline at end of file Modified: hive/trunk/contrib/src/test/queries/clientpositive/serde_s3.q URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientpositive/serde_s3.q?rev=1537576&r1=1537575&r2=1537576&view=diff ============================================================================== --- hive/trunk/contrib/src/test/queries/clientpositive/serde_s3.q (original) +++ hive/trunk/contrib/src/test/queries/clientpositive/serde_s3.q Thu Oct 31 18:27:31 2013 @@ -1,4 +1,4 @@ -add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar; +add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar; DROP TABLE s3log; CREATE TABLE s3log @@ -7,7 +7,7 @@ STORED AS TEXTFILE; DESCRIBE s3log; -LOAD DATA LOCAL INPATH '../contrib/data/files/s3.log' INTO TABLE s3log; +LOAD DATA LOCAL INPATH '../../contrib/data/files/s3.log' INTO TABLE s3log; SELECT a.* FROM s3log a; Modified: hive/trunk/contrib/src/test/queries/clientpositive/serde_typedbytes.q URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientpositive/serde_typedbytes.q?rev=1537576&r1=1537575&r2=1537576&view=diff ============================================================================== --- hive/trunk/contrib/src/test/queries/clientpositive/serde_typedbytes.q (original) +++ hive/trunk/contrib/src/test/queries/clientpositive/serde_typedbytes.q Thu Oct 31 18:27:31 2013 @@ -1,4 +1,4 @@ -add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar; +add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar; drop table dest1; CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE; Modified: hive/trunk/contrib/src/test/queries/clientpositive/serde_typedbytes2.q URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientpositive/serde_typedbytes2.q?rev=1537576&r1=1537575&r2=1537576&view=diff ============================================================================== --- hive/trunk/contrib/src/test/queries/clientpositive/serde_typedbytes2.q (original) +++ hive/trunk/contrib/src/test/queries/clientpositive/serde_typedbytes2.q Thu Oct 31 18:27:31 2013 @@ -1,4 +1,4 @@ -add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar; +add jar 
${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar; drop table dest1; CREATE TABLE dest1(key SMALLINT, value STRING) STORED AS TEXTFILE; Modified: hive/trunk/contrib/src/test/queries/clientpositive/serde_typedbytes3.q URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientpositive/serde_typedbytes3.q?rev=1537576&r1=1537575&r2=1537576&view=diff ============================================================================== --- hive/trunk/contrib/src/test/queries/clientpositive/serde_typedbytes3.q (original) +++ hive/trunk/contrib/src/test/queries/clientpositive/serde_typedbytes3.q Thu Oct 31 18:27:31 2013 @@ -1,4 +1,4 @@ -add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar; +add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar; drop table dest1; CREATE TABLE dest1(key STRING, value STRING) STORED AS TEXTFILE; Modified: hive/trunk/contrib/src/test/queries/clientpositive/serde_typedbytes4.q URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientpositive/serde_typedbytes4.q?rev=1537576&r1=1537575&r2=1537576&view=diff ============================================================================== --- hive/trunk/contrib/src/test/queries/clientpositive/serde_typedbytes4.q (original) +++ hive/trunk/contrib/src/test/queries/clientpositive/serde_typedbytes4.q Thu Oct 31 18:27:31 2013 @@ -1,4 +1,4 @@ -add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar; +add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar; drop table dest1; CREATE TABLE dest1(key STRING, value STRING) STORED AS TEXTFILE; Modified: hive/trunk/contrib/src/test/queries/clientpositive/serde_typedbytes5.q URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientpositive/serde_typedbytes5.q?rev=1537576&r1=1537575&r2=1537576&view=diff ============================================================================== --- hive/trunk/contrib/src/test/queries/clientpositive/serde_typedbytes5.q (original) +++ hive/trunk/contrib/src/test/queries/clientpositive/serde_typedbytes5.q Thu Oct 31 18:27:31 2013 @@ -1,4 +1,4 @@ -add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar; +add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar; drop table dest1; CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE; @@ -8,7 +8,7 @@ FROM ( FROM src SELECT TRANSFORM(src.key, src.value) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe' RECORDWRITER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordWriter' - USING 'python ../data/scripts/cat.py' + USING 'python ../../data/scripts/cat.py' AS (tkey, tvalue) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe' RECORDREADER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordReader' ) tmap @@ -18,7 +18,7 @@ FROM ( FROM src SELECT TRANSFORM(src.key, src.value) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe' RECORDWRITER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordWriter' - USING 'python ../data/scripts/cat.py' + USING 'python ../../data/scripts/cat.py' AS (tkey, tvalue) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe' RECORDREADER 
'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordReader' ) tmap Modified: hive/trunk/contrib/src/test/queries/clientpositive/serde_typedbytes_null.q URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientpositive/serde_typedbytes_null.q?rev=1537576&r1=1537575&r2=1537576&view=diff ============================================================================== --- hive/trunk/contrib/src/test/queries/clientpositive/serde_typedbytes_null.q (original) +++ hive/trunk/contrib/src/test/queries/clientpositive/serde_typedbytes_null.q Thu Oct 31 18:27:31 2013 @@ -1,4 +1,4 @@ -add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar; +add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar; DROP TABLE table1; Modified: hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_avg.q URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_avg.q?rev=1537576&r1=1537575&r2=1537576&view=diff ============================================================================== --- hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_avg.q (original) +++ hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_avg.q Thu Oct 31 18:27:31 2013 @@ -1,4 +1,4 @@ -add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar; +add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar; CREATE TEMPORARY FUNCTION example_avg AS 'org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleAvg'; Modified: hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_group_concat.q URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_group_concat.q?rev=1537576&r1=1537575&r2=1537576&view=diff ============================================================================== --- hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_group_concat.q (original) +++ hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_group_concat.q Thu Oct 31 18:27:31 2013 @@ -1,4 +1,4 @@ -add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar; +add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar; CREATE TEMPORARY FUNCTION example_group_concat AS 'org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleGroupConcat'; Modified: hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_max.q URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_max.q?rev=1537576&r1=1537575&r2=1537576&view=diff ============================================================================== --- hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_max.q (original) +++ hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_max.q Thu Oct 31 18:27:31 2013 @@ -1,4 +1,4 @@ -add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar; +add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar; CREATE TEMPORARY FUNCTION example_max AS 'org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleMax'; Modified: hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_max_n.q URL: 
http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_max_n.q?rev=1537576&r1=1537575&r2=1537576&view=diff ============================================================================== --- hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_max_n.q (original) +++ hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_max_n.q Thu Oct 31 18:27:31 2013 @@ -1,4 +1,4 @@ -add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar; +add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar; CREATE TEMPORARY FUNCTION example_max_n AS 'org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleMaxN'; Modified: hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_min.q URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_min.q?rev=1537576&r1=1537575&r2=1537576&view=diff ============================================================================== --- hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_min.q (original) +++ hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_min.q Thu Oct 31 18:27:31 2013 @@ -1,4 +1,4 @@ -add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar; +add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar; CREATE TEMPORARY FUNCTION example_min AS 'org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleMin'; Modified: hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_min_n.q URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_min_n.q?rev=1537576&r1=1537575&r2=1537576&view=diff ============================================================================== --- hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_min_n.q (original) +++ hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_min_n.q Thu Oct 31 18:27:31 2013 @@ -1,4 +1,4 @@ -add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar; +add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar; CREATE TEMPORARY FUNCTION example_min_n AS 'org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleMinN'; EXPLAIN Modified: hive/trunk/contrib/src/test/queries/clientpositive/udf_example_add.q URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientpositive/udf_example_add.q?rev=1537576&r1=1537575&r2=1537576&view=diff ============================================================================== --- hive/trunk/contrib/src/test/queries/clientpositive/udf_example_add.q (original) +++ hive/trunk/contrib/src/test/queries/clientpositive/udf_example_add.q Thu Oct 31 18:27:31 2013 @@ -1,4 +1,4 @@ -add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar; +add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar; CREATE TEMPORARY FUNCTION example_add AS 'org.apache.hadoop.hive.contrib.udf.example.UDFExampleAdd'; Modified: hive/trunk/contrib/src/test/queries/clientpositive/udf_example_arraymapstruct.q URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientpositive/udf_example_arraymapstruct.q?rev=1537576&r1=1537575&r2=1537576&view=diff ============================================================================== --- 
hive/trunk/contrib/src/test/queries/clientpositive/udf_example_arraymapstruct.q (original) +++ hive/trunk/contrib/src/test/queries/clientpositive/udf_example_arraymapstruct.q Thu Oct 31 18:27:31 2013 @@ -1,4 +1,4 @@ -add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar; +add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar; CREATE TEMPORARY FUNCTION example_arraysum AS 'org.apache.hadoop.hive.contrib.udf.example.UDFExampleArraySum'; CREATE TEMPORARY FUNCTION example_mapconcat AS 'org.apache.hadoop.hive.contrib.udf.example.UDFExampleMapConcat'; Modified: hive/trunk/contrib/src/test/queries/clientpositive/udf_example_format.q URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientpositive/udf_example_format.q?rev=1537576&r1=1537575&r2=1537576&view=diff ============================================================================== --- hive/trunk/contrib/src/test/queries/clientpositive/udf_example_format.q (original) +++ hive/trunk/contrib/src/test/queries/clientpositive/udf_example_format.q Thu Oct 31 18:27:31 2013 @@ -1,4 +1,4 @@ -add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar; +add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar; CREATE TEMPORARY FUNCTION example_format AS 'org.apache.hadoop.hive.contrib.udf.example.UDFExampleFormat'; Modified: hive/trunk/contrib/src/test/queries/clientpositive/udf_row_sequence.q URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientpositive/udf_row_sequence.q?rev=1537576&r1=1537575&r2=1537576&view=diff ============================================================================== --- hive/trunk/contrib/src/test/queries/clientpositive/udf_row_sequence.q (original) +++ hive/trunk/contrib/src/test/queries/clientpositive/udf_row_sequence.q Thu Oct 31 18:27:31 2013 @@ -4,7 +4,7 @@ drop temporary function row_sequence; -add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar; +add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar; create temporary function row_sequence as 'org.apache.hadoop.hive.contrib.udf.UDFRowSequence'; Modified: hive/trunk/contrib/src/test/queries/clientpositive/udtf_explode2.q URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientpositive/udtf_explode2.q?rev=1537576&r1=1537575&r2=1537576&view=diff ============================================================================== --- hive/trunk/contrib/src/test/queries/clientpositive/udtf_explode2.q (original) +++ hive/trunk/contrib/src/test/queries/clientpositive/udtf_explode2.q Thu Oct 31 18:27:31 2013 @@ -1,4 +1,4 @@ -add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar; +add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar; CREATE TEMPORARY FUNCTION explode2 AS 'org.apache.hadoop.hive.contrib.udtf.example.GenericUDTFExplode2'; Modified: hive/trunk/contrib/src/test/queries/clientpositive/udtf_output_on_close.q URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientpositive/udtf_output_on_close.q?rev=1537576&r1=1537575&r2=1537576&view=diff ============================================================================== --- hive/trunk/contrib/src/test/queries/clientpositive/udtf_output_on_close.q (original) +++ 
hive/trunk/contrib/src/test/queries/clientpositive/udtf_output_on_close.q Thu Oct 31 18:27:31 2013 @@ -1,4 +1,4 @@ -add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar; +add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar; CREATE TEMPORARY FUNCTION udtfCount2 AS 'org.apache.hadoop.hive.contrib.udtf.example.GenericUDTFCount2'; Modified: hive/trunk/contrib/src/test/results/clientpositive/serde_regex.q.out URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/results/clientpositive/serde_regex.q.out?rev=1537576&r1=1537575&r2=1537576&view=diff ============================================================================== --- hive/trunk/contrib/src/test/results/clientpositive/serde_regex.q.out (original) +++ hive/trunk/contrib/src/test/results/clientpositive/serde_regex.q.out Thu Oct 31 18:27:31 2013 @@ -92,16 +92,16 @@ WITH SERDEPROPERTIES ( STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@serde_regex -PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/apache.access.log" INTO TABLE serde_regex +PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/apache.access.log" INTO TABLE serde_regex PREHOOK: type: LOAD PREHOOK: Output: default@serde_regex -POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/apache.access.log" INTO TABLE serde_regex +POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/apache.access.log" INTO TABLE serde_regex POSTHOOK: type: LOAD POSTHOOK: Output: default@serde_regex -PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/apache.access.2.log" INTO TABLE serde_regex +PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/apache.access.2.log" INTO TABLE serde_regex PREHOOK: type: LOAD PREHOOK: Output: default@serde_regex -POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/apache.access.2.log" INTO TABLE serde_regex +POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/apache.access.2.log" INTO TABLE serde_regex POSTHOOK: type: LOAD POSTHOOK: Output: default@serde_regex PREHOOK: query: SELECT * FROM serde_regex ORDER BY time Modified: hive/trunk/contrib/src/test/results/clientpositive/serde_s3.q.out URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/results/clientpositive/serde_s3.q.out?rev=1537576&r1=1537575&r2=1537576&view=diff ============================================================================== --- hive/trunk/contrib/src/test/results/clientpositive/serde_s3.q.out (original) +++ hive/trunk/contrib/src/test/results/clientpositive/serde_s3.q.out Thu Oct 31 18:27:31 2013 @@ -32,10 +32,10 @@ totaltime int turnaroundtime int from deserializer referer string from deserializer useragent string from deserializer -PREHOOK: query: LOAD DATA LOCAL INPATH '../contrib/data/files/s3.log' INTO TABLE s3log +PREHOOK: query: LOAD DATA LOCAL INPATH '../../contrib/data/files/s3.log' INTO TABLE s3log PREHOOK: type: LOAD PREHOOK: Output: default@s3log -POSTHOOK: query: LOAD DATA LOCAL INPATH '../contrib/data/files/s3.log' INTO TABLE s3log +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../contrib/data/files/s3.log' INTO TABLE s3log POSTHOOK: type: LOAD POSTHOOK: Output: default@s3log PREHOOK: query: SELECT a.* FROM s3log a Modified: hive/trunk/contrib/src/test/results/clientpositive/serde_typedbytes5.q.out URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/results/clientpositive/serde_typedbytes5.q.out?rev=1537576&r1=1537575&r2=1537576&view=diff 
============================================================================== --- hive/trunk/contrib/src/test/results/clientpositive/serde_typedbytes5.q.out (original) +++ hive/trunk/contrib/src/test/results/clientpositive/serde_typedbytes5.q.out Thu Oct 31 18:27:31 2013 @@ -12,7 +12,7 @@ FROM ( FROM src SELECT TRANSFORM(src.key, src.value) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe' RECORDWRITER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordWriter' - USING 'python ../data/scripts/cat.py' + USING 'python ../../data/scripts/cat.py' AS (tkey, tvalue) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe' RECORDREADER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordReader' ) tmap @@ -23,14 +23,14 @@ FROM ( FROM src SELECT TRANSFORM(src.key, src.value) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe' RECORDWRITER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordWriter' - USING 'python ../data/scripts/cat.py' + USING 'python ../../data/scripts/cat.py' AS (tkey, tvalue) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe' RECORDREADER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordReader' ) tmap INSERT OVERWRITE TABLE dest1 SELECT tkey, tvalue POSTHOOK: type: QUERY ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value)) (TOK_SERDE (TOK_SERDENAME 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe')) (TOK_RECORDWRITER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordWriter') 'python ../data/scripts/cat.py' (TOK_SERDE (TOK_SERDENAME 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe')) (TOK_RECORDREADER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordReader') (TOK_ALIASLIST tkey tvalue)))))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME dest1))) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL tkey)) (TOK_SELEXPR (TOK_TABLE_OR_COL tvalue))))) + (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. 
(TOK_TABLE_OR_COL src) value)) (TOK_SERDE (TOK_SERDENAME 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe')) (TOK_RECORDWRITER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordWriter') 'python ../../data/scripts/cat.py' (TOK_SERDE (TOK_SERDENAME 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe')) (TOK_RECORDREADER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordReader') (TOK_ALIASLIST tkey tvalue)))))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME dest1))) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL tkey)) (TOK_SELEXPR (TOK_TABLE_OR_COL tvalue))))) STAGE DEPENDENCIES: Stage-1 is a root stage @@ -57,7 +57,7 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1 Transform Operator - command: python ../data/scripts/cat.py + command: python ../../data/scripts/cat.py output info: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -139,7 +139,7 @@ PREHOOK: query: FROM ( FROM src SELECT TRANSFORM(src.key, src.value) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe' RECORDWRITER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordWriter' - USING 'python ../data/scripts/cat.py' + USING 'python ../../data/scripts/cat.py' AS (tkey, tvalue) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe' RECORDREADER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordReader' ) tmap @@ -151,7 +151,7 @@ POSTHOOK: query: FROM ( FROM src SELECT TRANSFORM(src.key, src.value) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe' RECORDWRITER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordWriter' - USING 'python ../data/scripts/cat.py' + USING 'python ../../data/scripts/cat.py' AS (tkey, tvalue) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe' RECORDREADER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordReader' ) tmap Added: hive/trunk/data/conf/hive-log4j-new.properties URL: http://svn.apache.org/viewvc/hive/trunk/data/conf/hive-log4j-new.properties?rev=1537576&view=auto ============================================================================== --- hive/trunk/data/conf/hive-log4j-new.properties (added) +++ hive/trunk/data/conf/hive-log4j-new.properties Thu Oct 31 18:27:31 2013 @@ -0,0 +1,78 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Define some default values that can be overridden by system properties +hive.root.logger=DEBUG,DRFA +hive.log.dir=${test.tmp.dir}/log/ +hive.log.file=hive.log + +# Define the root logger to the system property "hadoop.root.logger". 
+log4j.rootLogger=${hive.root.logger}, EventCounter
+
+# Logging Threshold
+log4j.threshhold=WARN
+
+#
+# Daily Rolling File Appender
+#
+
+log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.DRFA.File=${hive.log.dir}/${hive.log.file}
+
+# Rollver at midnight
+log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
+
+# 30-day backup
+#log4j.appender.DRFA.MaxBackupIndex=30
+log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
+
+# Pattern format: Date LogLevel LoggerName LogMessage
+#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+# Debugging Pattern format
+log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
+
+
+#
+# console
+# Add "console" to rootlogger above if you want to use this
+#
+
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.target=System.err
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
+
+#custom logging levels
+#log4j.logger.xxx=DEBUG
+
+#
+# Event Counter Appender
+# Sends counts of logging messages at different severity levels to Hadoop Metrics.
+#
+log4j.appender.EventCounter=org.apache.hadoop.hive.shims.HiveEventCounter
+
+
+log4j.category.DataNucleus=ERROR,DRFA
+log4j.category.Datastore=ERROR,DRFA
+log4j.category.Datastore.Schema=ERROR,DRFA
+log4j.category.JPOX.Datastore=ERROR,DRFA
+log4j.category.JPOX.Plugin=ERROR,DRFA
+log4j.category.JPOX.MetaData=ERROR,DRFA
+log4j.category.JPOX.Query=ERROR,DRFA
+log4j.category.JPOX.General=ERROR,DRFA
+log4j.category.JPOX.Enhancer=ERROR,DRFA
+log4j.logger.org.apache.hadoop.conf.Configuration=ERROR,DRFA

Modified: hive/trunk/data/conf/hive-log4j.properties
URL: http://svn.apache.org/viewvc/hive/trunk/data/conf/hive-log4j.properties?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/data/conf/hive-log4j.properties (original)
+++ hive/trunk/data/conf/hive-log4j.properties Thu Oct 31 18:27:31 2013
@@ -47,7 +47,7 @@ log4j.appender.DRFA.layout.ConversionPat
 
 #
 # console
-# Add "console" to rootlogger above if you want to use this 
+# Add "console" to rootlogger above if you want to use this
 #
 
 log4j.appender.console=org.apache.log4j.ConsoleAppender

Added: hive/trunk/data/conf/hive-site-new.xml
URL: http://svn.apache.org/viewvc/hive/trunk/data/conf/hive-site-new.xml?rev=1537576&view=auto
==============================================================================
--- hive/trunk/data/conf/hive-site-new.xml (added)
+++ hive/trunk/data/conf/hive-site-new.xml Thu Oct 31 18:27:31 2013
@@ -0,0 +1,189 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!-- (Apache License 2.0 header elided by the archive) -->
+
+<configuration>
+
+<property>
+  <name>hadoop.tmp.dir</name>
+  <value>${test.tmp.dir}/hadoop-tmp</value>
+  <description>A base for other temporary directories.</description>
+</property>
+
+<property>
+  <name>hive.exec.scratchdir</name>
+  <value>${test.tmp.dir}/scratchdir</value>
+  <description>Scratch space for Hive jobs</description>
+</property>
+
+<property>
+  <name>hive.exec.local.scratchdir</name>
+  <value>${test.tmp.dir}/localscratchdir/</value>
+  <description>Local scratch space for Hive jobs</description>
+</property>
+
+<property>
+  <name>javax.jdo.option.ConnectionURL</name>
+  <value>jdbc:derby:;databaseName=${test.tmp.dir}/junit_metastore_db;create=true</value>
+</property>
+
+<property>
+  <name>hive.stats.dbconnectionstring</name>
+  <value>jdbc:derby:;databaseName=${test.tmp.dir}/TempStatsStore;create=true</value>
+</property>
+
+<property>
+  <name>javax.jdo.option.ConnectionDriverName</name>
+  <value>org.apache.derby.jdbc.EmbeddedDriver</value>
+</property>
+
+<property>
+  <name>javax.jdo.option.ConnectionUserName</name>
+  <value>APP</value>
+</property>
+
+<property>
+  <name>javax.jdo.option.ConnectionPassword</name>
+  <value>mine</value>
+</property>
+
+<property>
+  <name>hive.metastore.warehouse.dir</name>
+  <value>${test.warehouse.dir}</value>
+</property>
+
+<property>
+  <name>hive.metastore.metadb.dir</name>
+  <value>file://${test.tmp.dir}/metadb/</value>
+  <description>
+    Required by metastore server or if the uris argument below is not supplied
+  </description>
+</property>
+
+<property>
+  <name>test.log.dir</name>
+  <value>${test.tmp.dir}/log/</value>
+</property>
+
+<property>
+  <name>test.data.files</name>
+  <value>${hive.root}/data/files</value>
+</property>
+
+<property>
+  <name>hive.jar.path</name>
+  <value>${maven.local.repository}/org/apache/hive/hive-exec/${hive.version}/hive-exec-${hive.version}.jar</value>
+</property>
+
+<property>
+  <name>hive.metastore.rawstore.impl</name>
+  <value>org.apache.hadoop.hive.metastore.ObjectStore</value>
+  <description>Name of the class that implements org.apache.hadoop.hive.metastore.rawstore interface. This class is used to store and retrieval of raw metadata objects such as table, database</description>
+</property>
+
+<property>
+  <name>hive.querylog.location</name>
+  <value>${test.tmp.dir}/tmp</value>
+  <description>Location of the structured hive logs</description>
+</property>
+
+<property>
+  <name>hive.exec.pre.hooks</name>
+  <value>org.apache.hadoop.hive.ql.hooks.PreExecutePrinter, org.apache.hadoop.hive.ql.hooks.EnforceReadOnlyTables</value>
+  <description>Pre Execute Hook for Tests</description>
+</property>
+
+<property>
+  <name>hive.exec.post.hooks</name>
+  <value>org.apache.hadoop.hive.ql.hooks.PostExecutePrinter</value>
+  <description>Post Execute Hook for Tests</description>
+</property>
+
+<property>
+  <name>hive.task.progress</name>
+  <value>false</value>
+  <description>Track progress of a task</description>
+</property>
+
+<property>
+  <name>hive.support.concurrency</name>
+  <value>true</value>
+  <description>Whether hive supports concurrency or not. A zookeeper instance must be up and running for the default hive lock manager to support read-write locks.</description>
+</property>
+
+<property>
+  <name>fs.pfile.impl</name>
+  <value>org.apache.hadoop.fs.ProxyLocalFileSystem</value>
+  <description>A proxy for local file system used for cross file system testing</description>
+</property>
+
+<property>
+  <name>hive.exec.mode.local.auto</name>
+  <value>false</value>
+  <description>
+    Let hive determine whether to run in local mode automatically
    Disabling this for tests so that minimr is not affected
+  </description>
+</property>
+
+<property>
+  <name>hive.auto.convert.join</name>
+  <value>false</value>
+  <description>Whether Hive enable the optimization about converting common join into mapjoin based on the input file size</description>
+</property>
+
+<property>
+  <name>hive.ignore.mapjoin.hint</name>
+  <value>false</value>
+  <description>Whether Hive ignores the mapjoin hint</description>
+</property>
+
+<property>
+  <name>hive.input.format</name>
+  <value>org.apache.hadoop.hive.ql.io.CombineHiveInputFormat</value>
+  <description>The default input format, if it is not specified, the system assigns it. It is set to HiveInputFormat for hadoop versions 17, 18 and 19, whereas it is set to CombineHiveInputFormat for hadoop 20. The user can always overwrite it - if there is a bug in CombineHiveInputFormat, it can always be manually set to HiveInputFormat.</description>
+</property>
+
+<property>
+  <name>hive.default.rcfile.serde</name>
+  <value>org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe</value>
+  <description>The default SerDe hive will use for the rcfile format</description>
+</property>
+
+</configuration>

Modified: hive/trunk/data/files/symlink1.txt
URL: http://svn.apache.org/viewvc/hive/trunk/data/files/symlink1.txt?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/data/files/symlink1.txt (original)
+++ hive/trunk/data/files/symlink1.txt Thu Oct 31 18:27:31 2013
@@ -1,2 +1,2 @@
-../data/files/T1.txt
-../data/files/T3.txt
+../../data/files/T1.txt
+../../data/files/T3.txt

Modified: hive/trunk/data/files/symlink2.txt
URL: http://svn.apache.org/viewvc/hive/trunk/data/files/symlink2.txt?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/data/files/symlink2.txt (original)
+++ hive/trunk/data/files/symlink2.txt Thu Oct 31 18:27:31 2013
@@ -1 +1 @@
-../data/files/T2.txt
+../../data/files/T2.txt

Added: hive/trunk/hbase-handler/pom.xml
URL: http://svn.apache.org/viewvc/hive/trunk/hbase-handler/pom.xml?rev=1537576&view=auto
==============================================================================
--- hive/trunk/hbase-handler/pom.xml (added)
+++ hive/trunk/hbase-handler/pom.xml Thu Oct 31 18:27:31 2013
@@ -0,0 +1,143 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- (Apache License 2.0 header elided by the archive) -->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.hive</groupId>
+    <artifactId>hive</artifactId>
+    <version>0.13.0-SNAPSHOT</version>
+    <relativePath>../pom.xml</relativePath>
+  </parent>
+
+  <artifactId>hive-hbase-handler</artifactId>
+  <packaging>jar</packaging>
+  <name>Hive HBase Handler</name>
+
+  <properties>
+    <hive.path.to.root>..</hive.path.to.root>
+  </properties>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-metastore</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-serde</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-service</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-exec</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-shims</artifactId>
+      <version>${project.version}</version>
+      <classifier>uberjar</classifier>
+    </dependency>
+    <dependency>
+      <groupId>commons-lang</groupId>
+      <artifactId>commons-lang</artifactId>
+      <version>${commons-lang.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>commons-logging</groupId>
+      <artifactId>commons-logging</artifactId>
+      <version>${commons-logging.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase</artifactId>
+      <version>${hbase.version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>org.apache.thrift</groupId>
+          <artifactId>libthrift</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <version>${junit.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase</artifactId>
+      <version>${hbase.version}</version>
+      <classifier>tests</classifier>
+    </dependency>
+  </dependencies>
+
+  <profiles>
+    <profile>
+      <id>hadoop-1</id>
+      <activation>
+        <activeByDefault>true</activeByDefault>
+      </activation>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-core</artifactId>
+          <version>${hadoop-20S.version}</version>
+          <optional>true</optional>
+        </dependency>
+      </dependencies>
+    </profile>
+    <profile>
+      <id>hadoop-2</id>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-common</artifactId>
+          <version>${hadoop-23.version}</version>
+          <optional>true</optional>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-mapreduce-client-core</artifactId>
+          <version>${hadoop-23.version}</version>
+          <optional>true</optional>
+        </dependency>
+      </dependencies>
+    </profile>
+  </profiles>
+
+  <build>
+    <sourceDirectory>${basedir}/src/java</sourceDirectory>
+    <testSourceDirectory>${basedir}/src/test</testSourceDirectory>
+  </build>
+</project>
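All of the new module poms follow the same shape: the Hadoop dependencies sit behind a hadoop-1 profile (active by default, hadoop-core at ${hadoop-20S.version}) and a hadoop-2 profile (hadoop-common and hadoop-mapreduce-client-core at ${hadoop-23.version}). Selecting a Hadoop line at build time would look roughly like this; the profile ids come from the poms above, while the goals and flags are assumptions, not part of this commit:

    # Default build: the hadoop-1 profile is activeByDefault in every module pom.
    mvn clean install -DskipTests

    # Build against the Hadoop 2.x line instead.
    mvn clean install -DskipTests -Phadoop-2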
Modified: hive/trunk/hbase-handler/src/test/org/apache/hadoop/hive/hbase/HBaseTestSetup.java
URL: http://svn.apache.org/viewvc/hive/trunk/hbase-handler/src/test/org/apache/hadoop/hive/hbase/HBaseTestSetup.java?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/hbase-handler/src/test/org/apache/hadoop/hive/hbase/HBaseTestSetup.java (original)
+++ hive/trunk/hbase-handler/src/test/org/apache/hadoop/hive/hbase/HBaseTestSetup.java Thu Oct 31 18:27:31 2013
@@ -58,9 +58,9 @@ public class HBaseTestSetup extends Test
   }
 
   void preTest(HiveConf conf) throws Exception {
-    
+
     setUpFixtures(conf);
-    
+
     conf.set("hbase.rootdir", hbaseRoot);
     conf.set("hbase.master", hbaseCluster.getMaster().getServerName().getHostAndPort());
     conf.set("hbase.zookeeper.property.clientPort", Integer.toString(zooKeeperPort));
@@ -73,15 +73,15 @@ public class HBaseTestSetup extends Test
   }
 
   private void setUpFixtures(HiveConf conf) throws Exception {
-    /* We are not starting zookeeper server here because 
+    /* We are not starting zookeeper server here because
      * QTestUtil already starts it.
      */
     int zkPort = conf.getInt("hive.zookeeper.client.port", -1);
     if ((zkPort == zooKeeperPort) && (hbaseCluster != null)) {
-      return;
+      return;
     }
     zooKeeperPort = zkPort;
-    String tmpdir = System.getProperty("user.dir")+"/../build/ql/tmp";
+    String tmpdir = System.getProperty("test.tmp.dir");
     this.tearDown();
     conf.set("hbase.master", "local");

Modified: hive/trunk/hbase-handler/src/test/queries/negative/cascade_dbdrop_hadoop20.q
URL: http://svn.apache.org/viewvc/hive/trunk/hbase-handler/src/test/queries/negative/cascade_dbdrop_hadoop20.q?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/hbase-handler/src/test/queries/negative/cascade_dbdrop_hadoop20.q (original)
+++ hive/trunk/hbase-handler/src/test/queries/negative/cascade_dbdrop_hadoop20.q Thu Oct 31 18:27:31 2013
@@ -16,11 +16,11 @@ STORED BY 'org.apache.hadoop.hive.hbase.
 WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,cf:string")
 TBLPROPERTIES ("hbase.table.name" = "hbase_table_0");
 
-dfs -ls ../build/ql/tmp/hbase/hbase_table_0;
+dfs -ls target/tmp/hbase/hbase_table_0;
 
 DROP DATABASE IF EXISTS hbaseDB CASCADE;
 
-dfs -ls ../build/ql/tmp/hbase/hbase_table_0;
+dfs -ls target/tmp/hbase/hbase/hbase_table_0;

Added: hive/trunk/hcatalog/core/.gitignore
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/.gitignore?rev=1537576&view=auto
==============================================================================
--- hive/trunk/hcatalog/core/.gitignore (added)
+++ hive/trunk/hcatalog/core/.gitignore Thu Oct 31 18:27:31 2013
@@ -0,0 +1 @@
+mapred

Added: hive/trunk/hcatalog/core/pom-new.xml
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/pom-new.xml?rev=1537576&view=auto
==============================================================================
--- hive/trunk/hcatalog/core/pom-new.xml (added)
+++ hive/trunk/hcatalog/core/pom-new.xml Thu Oct 31 18:27:31 2013
@@ -0,0 +1,177 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- (Apache License 2.0 header elided by the archive) -->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.hive.hcatalog</groupId>
+    <artifactId>hive-hcatalog</artifactId>
+    <version>0.13.0-SNAPSHOT</version>
+    <relativePath>../pom.xml</relativePath>
+  </parent>
+
+  <artifactId>hive-hcatalog-core</artifactId>
+  <packaging>jar</packaging>
+  <name>Hive HCatalog Core</name>
+
+  <properties>
+    <hive.path.to.root>../..</hive.path.to.root>
+  </properties>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-cli</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-metastore</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-exec</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>com.google.guava</groupId>
+      <artifactId>guava</artifactId>
+      <version>${guava.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.codehaus.jackson</groupId>
+      <artifactId>jackson-mapper-asl</artifactId>
+      <version>${jackson.version}</version>
+    </dependency>
+  </dependencies>
+
+  <profiles>
+    <profile>
+      <id>hadoop-1</id>
+      <activation>
+        <activeByDefault>true</activeByDefault>
+      </activation>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-core</artifactId>
+          <version>${hadoop-20S.version}</version>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-tools</artifactId>
+          <version>${hadoop-20S.version}</version>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-test</artifactId>
+          <version>${hadoop-20S.version}</version>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.pig</groupId>
+          <artifactId>pig</artifactId>
+          <version>${pig.version}</version>
+          <scope>test</scope>
+        </dependency>
+      </dependencies>
+    </profile>
+    <profile>
+      <id>hadoop-2</id>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-annotations</artifactId>
+          <version>${hadoop-23.version}</version>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-archives</artifactId>
+          <version>${hadoop-23.version}</version>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-common</artifactId>
+          <version>${hadoop-23.version}</version>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-mapreduce-client-core</artifactId>
+          <version>${hadoop-23.version}</version>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-hdfs</artifactId>
+          <version>${hadoop-23.version}</version>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-hdfs</artifactId>
+          <version>${hadoop-23.version}</version>
+          <classifier>tests</classifier>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
+          <version>${hadoop-23.version}</version>
+          <classifier>tests</classifier>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.pig</groupId>
+          <artifactId>pig</artifactId>
+          <version>${pig.version}</version>
+          <classifier>h2</classifier>
+          <scope>test</scope>
+        </dependency>
+      </dependencies>
+    </profile>
+  </profiles>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-jar-plugin</artifactId>
+        <executions>
+          <execution>
+            <goals>
+              <goal>test-jar</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+</project>

Modified: hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatMultiOutputFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatMultiOutputFormat.java?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatMultiOutputFormat.java (original)
+++ hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatMultiOutputFormat.java Thu Oct 31 18:27:31 2013
@@ -108,6 +108,7 @@ public class TestHCatMultiOutputFormat {
       HiveMetaStore.main(new String[]{"-v", "-p", msPort, "--hiveconf", warehouseConf});
     } catch (Throwable t) {
       System.err.println("Exiting. 
Got exception from metastore: " + t.getMessage()); + t.printStackTrace(); } } @@ -163,13 +164,14 @@ public class TestHCatMultiOutputFormat { @BeforeClass public static void setup() throws Exception { - String testDir = System.getProperty("test.data.dir", "./"); + System.clearProperty("mapred.job.tracker"); + String testDir = System.getProperty("test.tmp.dir", "./"); testDir = testDir + "/test_multitable_" + Math.abs(new Random().nextLong()) + "/"; workDir = new File(new File(testDir).getCanonicalPath()); FileUtil.fullyDelete(workDir); workDir.mkdirs(); - warehousedir = new Path(workDir + "/warehouse"); + warehousedir = new Path(System.getProperty("test.warehouse.dir")); // Run hive metastore server t = new Thread(new RunMS()); @@ -186,9 +188,10 @@ public class TestHCatMultiOutputFormat { mrCluster = new MiniMRCluster(1, fs.getUri().toString(), 1, null, null, new JobConf(conf)); mrConf = mrCluster.createJobConf(); - fs.mkdirs(warehousedir); initializeSetup(); + + warehousedir.getFileSystem(hiveConf).mkdirs(warehousedir); } private static void initializeSetup() throws Exception { @@ -251,14 +254,15 @@ public class TestHCatMultiOutputFormat { tbl.setPartitionKeys(ColumnHolder.partitionCols); hmsc.createTable(tbl); - FileSystem fs = FileSystem.get(mrConf); - fs.setPermission(new Path(warehousedir, tableName), new FsPermission(tablePerm)); + Path path = new Path(warehousedir, tableName); + FileSystem fs = path.getFileSystem(hiveConf); + fs.setPermission(path, new FsPermission(tablePerm)); } @AfterClass public static void tearDown() throws IOException { FileUtil.fullyDelete(workDir); - FileSystem fs = FileSystem.get(mrConf); + FileSystem fs = warehousedir.getFileSystem(hiveConf); if (fs.exists(warehousedir)) { fs.delete(warehousedir, true); } @@ -367,14 +371,14 @@ public class TestHCatMultiOutputFormat { * @throws Exception if any error occurs */ private List getTableData(String table, String database) throws Exception { - HiveConf conf = new HiveConf(); - conf.addResource("hive-site.xml"); ArrayList results = new ArrayList(); ArrayList temp = new ArrayList(); - Hive hive = Hive.get(conf); + Hive hive = Hive.get(hiveConf); org.apache.hadoop.hive.ql.metadata.Table tbl = hive.getTable(database, table); FetchWork work; - if (!tbl.getPartCols().isEmpty()) { + if (tbl.getPartCols().isEmpty()) { + work = new FetchWork(tbl.getDataLocation().toString(), Utilities.getTableDesc(tbl)); + } else { List partitions = hive.getPartitions(tbl); List partDesc = new ArrayList(); List partLocs = new ArrayList(); @@ -384,12 +388,10 @@ public class TestHCatMultiOutputFormat { } work = new FetchWork(partLocs, partDesc, Utilities.getTableDesc(tbl)); work.setLimit(100); - } else { - work = new FetchWork(tbl.getDataLocation().toString(), Utilities.getTableDesc(tbl)); } FetchTask task = new FetchTask(); task.setWork(work); - task.initialize(conf, null, null); + task.initialize(hiveConf, null, null); task.fetch(temp); for (String str : temp) { results.add(str.replace("\t", ",")); Modified: hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatPartitionPublish.java URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatPartitionPublish.java?rev=1537576&r1=1537575&r2=1537576&view=diff ============================================================================== --- hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatPartitionPublish.java (original) +++ 
Modified: hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatPartitionPublish.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatPartitionPublish.java?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatPartitionPublish.java (original)
+++ hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatPartitionPublish.java Thu Oct 31 18:27:31 2013
@@ -51,6 +51,7 @@ import org.apache.hadoop.mapred.MiniMRCl
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
+import org.apache.hadoop.util.Shell;
 import org.apache.hcatalog.NoExitSecurityManager;
 import org.apache.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
 import org.apache.hcatalog.data.DefaultHCatRecord;
@@ -74,10 +75,10 @@ public class TestHCatPartitionPublish {
   private static HiveConf hcatConf;
   private static HiveMetaStoreClient msc;
   private static SecurityManager securityManager;
+  private static Configuration conf = new Configuration(true);
 
   @BeforeClass
   public static void setup() throws Exception {
-    Configuration conf = new Configuration(true);
     conf.set("yarn.scheduler.capacity.root.queues", "default");
     conf.set("yarn.scheduler.capacity.root.default.capacity", "100");
 
@@ -150,10 +151,13 @@ public class TestHCatPartitionPublish {
     Assert.assertEquals(0, ptns.size());
     Table table = msc.getTable(dbName, tableName);
     Assert.assertTrue(table != null);
-    // Also make sure that the directory has been deleted in the table
-    // location.
-    Assert.assertFalse(fs.exists(new Path(table.getSd().getLocation()
-        + "/part1=p1value1/part0=p0value1")));
+    // On Windows, we cannot remove the output directory when the job fails. See
+    // FileOutputCommitterContainer.abortJob
+    if (!Shell.WINDOWS) {
+      Path path = new Path(table.getSd().getLocation()
+          + "/part1=p1value1/part0=p0value1");
+      Assert.assertFalse(path.getFileSystem(conf).exists(path));
+    }
   }
 
   void runMRCreateFail(

Modified: hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestMultiOutputFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestMultiOutputFormat.java?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestMultiOutputFormat.java (original)
+++ hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestMultiOutputFormat.java Thu Oct 31 18:27:31 2013
@@ -88,7 +88,7 @@ public class TestMultiOutputFormat {
   }
 
   private static void createWorkDir() throws IOException {
-    String testDir = System.getProperty("test.data.dir", "./");
+    String testDir = System.getProperty("test.tmp.dir", "./");
     testDir = testDir + "/test_multiout_" + Math.abs(new Random().nextLong()) + "/";
     workDir = new File(new File(testDir).getCanonicalPath());
     FileUtil.fullyDelete(workDir);
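The new guard relies on org.apache.hadoop.util.Shell.WINDOWS, a public boolean constant derived from the os.name system property. A minimal sketch of the pattern (illustrative only; the class name is made up):

    import org.apache.hadoop.util.Shell;

    public class PlatformGuardSketch {
      public static void main(String[] args) {
        // Skip assertions that expect the failed job's output directory to be
        // removed; on Windows it cannot be (see FileOutputCommitterContainer.abortJob).
        if (!Shell.WINDOWS) {
          System.out.println("non-Windows: cleanup assertions would run here");
        }
      }
    }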
Modified: hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestSequenceFileReadWrite.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestSequenceFileReadWrite.java?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestSequenceFileReadWrite.java (original)
+++ hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestSequenceFileReadWrite.java Thu Oct 31 18:27:31 2013
@@ -69,7 +69,7 @@ public class TestSequenceFileReadWrite {
 
   @Before
   public void setup() throws Exception {
-    dataDir = new File(System.getProperty("java.io.tmpdir") + File.separator +
+    dataDir = new File(System.getProperty("java.io.tmpdir") + File.separator +
         TestSequenceFileReadWrite.class.getCanonicalName() + "-" + System.currentTimeMillis());
     hiveConf = new HiveConf(this.getClass());
     warehouseDir = new File(dataDir, "warehouse").getAbsolutePath();
@@ -102,7 +102,7 @@ public class TestSequenceFileReadWrite {
       FileUtils.deleteDirectory(dataDir);
     }
   }
-  
+
   @Test
   public void testSequenceTableWriteRead() throws Exception {
     String createTable = "CREATE TABLE demo_table(a0 int, a1 String, a2 String) STORED AS SEQUENCEFILE";

Modified: hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/rcfile/TestRCFileMapReduceInputFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/rcfile/TestRCFileMapReduceInputFormat.java?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/rcfile/TestRCFileMapReduceInputFormat.java (original)
+++ hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/rcfile/TestRCFileMapReduceInputFormat.java Thu Oct 31 18:27:31 2013
@@ -68,7 +68,7 @@ public class TestRCFileMapReduceInputFor
   static {
     try {
       fs = FileSystem.getLocal(conf);
-      Path dir = new Path(System.getProperty("test.data.dir", ".") + "/mapred");
+      Path dir = new Path(System.getProperty("test.tmp.dir", ".") + "/mapred");
       file = new Path(dir, "test_rcfile");
       fs.delete(dir, true);
       // the SerDe part is from TestLazySimpleSerDe
@@ -203,7 +203,7 @@ public class TestRCFileMapReduceInputFor
   private void writeThenReadByRecordReader(int intervalRecordCount, int writeCount,
       int splitNumber, long maxSplitSize, CompressionCodec codec)
       throws IOException, InterruptedException {
-    Path testDir = new Path(System.getProperty("test.data.dir", ".")
+    Path testDir = new Path(System.getProperty("test.tmp.dir", ".")
         + "/mapred/testsmallfirstsplit");
     Path testFile = new Path(testDir, "test_rcfile");
     fs.delete(testFile, true);

Modified: hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/MiniCluster.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/MiniCluster.java?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/MiniCluster.java (original)
+++ hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/MiniCluster.java Thu Oct 31 18:27:31 2013
@@ -62,7 +62,9 @@ public class MiniCluster {
       Configuration config = new Configuration();
 
       // Builds and starts the mini dfs and mapreduce clusters
-      System.setProperty("hadoop.log.dir", ".");
+      if(System.getProperty("hadoop.log.dir") == null) {
+        System.setProperty("hadoop.log.dir", "target/tmp/logs/");
+      }
       m_dfs = new MiniDFSCluster(config, dataNodes, true, null);
 
       m_fileSys = m_dfs.getFileSystem();
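The MiniCluster change above applies a "default only if unset" rule, so a hadoop.log.dir supplied by the build (presumably via the surefire configuration, though that lies outside this hunk) is no longer clobbered. The pattern, sketched (class name made up):

    public class PropertyDefaultSketch {
      public static void main(String[] args) {
        // Respect a value set by the harness; fall back only when absent.
        if (System.getProperty("hadoop.log.dir") == null) {
          System.setProperty("hadoop.log.dir", "target/tmp/logs/");
        }
        System.out.println(System.getProperty("hadoop.log.dir"));
      }
    }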
Modified: hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java?rev=1537576&r1=1537575&r2=1537576&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java (original)
+++ hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java Thu Oct 31 18:27:31 2013
@@ -105,6 +105,7 @@ public class TestHCatMultiOutputFormat {
         HiveMetaStore.main(new String[]{"-v", "-p", msPort, "--hiveconf", warehouseConf});
       } catch (Throwable t) {
         System.err.println("Exiting. Got exception from metastore: " + t.getMessage());
+        t.printStackTrace();
      }
     }
 
@@ -160,13 +161,14 @@ public class TestHCatMultiOutputFormat {
 
   @BeforeClass
   public static void setup() throws Exception {
-    String testDir = System.getProperty("test.data.dir", "./");
+    System.clearProperty("mapred.job.tracker");
+    String testDir = System.getProperty("test.tmp.dir", "./");
     testDir = testDir + "/test_multitable_" + Math.abs(new Random().nextLong()) + "/";
     workDir = new File(new File(testDir).getCanonicalPath());
     FileUtil.fullyDelete(workDir);
     workDir.mkdirs();
 
-    warehousedir = new Path(workDir + "/warehouse");
+    warehousedir = new Path(System.getProperty("test.warehouse.dir"));
 
     // Run hive metastore server
     t = new Thread(new RunMS());
@@ -183,9 +185,10 @@ public class TestHCatMultiOutputFormat {
     mrCluster = new MiniMRCluster(1, fs.getUri().toString(), 1, null, null,
         new JobConf(conf));
     mrConf = mrCluster.createJobConf();
-    fs.mkdirs(warehousedir);
 
     initializeSetup();
+
+    warehousedir.getFileSystem(conf).mkdirs(warehousedir);
   }
 
   private static void initializeSetup() throws Exception {
@@ -248,14 +251,15 @@ public class TestHCatMultiOutputFormat {
     tbl.setPartitionKeys(ColumnHolder.partitionCols);
 
     hmsc.createTable(tbl);
-    FileSystem fs = FileSystem.get(mrConf);
-    fs.setPermission(new Path(warehousedir, tableName), new FsPermission(tablePerm));
+    Path path = new Path(warehousedir, tableName);
+    FileSystem fs = path.getFileSystem(hiveConf);
+    fs.setPermission(path, new FsPermission(tablePerm));
   }
 
   @AfterClass
   public static void tearDown() throws IOException {
     FileUtil.fullyDelete(workDir);
-    FileSystem fs = FileSystem.get(mrConf);
+    FileSystem fs = warehousedir.getFileSystem(hiveConf);
     if (fs.exists(warehousedir)) {
       fs.delete(warehousedir, true);
     }
@@ -312,7 +316,8 @@ public class TestHCatMultiOutputFormat {
     Assert.assertEquals("Comparing output of table " +
         tableNames[0] + " is not correct", outputs.get(0), "a,a,1,ag");
     Assert.assertEquals("Comparing output of table " +
-        tableNames[1] + " is not correct", outputs.get(1), "a,1,ag");
+        tableNames[1] + " is not correct", outputs.get(1),
+        "a,1,ag");
     Assert.assertEquals("Comparing output of table " +
         tableNames[2] + " is not correct", outputs.get(2), "a,a,extra,1,ag");
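Taken together, the test changes in this part move scratch space from test.data.dir to test.tmp.dir and read the warehouse location from test.warehouse.dir, both expected to be supplied by the Maven build. A sketch of the convention as the tests consume it (the fallback for test.warehouse.dir is illustrative only; the patch itself supplies no default):

    public class TestDirsSketch {
      public static void main(String[] args) {
        String tmpDir = System.getProperty("test.tmp.dir", "./");
        // Assumed fallback for illustration; TestHCatMultiOutputFormat uses the
        // property without a default and relies on the build to set it.
        String warehouseDir = System.getProperty("test.warehouse.dir", tmpDir + "/warehouse");
        System.out.println("scratch:   " + tmpDir);
        System.out.println("warehouse: " + warehouseDir);
      }
    }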