hive-commits mailing list archives

From: c..@apache.org
Subject: svn commit: r1409752 [1/4] - in /hive/branches/branch-0.9: ./ common/ eclipse-templates/ hbase-handler/ ivy/ ql/ ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/test/org/apache/hadoop/hive/ql/ ql/src/test/queries/clientnegative/ ql/src/test/queries/...
Date: Thu, 15 Nov 2012 12:26:53 GMT
Author: cws
Date: Thu Nov 15 12:26:45 2012
New Revision: 1409752

URL: http://svn.apache.org/viewvc?rev=1409752&view=rev
Log:
HIVE-3437. 0.23 compatibility: fix unit tests when building against 0.23 (Chris Drome via cws)

Added:
    hive/branches/branch-0.9/ql/src/test/queries/clientnegative/local_mapred_error_cache_hadoop20.q
    hive/branches/branch-0.9/ql/src/test/queries/clientnegative/mapreduce_stack_trace_hadoop20.q
    hive/branches/branch-0.9/ql/src/test/queries/clientpositive/auto_join14_hadoop20.q
    hive/branches/branch-0.9/ql/src/test/queries/clientpositive/combine2_hadoop20.q
    hive/branches/branch-0.9/ql/src/test/queries/clientpositive/ctas_hadoop20.q
    hive/branches/branch-0.9/ql/src/test/queries/clientpositive/input12_hadoop20.q
    hive/branches/branch-0.9/ql/src/test/queries/clientpositive/input39_hadoop20.q
    hive/branches/branch-0.9/ql/src/test/queries/clientpositive/join14_hadoop20.q
    hive/branches/branch-0.9/ql/src/test/queries/clientpositive/repair_hadoop20.q
    hive/branches/branch-0.9/ql/src/test/queries/clientpositive/sample_islocalmode_hook_hadoop20.q
    hive/branches/branch-0.9/ql/src/test/resources/
    hive/branches/branch-0.9/ql/src/test/resources/core-site.xml   (with props)
    hive/branches/branch-0.9/ql/src/test/results/clientnegative/local_mapred_error_cache_hadoop20.q.out
    hive/branches/branch-0.9/ql/src/test/results/clientnegative/mapreduce_stack_trace_hadoop20.q.out
    hive/branches/branch-0.9/ql/src/test/results/clientpositive/auto_join14_hadoop20.q.out
    hive/branches/branch-0.9/ql/src/test/results/clientpositive/combine2_hadoop20.q.out
    hive/branches/branch-0.9/ql/src/test/results/clientpositive/ctas_hadoop20.q.out
    hive/branches/branch-0.9/ql/src/test/results/clientpositive/input12_hadoop20.q.out
    hive/branches/branch-0.9/ql/src/test/results/clientpositive/input39_hadoop20.q.out
    hive/branches/branch-0.9/ql/src/test/results/clientpositive/join14_hadoop20.q.out
    hive/branches/branch-0.9/ql/src/test/results/clientpositive/repair_hadoop20.q.out
    hive/branches/branch-0.9/ql/src/test/results/clientpositive/sample_islocalmode_hook_hadoop20.q.out
Modified:
    hive/branches/branch-0.9/build-common.xml
    hive/branches/branch-0.9/build.properties
    hive/branches/branch-0.9/common/ivy.xml
    hive/branches/branch-0.9/eclipse-templates/.classpath
    hive/branches/branch-0.9/hbase-handler/ivy.xml
    hive/branches/branch-0.9/ivy/libraries.properties
    hive/branches/branch-0.9/ql/ivy.xml
    hive/branches/branch-0.9/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java
    hive/branches/branch-0.9/ql/src/java/org/apache/hadoop/hive/ql/exec/JobDebugger.java
    hive/branches/branch-0.9/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
    hive/branches/branch-0.9/ql/src/test/queries/clientnegative/autolocal1.q
    hive/branches/branch-0.9/ql/src/test/queries/clientnegative/local_mapred_error_cache.q
    hive/branches/branch-0.9/ql/src/test/queries/clientnegative/mapreduce_stack_trace.q
    hive/branches/branch-0.9/ql/src/test/queries/clientpositive/auto_join14.q
    hive/branches/branch-0.9/ql/src/test/queries/clientpositive/combine2.q
    hive/branches/branch-0.9/ql/src/test/queries/clientpositive/ctas.q
    hive/branches/branch-0.9/ql/src/test/queries/clientpositive/groupby7_noskew_multi_single_reducer.q
    hive/branches/branch-0.9/ql/src/test/queries/clientpositive/groupby_complex_types_multi_single_reducer.q
    hive/branches/branch-0.9/ql/src/test/queries/clientpositive/groupby_multi_single_reducer.q
    hive/branches/branch-0.9/ql/src/test/queries/clientpositive/input12.q
    hive/branches/branch-0.9/ql/src/test/queries/clientpositive/input39.q
    hive/branches/branch-0.9/ql/src/test/queries/clientpositive/join14.q
    hive/branches/branch-0.9/ql/src/test/queries/clientpositive/leftsemijoin.q
    hive/branches/branch-0.9/ql/src/test/queries/clientpositive/query_properties.q
    hive/branches/branch-0.9/ql/src/test/queries/clientpositive/repair.q
    hive/branches/branch-0.9/ql/src/test/queries/clientpositive/sample_islocalmode_hook.q
    hive/branches/branch-0.9/ql/src/test/queries/clientpositive/split_sample.q
    hive/branches/branch-0.9/ql/src/test/results/clientnegative/autolocal1.q.out
    hive/branches/branch-0.9/ql/src/test/results/clientnegative/local_mapred_error_cache.q.out
    hive/branches/branch-0.9/ql/src/test/results/clientnegative/mapreduce_stack_trace.q.out
    hive/branches/branch-0.9/ql/src/test/results/clientpositive/auto_join14.q.out
    hive/branches/branch-0.9/ql/src/test/results/clientpositive/combine2.q.out
    hive/branches/branch-0.9/ql/src/test/results/clientpositive/ctas.q.out
    hive/branches/branch-0.9/ql/src/test/results/clientpositive/groupby7_noskew_multi_single_reducer.q.out
    hive/branches/branch-0.9/ql/src/test/results/clientpositive/groupby_complex_types_multi_single_reducer.q.out
    hive/branches/branch-0.9/ql/src/test/results/clientpositive/groupby_multi_single_reducer.q.out
    hive/branches/branch-0.9/ql/src/test/results/clientpositive/input12.q.out
    hive/branches/branch-0.9/ql/src/test/results/clientpositive/input39.q.out
    hive/branches/branch-0.9/ql/src/test/results/clientpositive/join14.q.out
    hive/branches/branch-0.9/ql/src/test/results/clientpositive/leftsemijoin.q.out
    hive/branches/branch-0.9/ql/src/test/results/clientpositive/query_properties.q.out
    hive/branches/branch-0.9/ql/src/test/results/clientpositive/repair.q.out
    hive/branches/branch-0.9/ql/src/test/results/clientpositive/sample_islocalmode_hook.q.out
    hive/branches/branch-0.9/ql/src/test/results/clientpositive/split_sample.q.out
    hive/branches/branch-0.9/service/src/test/org/apache/hadoop/hive/service/TestHiveServerSessions.java
    hive/branches/branch-0.9/shims/build.xml
    hive/branches/branch-0.9/shims/ivy.xml
    hive/branches/branch-0.9/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
    hive/branches/branch-0.9/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
    hive/branches/branch-0.9/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
    hive/branches/branch-0.9/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java
    hive/branches/branch-0.9/shims/src/common/java/org/apache/hadoop/hive/shims/ShimLoader.java

Modified: hive/branches/branch-0.9/build-common.xml
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.9/build-common.xml?rev=1409752&r1=1409751&r2=1409752&view=diff
==============================================================================
--- hive/branches/branch-0.9/build-common.xml (original)
+++ hive/branches/branch-0.9/build-common.xml Thu Nov 15 12:26:45 2012
@@ -55,11 +55,11 @@
   <property name="test.include" value="Test*"/>
   <property name="test.classpath.id" value="test.classpath"/>
   <property name="test.output" value="true"/>
-  <property name="test.timeout" value="18200000"/>
+  <property name="test.timeout" value="36400000"/>
   <property name="test.junit.output.format" value="xml"/>
   <property name="test.junit.output.usefile" value="true"/>
   <property name="minimr.query.files" value="input16_cc.q,scriptfile1.q,bucket4.q,bucketmapjoin6.q,disable_merge_for_bucketing.q,reduce_deduplicate.q,smb_mapjoin_8.q,join1.q,groupby2.q,bucketizedhiveinputformat.q"/>
-  <property name="minimr.query.negative.files" value="minimr_broken_pipe.q,mapreduce_stack_trace.q,mapreduce_stack_trace_turnoff.q" />
+  <property name="minimr.query.negative.files" value="minimr_broken_pipe.q,mapreduce_stack_trace.q,mapreduce_stack_trace_hadoop20.q,mapreduce_stack_trace_turnoff.q" />
   <property name="test.silent" value="true"/>
   <property name="hadoopVersion" value="${hadoop.version.ant-internal}"/>
   <property name="test.serialize.qplan" value="false"/>
@@ -86,11 +86,18 @@
     <pathelement location="${build.dir.hive}/service/test/classes"/>
     <pathelement location="${build.dir.hive}/shims/test/classes"/>
 
-    <fileset dir="${hive.root}/build/ivy/lib/test" includes="*.jar" erroronmissingdir="false" excludes="**/hive_contrib*.jar,**/hive-contrib*.jar"/>
-    <fileset dir="${hive.root}/build/ivy/lib/default" includes="*.jar" erroronmissingdir="false" excludes="**/hive_contrib*.jar,**/hive-contrib*.jar" />
+    <fileset dir="${build.dir.hive}/dist/lib" includes="*.jar" erroronmissingdir="false" excludes="**/hive_contrib*.jar,**/hive-contrib*.jar,**/lib*.jar"/>
     <fileset dir="${hive.root}/testlibs" includes="*.jar"/>
-    <fileset dir="${hive.root}/build/ivy/lib/hadoop0.${hadoop.mr.rev}.shim" includes="*.jar" />
+    <fileset dir="${build.ivy.lib.dir}/hadoop0.${hadoop.mr.rev}.shim" includes="*.jar" erroronmissingdir="false" />
     <pathelement location="${build.classes}" />
+
+    <!-- test directory may contain hadoop jars used by tests only (e.g. mini cluster) -->
+    <fileset dir="${hive.root}/build/ivy/lib/test" includes="*.jar" erroronmissingdir="false"
+             excludes="**/hive_*.jar,**/hive-*.jar"/>
+
+    <!-- we strip out hadoop jars present in places other than the hadoop shimmed dir-->
+    <fileset dir="${hive.root}/build/ivy/lib/default" includes="*.jar" erroronmissingdir="false"
+             excludes="**/hive_*.jar,**/hive-*.jar,**/hadoop-*.jar" />
   </path>
 
   <!-- include contrib on local classpath, but not on cluster -->
@@ -177,7 +184,9 @@
     <pathelement location="${build.dir.hive}/classes"/>
     <fileset dir="${build.dir.hive}" includes="*/*.jar"/>
     <fileset dir="${hive.root}/lib" includes="*.jar"/>
+    <fileset dir="${build.ivy.lib.dir}/hadoop0.${hadoop.mr.rev}.shim" includes="*.jar" erroronmissingdir="false" />
     <fileset dir="${build.ivy.lib.dir}/default" includes="*.jar" 
+             excludes="**/hadoop-*.jar"
              erroronmissingdir="false"/>
   </path>
 
@@ -300,6 +309,9 @@
       <compilerarg line="${javac.args} ${javac.args.warnings}" />
       <classpath refid="test.classpath"/>
     </javac>
+    <!-- Generate a classpath to have YARN use downloaded dependencies. -->
+    <property name="mrapp-classpath" refid="test.classpath" />
+    <echo file="${test.build.classes}/mrapp-generated-classpath" message="${mrapp-classpath}" />
   </target>
 
   <target name="test-jar" depends="compile-test">

Modified: hive/branches/branch-0.9/build.properties
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.9/build.properties?rev=1409752&r1=1409751&r2=1409752&view=diff
==============================================================================
--- hive/branches/branch-0.9/build.properties (original)
+++ hive/branches/branch-0.9/build.properties Thu Nov 15 12:26:45 2012
@@ -28,7 +28,7 @@ javac.args.warnings=
 
 hadoop-0.20.version=0.20.2
 hadoop-0.20S.version=1.0.0
-hadoop-0.23.version=0.23.1
+hadoop-0.23.version=0.23.3
 hadoop.version=${hadoop-0.20.version}
 hadoop.security.version=${hadoop-0.20S.version}
 hadoop.mirror=http://mirror.facebook.net/facebook/hive-deps

Modified: hive/branches/branch-0.9/common/ivy.xml
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.9/common/ivy.xml?rev=1409752&r1=1409751&r2=1409752&view=diff
==============================================================================
--- hive/branches/branch-0.9/common/ivy.xml (original)
+++ hive/branches/branch-0.9/common/ivy.xml Thu Nov 15 12:26:45 2012
@@ -28,28 +28,28 @@
   </configurations>
   <dependencies>
     <dependency org="org.apache.hadoop" name="hadoop-common"
-                rev="${hadoop.version}"
+                rev="${hadoop-0.23.version}"
                 conf="hadoop23.compile->default" transitive="false">
       <include type="jar"/>
       <exclude org="commons-daemon" module="commons-daemon"/><!--bad POM-->
       <exclude org="org.apache.commons" module="commons-daemon"/><!--bad POM-->
     </dependency>
     <dependency org="org.apache.hadoop" name="hadoop-auth"
-                rev="${hadoop.version}"
+                rev="${hadoop-0.23.version}"
                 conf="hadoop23.compile->default" transitive="false">
       <include type="jar"/>
       <exclude org="commons-daemon" module="commons-daemon"/><!--bad POM-->
       <exclude org="org.apache.commons" module="commons-daemon"/><!--bad POM-->
     </dependency>
     <dependency org="org.apache.hadoop" name="hadoop-mapreduce-client-core"
-                rev="${hadoop.version}"
+                rev="${hadoop-0.23.version}"
                 conf="hadoop23.compile->default" transitive="false">
       <include type="jar"/>
       <exclude org="commons-daemon" module="commons-daemon"/><!--bad POM-->
       <exclude org="org.apache.commons" module="commons-daemon"/><!--bad POM-->
     </dependency>
     <dependency org="org.apache.hadoop" name="hadoop-archives"
-                rev="${hadoop.version}"
+                rev="${hadoop-0.23.version}"
                 conf="hadoop23.compile->default" transitive="false">
       <include type="jar"/>
       <exclude org="commons-daemon" module="commons-daemon"/><!--bad POM-->
@@ -59,7 +59,7 @@
                 conf="hadoop23.compile->default" transitive="false" />
 
     <dependency org="org.apache.hadoop" name="hadoop-core"
-                rev="${hadoop.version}"
+                rev="${hadoop-0.20.version}"
                 conf="hadoop20.compile->default" transitive="false">
       <include type="jar"/>
       <exclude org="commons-daemon" module="commons-daemon"/><!--bad POM-->
@@ -67,14 +67,14 @@
     </dependency>
 
     <dependency org="org.apache.hadoop" name="hadoop-test"
-                rev="${hadoop.version}"
+                rev="${hadoop-0.20.version}"
                 conf="hadoop20.compile->default" transitive="false">
       <include type="jar"/>
       <exclude org="commons-daemon" module="commons-daemon"/><!--bad POM-->
       <exclude org="org.apache.commons" module="commons-daemon"/><!--bad POM-->
     </dependency>
     <dependency org="org.apache.hadoop" name="hadoop-tools"
-                rev="${hadoop.version}"
+                rev="${hadoop-0.20.version}"
                 conf="hadoop20.compile->default" transitive="false">
       <include type="jar"/>
       <exclude org="commons-daemon" module="commons-daemon"/><!--bad POM-->

Modified: hive/branches/branch-0.9/eclipse-templates/.classpath
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.9/eclipse-templates/.classpath?rev=1409752&r1=1409751&r2=1409752&view=diff
==============================================================================
--- hive/branches/branch-0.9/eclipse-templates/.classpath (original)
+++ hive/branches/branch-0.9/eclipse-templates/.classpath Thu Nov 15 12:26:45 2012
@@ -32,7 +32,7 @@
   <classpathentry kind="lib" path="build/ivy/lib/default/commons-logging-@commons-logging.version@.jar"/>
   <classpathentry kind="lib" path="build/ivy/lib/default/commons-logging-api-@commons-logging-api.version@.jar"/>
   <classpathentry kind="lib" path="build/ivy/lib/default/derby-@derby.version@.jar"/>
-  <classpathentry kind="lib" path="build/ivy/lib/default/guava-@guava.version@.jar"/>
+  <classpathentry kind="lib" path="build/ivy/lib/default/guava-@guava-hadoop20.version@.jar"/>
   <classpathentry kind="lib" path="build/ivy/lib/default/hbase-@hbase.version@.jar"/>
   <classpathentry kind="lib" path="build/ivy/lib/default/hbase-@hbase-test.version@-tests.jar"/>
   <classpathentry kind="lib" path="build/ivy/lib/default/libfb303-@libfb303.version@.jar"/>

Modified: hive/branches/branch-0.9/hbase-handler/ivy.xml
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.9/hbase-handler/ivy.xml?rev=1409752&r1=1409751&r2=1409752&view=diff
==============================================================================
--- hive/branches/branch-0.9/hbase-handler/ivy.xml (original)
+++ hive/branches/branch-0.9/hbase-handler/ivy.xml Thu Nov 15 12:26:45 2012
@@ -46,5 +46,34 @@
                 transitive="false"/>
     <dependency org="org.codehaus.jackson" name="jackson-jaxrs" rev="${jackson.version}"/>
     <dependency org="org.codehaus.jackson" name="jackson-xc" rev="${jackson.version}"/>
+
+    <dependency org="org.apache.hadoop" name="hadoop-mapreduce-client-jobclient" rev="${hadoop.version.ant-internal}"
+                conf="hadoop23.test->default" transitive="false">
+      <artifact name="hadoop-mapreduce-client-jobclient" ext="jar" />
+      <artifact name="hadoop-mapreduce-client-jobclient" type="tests" ext="jar" m:classifier="tests"/>
+      <exclude org="commons-daemon" module="commons-daemon"/><!--bad POM-->
+      <exclude org="org.apache.commons" module="commons-daemon"/><!--bad POM-->
+    </dependency>
+    <dependency org="org.apache.hadoop" name="hadoop-yarn-server-tests"
+                rev="${hadoop-0.23.version}"
+                conf="hadoop23.test->default">
+      <artifact name="hadoop-yarn-server-tests" type="tests" ext="jar" m:classifier="tests"/>
+      <exclude org="commons-daemon" module="commons-daemon"/><!--bad POM-->
+      <exclude org="org.apache.commons" module="commons-daemon"/><!--bad POM-->
+    </dependency>
+    <dependency org="org.apache.hadoop" name="hadoop-mapreduce-client-app"
+                rev="${hadoop-0.23.version}"
+                conf="hadoop23.test->default">
+      <include type="jar"/>
+      <exclude org="commons-daemon" module="commons-daemon"/><!--bad POM-->
+      <exclude org="org.apache.commons" module="commons-daemon"/><!--bad POM-->
+    </dependency>
+    <dependency org="org.apache.hadoop" name="hadoop-mapreduce-client-hs"
+                rev="${hadoop-0.23.version}"
+                conf="hadoop23.test->default">
+      <include type="jar"/>
+      <exclude org="commons-daemon" module="commons-daemon"/><!--bad POM-->
+      <exclude org="org.apache.commons" module="commons-daemon"/><!--bad POM-->
+    </dependency>
   </dependencies>
 </ivy-module>

Modified: hive/branches/branch-0.9/ivy/libraries.properties
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.9/ivy/libraries.properties?rev=1409752&r1=1409751&r2=1409752&view=diff
==============================================================================
--- hive/branches/branch-0.9/ivy/libraries.properties (original)
+++ hive/branches/branch-0.9/ivy/libraries.properties Thu Nov 15 12:26:45 2012
@@ -40,7 +40,8 @@ commons-logging.version=1.0.4
 commons-logging-api.version=1.0.4
 commons-pool.version=1.5.4
 derby.version=10.4.2.0
-guava.version=r09
+guava-hadoop20.version=r09
+guava-hadoop23.version=11.0.2
 hbase.version=0.92.0
 jackson.version=1.8.8
 javaewah.version=0.3.2

Modified: hive/branches/branch-0.9/ql/ivy.xml
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.9/ql/ivy.xml?rev=1409752&r1=1409751&r2=1409752&view=diff
==============================================================================
--- hive/branches/branch-0.9/ql/ivy.xml (original)
+++ hive/branches/branch-0.9/ql/ivy.xml Thu Nov 15 12:26:45 2012
@@ -39,6 +39,12 @@
     <dependency org="org.antlr" name="antlr" rev="${antlr.version}"
                 transitive="false"/>
 
+    <!-- hadoop specific guava -->
+    <dependency org="com.google.guava" name="guava" rev="${guava-hadoop20.version}"
+                conf="hadoop20.compile->default" transitive="false"/>
+    <dependency org="com.google.guava" name="guava" rev="${guava-hadoop23.version}"
+                conf="hadoop23.compile->default" transitive="false"/>
+
     <dependency org="org.antlr" name="antlr-runtime" rev="${antlr-runtime.version}"/>
     <dependency org="org.slf4j" name="slf4j-api" rev="${slf4j-api.version}"/>
     <dependency org="org.slf4j" name="slf4j-log4j12" rev="${slf4j-log4j12.version}"
@@ -47,8 +53,6 @@
                 rev="${zookeeper.version}" transitive="false">
       <include type="jar"/>
     </dependency>
-    <dependency org="com.google.guava" name="guava" rev="${guava.version}"
-                transitive="false"/>
     <dependency org="org.apache.thrift" name="libthrift" rev="${libthrift.version}"
                 transitive="false"/>
     <dependency org="log4j" name="log4j" rev="${log4j.version}" transitive="false"/>
@@ -82,5 +86,26 @@
       <exclude org="commons-daemon" module="commons-daemon"/><!--bad POM-->
       <exclude org="org.apache.commons" module="commons-daemon"/><!--bad POM-->
     </dependency>
+    <dependency org="org.apache.hadoop" name="hadoop-yarn-server-tests"
+                rev="${hadoop-0.23.version}"
+                conf="hadoop23.test->default">
+      <artifact name="hadoop-yarn-server-tests" type="tests" ext="jar" m:classifier="tests"/>
+      <exclude org="commons-daemon" module="commons-daemon"/><!--bad POM-->
+      <exclude org="org.apache.commons" module="commons-daemon"/><!--bad POM-->
+    </dependency>
+    <dependency org="org.apache.hadoop" name="hadoop-mapreduce-client-app"
+                rev="${hadoop-0.23.version}"
+                conf="hadoop23.test->default">
+      <include type="jar"/>
+      <exclude org="commons-daemon" module="commons-daemon"/><!--bad POM-->
+      <exclude org="org.apache.commons" module="commons-daemon"/><!--bad POM-->
+    </dependency>
+    <dependency org="org.apache.hadoop" name="hadoop-mapreduce-client-hs"
+                rev="${hadoop-0.23.version}"
+                conf="hadoop23.test->default">
+      <include type="jar"/>
+      <exclude org="commons-daemon" module="commons-daemon"/><!--bad POM-->
+      <exclude org="org.apache.commons" module="commons-daemon"/><!--bad POM-->
+    </dependency>
   </dependencies>
 </ivy-module>

Modified: hive/branches/branch-0.9/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.9/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java?rev=1409752&r1=1409751&r2=1409752&view=diff
==============================================================================
--- hive/branches/branch-0.9/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java (original)
+++ hive/branches/branch-0.9/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java Thu Nov 15 12:26:45 2012
@@ -541,7 +541,11 @@ public class HadoopJobExecHelper {
         }
         // These tasks should have come from the same job.
         assert (ti.getJobId() != null && ti.getJobId().equals(jobId));
-        ti.getLogUrls().add(getTaskAttemptLogUrl(t.getTaskTrackerHttp(), t.getTaskId()));
+        String taskAttemptLogUrl = ShimLoader.getHadoopShims().getTaskAttemptLogUrl(
+                conf, t.getTaskTrackerHttp(), t.getTaskId());
+        if (taskAttemptLogUrl != null) {
+            ti.getLogUrls().add(taskAttemptLogUrl);
+        }
 
         // If a task failed, then keep track of the total number of failures
         // for that task (typically, a task gets re-run up to 4 times if it

Modified: hive/branches/branch-0.9/ql/src/java/org/apache/hadoop/hive/ql/exec/JobDebugger.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.9/ql/src/java/org/apache/hadoop/hive/ql/exec/JobDebugger.java?rev=1409752&r1=1409751&r2=1409752&view=diff
==============================================================================
--- hive/branches/branch-0.9/ql/src/java/org/apache/hadoop/hive/ql/exec/JobDebugger.java (original)
+++ hive/branches/branch-0.9/ql/src/java/org/apache/hadoop/hive/ql/exec/JobDebugger.java Thu Nov 15 12:26:45 2012
@@ -146,7 +146,11 @@ public class JobDebugger implements Runn
           }
           // These tasks should have come from the same job.
           assert (ti.getJobId() != null &&  ti.getJobId().equals(jobId));
-          ti.getLogUrls().add(getTaskAttemptLogUrl(t.getTaskTrackerHttp(), t.getTaskId()));
+          String taskAttemptLogUrl = ShimLoader.getHadoopShims().getTaskAttemptLogUrl(
+                  conf, t.getTaskTrackerHttp(), t.getTaskId());
+          if (taskAttemptLogUrl != null) {
+              ti.getLogUrls().add(taskAttemptLogUrl);
+          }
 
           // If a task failed, then keep track of the total number of failures
           // for that task (typically, a task gets re-run up to 4 times if it

Modified: hive/branches/branch-0.9/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.9/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java?rev=1409752&r1=1409751&r2=1409752&view=diff
==============================================================================
--- hive/branches/branch-0.9/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java (original)
+++ hive/branches/branch-0.9/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java Thu Nov 15 12:26:45 2012
@@ -45,6 +45,7 @@ import java.util.TreeMap;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
+import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileStatus;
@@ -212,8 +213,25 @@ public class QTestUtil {
       conf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE,
                   (new Path(dfs.getFileSystem().getUri().toString(),
                             "/build/ql/test/data/warehouse/")).toString());
+      int port = 0;
+
+      try {
+        // Hadoop20 MiniMRCluster will return a proper port.
+        // Hadoop23 MiniMRCluster does not implement this method so use the default RM port.
+        port = mr.getJobTrackerPort();
+      } catch (UnsupportedOperationException e) {
+        String address =
+            StringUtils.substringAfterLast(conf.get("yarn.resourcemanager.address"), ":");
+
+        if (StringUtils.isBlank(address)) {
+          throw new IllegalArgumentException("Invalid YARN resource manager port.");
+        }
+
+        port = Integer.parseInt(address);
+      }
+
       ShimLoader.getHadoopShims().setJobLauncherRpcAddress(conf,
-          "localhost:" + mr.getJobTrackerPort());
+          "localhost:" + port);
     }
   }
 
@@ -284,43 +302,62 @@ public class QTestUtil {
     StringBuilder qsb = new StringBuilder();
 
     // Look for a hint to not run a test on some Hadoop versions
-    Pattern pattern = Pattern.compile("-- EXCLUDE_HADOOP_MAJOR_VERSIONS(.*)");
+    Pattern pattern = Pattern.compile("-- (EX|IN)CLUDE_HADOOP_MAJOR_VERSIONS\\((.*)\\)");
 
 
     // Read the entire query
     boolean excludeQuery = false;
+    boolean includeQuery = false;
+    Set<String> versionSet = new HashSet<String>();
     String hadoopVer = ShimLoader.getMajorVersion();
     while (dis.available() != 0) {
       String line = dis.readLine();
 
-      // While we are reading the lines, detect whether this query wants to be
-      // excluded from running because the Hadoop version is incorrect
+      // Each qfile may include at most one INCLUDE or EXCLUDE directive.
+      //
+      // If a qfile contains an INCLUDE directive, and hadoopVer does
+      // not appear in the list of versions to include, then the qfile
+      // is skipped.
+      //
+      // If a qfile contains an EXCLUDE directive, and hadoopVer is
+      // listed in the list of versions to EXCLUDE, then the qfile is
+      // skipped.
+      //
+      // Otherwise, the qfile is included.
       Matcher matcher = pattern.matcher(line);
       if (matcher.find()) {
-        String group = matcher.group();
-        int start = group.indexOf('(');
-        int end = group.indexOf(')');
-        assert end > start;
-        // versions might be something like '0.17, 0.19'
-        String versions = group.substring(start+1, end);
+        if (excludeQuery || includeQuery) {
+          String message = "QTestUtil: qfile " + qf.getName()
+            + " contains more than one reference to (EX|IN)CLUDE_HADOOP_MAJOR_VERSIONS";
+          throw new UnsupportedOperationException(message);
+        }
+
+        String prefix = matcher.group(1);
+        if ("EX".equals(prefix)) {
+          excludeQuery = true;
+        } else {
+          includeQuery = true;
+        }
 
-        Set<String> excludedVersionSet = new HashSet<String>();
+        String versions = matcher.group(2);
         for (String s : versions.split("\\,")) {
           s = s.trim();
-          excludedVersionSet.add(s);
-        }
-        if (excludedVersionSet.contains(hadoopVer)) {
-          excludeQuery = true;
+          versionSet.add(s);
         }
       }
       qsb.append(line + "\n");
     }
     qMap.put(qf.getName(), qsb.toString());
-    if(excludeQuery) {
-      System.out.println("Due to the Hadoop Version ("+ hadoopVer + "), " +
-          "adding query " + qf.getName() + " to the set of tests to skip");
+
+    if (excludeQuery && versionSet.contains(hadoopVer)) {
+      System.out.println("QTestUtil: " + qf.getName()
+        + " EXCLUDE list contains Hadoop Version " + hadoopVer + ". Skipping...");
       qSkipSet.add(qf.getName());
-     }
+    } else if (includeQuery && !versionSet.contains(hadoopVer)) {
+      System.out.println("QTestUtil: " + qf.getName()
+        + " INCLUDE list does not contain Hadoop Version " + hadoopVer + ". Skipping...");
+      qSkipSet.add(qf.getName());
+    }
     dis.close();
   }
 
@@ -521,6 +558,7 @@ public class QTestUtil {
     fpath = new Path(testFiles, "json.txt");
     runLoadCmd("LOAD DATA LOCAL INPATH '" + fpath.toString()
         + "' INTO TABLE src_json");
+
     conf.setBoolean("hive.test.init.phase", false);
   }
 
@@ -934,11 +972,14 @@ public class QTestUtil {
         ".*USING 'java -cp.*",
         "^Deleted.*",
     };
-    maskPatterns(patterns, (new File(logDir, tname + ".out")).getPath());
+
+    File f = new File(logDir, tname + ".out");
+
+    maskPatterns(patterns, f.getPath());
 
     cmdArray = new String[] {
         "diff", "-a",
-        (new File(logDir, tname + ".out")).getPath(),
+        f.getPath(),
         outFileName
     };
 
@@ -960,7 +1001,7 @@ public class QTestUtil {
       System.out.println("Overwriting results");
       cmdArray = new String[3];
       cmdArray[0] = "cp";
-      cmdArray[1] = (new File(logDir, tname + ".out")).getPath();
+      cmdArray[1] = f.getPath();
       cmdArray[2] = outFileName;
       executor = Runtime.getRuntime().exec(cmdArray);
       exitVal = executor.waitFor();
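
[Note: the QTestUtil change above adds the (EX|IN)CLUDE_HADOOP_MAJOR_VERSIONS qfile directives. A minimal standalone sketch of that version check, separate from the commit itself, might look like the following; the class and method names are illustrative and not from the Hive source.]

import java.util.HashSet;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class VersionDirectiveSketch {
  // Matches "-- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20)" or the INCLUDE variant,
  // mirroring the pattern introduced in QTestUtil.
  private static final Pattern DIRECTIVE =
      Pattern.compile("-- (EX|IN)CLUDE_HADOOP_MAJOR_VERSIONS\\((.*)\\)");

  /** Returns true if the qfile text should be skipped for the given Hadoop major version. */
  static boolean shouldSkip(String qfileText, String hadoopVer) {
    boolean exclude = false;
    boolean include = false;
    Set<String> versions = new HashSet<String>();

    for (String line : qfileText.split("\n")) {
      Matcher m = DIRECTIVE.matcher(line);
      if (!m.find()) {
        continue;
      }
      // At most one INCLUDE or EXCLUDE directive is allowed per qfile.
      if (exclude || include) {
        throw new UnsupportedOperationException(
            "qfile contains more than one (EX|IN)CLUDE_HADOOP_MAJOR_VERSIONS directive");
      }
      if ("EX".equals(m.group(1))) {
        exclude = true;
      } else {
        include = true;
      }
      // Versions might be something like "0.17, 0.19".
      for (String v : m.group(2).split(",")) {
        versions.add(v.trim());
      }
    }
    // EXCLUDE skips the listed versions; INCLUDE skips every version not listed.
    return (exclude && versions.contains(hadoopVer))
        || (include && !versions.contains(hadoopVer));
  }

  public static void main(String[] args) {
    String qfile = "-- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20)\nSELECT key FROM src;";
    System.out.println(shouldSkip(qfile, "0.20"));  // false: 0.20 is in the INCLUDE list
    System.out.println(shouldSkip(qfile, "0.23"));  // true:  0.23 is not listed
  }
}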

Modified: hive/branches/branch-0.9/ql/src/test/queries/clientnegative/autolocal1.q
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.9/ql/src/test/queries/clientnegative/autolocal1.q?rev=1409752&r1=1409751&r2=1409752&view=diff
==============================================================================
--- hive/branches/branch-0.9/ql/src/test/queries/clientnegative/autolocal1.q (original)
+++ hive/branches/branch-0.9/ql/src/test/queries/clientnegative/autolocal1.q Thu Nov 15 12:26:45 2012
@@ -2,4 +2,14 @@ set mapred.job.tracker=abracadabra;
 set hive.exec.mode.local.auto.inputbytes.max=1;
 set hive.exec.mode.local.auto=true;
 
+-- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20)
+-- hadoop0.23 changes the behavior of JobClient initialization
+-- in hadoop0.20, JobClient initialization tries to get JobTracker's address
+-- this throws the expected IllegalArgumentException
+-- in hadoop0.23, JobClient initialization only initializes cluster
+-- and get user group information
+-- not attempts to get JobTracker's address
+-- no IllegalArgumentException thrown in JobClient Initialization
+-- an exception is thrown when JobClient submitJob
+
 SELECT key FROM src; 

Modified: hive/branches/branch-0.9/ql/src/test/queries/clientnegative/local_mapred_error_cache.q
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.9/ql/src/test/queries/clientnegative/local_mapred_error_cache.q?rev=1409752&r1=1409751&r2=1409752&view=diff
==============================================================================
--- hive/branches/branch-0.9/ql/src/test/queries/clientnegative/local_mapred_error_cache.q (original)
+++ hive/branches/branch-0.9/ql/src/test/queries/clientnegative/local_mapred_error_cache.q Thu Nov 15 12:26:45 2012
@@ -1,4 +1,6 @@
 set hive.exec.mode.local.auto=true;
 set hive.exec.failure.hooks=org.apache.hadoop.hive.ql.hooks.VerifySessionStateLocalErrorsHook;
 
+-- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20)
+
 FROM src SELECT TRANSFORM(key, value) USING 'python ../data/scripts/cat_error.py' AS (key, value);

Added: hive/branches/branch-0.9/ql/src/test/queries/clientnegative/local_mapred_error_cache_hadoop20.q
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.9/ql/src/test/queries/clientnegative/local_mapred_error_cache_hadoop20.q?rev=1409752&view=auto
==============================================================================
--- hive/branches/branch-0.9/ql/src/test/queries/clientnegative/local_mapred_error_cache_hadoop20.q (added)
+++ hive/branches/branch-0.9/ql/src/test/queries/clientnegative/local_mapred_error_cache_hadoop20.q Thu Nov 15 12:26:45 2012
@@ -0,0 +1,6 @@
+set hive.exec.mode.local.auto=true;
+set hive.exec.failure.hooks=org.apache.hadoop.hive.ql.hooks.VerifySessionStateLocalErrorsHook;
+
+-- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20)
+
+FROM src SELECT TRANSFORM(key, value) USING 'python ../data/scripts/cat_error.py' AS (key, value);

Modified: hive/branches/branch-0.9/ql/src/test/queries/clientnegative/mapreduce_stack_trace.q
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.9/ql/src/test/queries/clientnegative/mapreduce_stack_trace.q?rev=1409752&r1=1409751&r2=1409752&view=diff
==============================================================================
--- hive/branches/branch-0.9/ql/src/test/queries/clientnegative/mapreduce_stack_trace.q (original)
+++ hive/branches/branch-0.9/ql/src/test/queries/clientnegative/mapreduce_stack_trace.q Thu Nov 15 12:26:45 2012
@@ -2,4 +2,6 @@ set hive.exec.mode.local.auto=false;
 set hive.exec.job.debug.capture.stacktraces=true;
 set hive.exec.failure.hooks=org.apache.hadoop.hive.ql.hooks.VerifySessionStateStackTracesHook;
 
+-- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20)
+
 FROM src SELECT TRANSFORM(key, value) USING 'script_does_not_exist' AS (key, value);

Added: hive/branches/branch-0.9/ql/src/test/queries/clientnegative/mapreduce_stack_trace_hadoop20.q
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.9/ql/src/test/queries/clientnegative/mapreduce_stack_trace_hadoop20.q?rev=1409752&view=auto
==============================================================================
--- hive/branches/branch-0.9/ql/src/test/queries/clientnegative/mapreduce_stack_trace_hadoop20.q (added)
+++ hive/branches/branch-0.9/ql/src/test/queries/clientnegative/mapreduce_stack_trace_hadoop20.q Thu Nov 15 12:26:45 2012
@@ -0,0 +1,7 @@
+set hive.exec.mode.local.auto=false;
+set hive.exec.job.debug.capture.stacktraces=true;
+set hive.exec.failure.hooks=org.apache.hadoop.hive.ql.hooks.VerifySessionStateStackTracesHook;
+
+-- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20)
+
+FROM src SELECT TRANSFORM(key, value) USING 'script_does_not_exist' AS (key, value);

Modified: hive/branches/branch-0.9/ql/src/test/queries/clientpositive/auto_join14.q
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.9/ql/src/test/queries/clientpositive/auto_join14.q?rev=1409752&r1=1409751&r2=1409752&view=diff
==============================================================================
--- hive/branches/branch-0.9/ql/src/test/queries/clientpositive/auto_join14.q (original)
+++ hive/branches/branch-0.9/ql/src/test/queries/clientpositive/auto_join14.q Thu Nov 15 12:26:45 2012
@@ -1,9 +1,12 @@
 
 set hive.auto.convert.join = true;
 
+-- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20)
+
 CREATE TABLE dest1(c1 INT, c2 STRING) STORED AS TEXTFILE;
 
-set mapred.job.tracker=does.notexist.com:666;
+set mapreduce.framework.name=yarn;
+set mapred.jobtracker.address=localhost:58;
 set hive.exec.mode.local.auto=true;
 
 explain

Added: hive/branches/branch-0.9/ql/src/test/queries/clientpositive/auto_join14_hadoop20.q
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.9/ql/src/test/queries/clientpositive/auto_join14_hadoop20.q?rev=1409752&view=auto
==============================================================================
--- hive/branches/branch-0.9/ql/src/test/queries/clientpositive/auto_join14_hadoop20.q (added)
+++ hive/branches/branch-0.9/ql/src/test/queries/clientpositive/auto_join14_hadoop20.q Thu Nov 15 12:26:45 2012
@@ -0,0 +1,18 @@
+
+set hive.auto.convert.join = true;
+
+-- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20)
+
+CREATE TABLE dest1(c1 INT, c2 STRING) STORED AS TEXTFILE;
+
+set mapred.job.tracker=localhost:58;
+set hive.exec.mode.local.auto=true;
+
+explain
+FROM src JOIN srcpart ON src.key = srcpart.key AND srcpart.ds = '2008-04-08' and src.key > 100
+INSERT OVERWRITE TABLE dest1 SELECT src.key, srcpart.value;
+
+FROM src JOIN srcpart ON src.key = srcpart.key AND srcpart.ds = '2008-04-08' and src.key > 100
+INSERT OVERWRITE TABLE dest1 SELECT src.key, srcpart.value;
+
+SELECT sum(hash(dest1.c1,dest1.c2)) FROM dest1;

Modified: hive/branches/branch-0.9/ql/src/test/queries/clientpositive/combine2.q
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.9/ql/src/test/queries/clientpositive/combine2.q?rev=1409752&r1=1409751&r2=1409752&view=diff
==============================================================================
--- hive/branches/branch-0.9/ql/src/test/queries/clientpositive/combine2.q (original)
+++ hive/branches/branch-0.9/ql/src/test/queries/clientpositive/combine2.q Thu Nov 15 12:26:45 2012
@@ -1,3 +1,5 @@
+USE default;
+
 set hive.input.format=org.apache.hadoop.hive.ql.io.CombineHiveInputFormat;
 set mapred.min.split.size=256;
 set mapred.min.split.size.per.node=256;
@@ -8,8 +10,18 @@ set hive.exec.dynamic.partition.mode=non
 set mapred.cache.shared.enabled=false;
 set hive.merge.smallfiles.avgsize=0;
 
+
+
 create table combine2(key string) partitioned by (value string);
 
+-- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20)
+-- This test sets mapred.max.split.size=256 and hive.merge.smallfiles.avgsize=0
+-- in an attempt to force the generation of multiple splits and multiple output files.
+-- However, Hadoop 0.20 is incapable of generating splits smaller than the block size
+-- when using CombineFileInputFormat, so only one split is generated. This has a
+-- significant impact on the results results of this test.
+-- This issue was fixed in MAPREDUCE-2046 which is included in 0.22.
+
 insert overwrite table combine2 partition(value) 
 select * from (
    select key, value from src where key < 10

Added: hive/branches/branch-0.9/ql/src/test/queries/clientpositive/combine2_hadoop20.q
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.9/ql/src/test/queries/clientpositive/combine2_hadoop20.q?rev=1409752&view=auto
==============================================================================
--- hive/branches/branch-0.9/ql/src/test/queries/clientpositive/combine2_hadoop20.q (added)
+++ hive/branches/branch-0.9/ql/src/test/queries/clientpositive/combine2_hadoop20.q Thu Nov 15 12:26:45 2012
@@ -0,0 +1,48 @@
+USE default;
+
+set hive.input.format=org.apache.hadoop.hive.ql.io.CombineHiveInputFormat;
+set mapred.min.split.size=256;
+set mapred.min.split.size.per.node=256;
+set mapred.min.split.size.per.rack=256;
+set mapred.max.split.size=256;
+set hive.exec.dynamic.partition=true;
+set hive.exec.dynamic.partition.mode=nonstrict;
+set mapred.cache.shared.enabled=false;
+set hive.merge.smallfiles.avgsize=0;
+
+
+
+create table combine2(key string) partitioned by (value string);
+
+-- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20)
+-- This test sets mapred.max.split.size=256 and hive.merge.smallfiles.avgsize=0
+-- in an attempt to force the generation of multiple splits and multiple output files.
+-- However, Hadoop 0.20 is incapable of generating splits smaller than the block size
+-- when using CombineFileInputFormat, so only one split is generated. This has a
+-- significant impact on the results results of this test.
+-- This issue was fixed in MAPREDUCE-2046 which is included in 0.22.
+
+insert overwrite table combine2 partition(value) 
+select * from (
+   select key, value from src where key < 10
+   union all 
+   select key, '|' as value from src where key = 11
+   union all
+   select key, '2010-04-21 09:45:00' value from src where key = 19) s;
+
+show partitions combine2;
+
+explain
+select key, value from combine2 where value is not null order by key;
+
+select key, value from combine2 where value is not null order by key;
+
+explain extended
+select count(1) from combine2 where value is not null;
+
+select count(1) from combine2 where value is not null;
+
+explain
+select ds, count(1) from srcpart where ds is not null group by ds;
+
+select ds, count(1) from srcpart where ds is not null group by ds;

Modified: hive/branches/branch-0.9/ql/src/test/queries/clientpositive/ctas.q
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.9/ql/src/test/queries/clientpositive/ctas.q?rev=1409752&r1=1409751&r2=1409752&view=diff
==============================================================================
--- hive/branches/branch-0.9/ql/src/test/queries/clientpositive/ctas.q (original)
+++ hive/branches/branch-0.9/ql/src/test/queries/clientpositive/ctas.q Thu Nov 15 12:26:45 2012
@@ -1,10 +1,4 @@
-
-
-
-
-
-
-
+-- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20)
 
 create table nzhang_Tmp(a int, b string);
 select * from nzhang_Tmp;
@@ -55,7 +49,8 @@ describe formatted nzhang_CTAS4;
 
 explain extended create table nzhang_ctas5 row format delimited fields terminated by ',' lines terminated by '\012' stored as textfile as select key, value from src sort by key, value limit 10;
 
-set mapred.job.tracker=does.notexist.com:666;
+set mapreduce.framework.name=yarn;
+set mapreduce.jobtracker.address=localhost:58;
 set hive.exec.mode.local.auto=true;
 
 create table nzhang_ctas5 row format delimited fields terminated by ',' lines terminated by '\012' stored as textfile as select key, value from src sort by key, value limit 10;
@@ -63,12 +58,3 @@ create table nzhang_ctas5 row format del
 create table nzhang_ctas6 (key string, `to` string);
 insert overwrite table nzhang_ctas6 select key, value from src limit 10;
 create table nzhang_ctas7 as select key, `to` from nzhang_ctas6;
-
-
-
-
-
-
-
-
-

Added: hive/branches/branch-0.9/ql/src/test/queries/clientpositive/ctas_hadoop20.q
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.9/ql/src/test/queries/clientpositive/ctas_hadoop20.q?rev=1409752&view=auto
==============================================================================
--- hive/branches/branch-0.9/ql/src/test/queries/clientpositive/ctas_hadoop20.q (added)
+++ hive/branches/branch-0.9/ql/src/test/queries/clientpositive/ctas_hadoop20.q Thu Nov 15 12:26:45 2012
@@ -0,0 +1,59 @@
+-- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20)
+
+create table nzhang_Tmp(a int, b string);
+select * from nzhang_Tmp;
+
+explain create table nzhang_CTAS1 as select key k, value from src sort by k, value limit 10;
+
+create table nzhang_CTAS1 as select key k, value from src sort by k, value limit 10;
+
+select * from nzhang_CTAS1;
+
+describe formatted nzhang_CTAS1;
+
+
+explain create table nzhang_ctas2 as select * from src sort by key, value limit 10;
+
+create table nzhang_ctas2 as select * from src sort by key, value limit 10;
+
+select * from nzhang_ctas2;
+
+describe formatted nzhang_CTAS2;
+
+
+explain create table nzhang_ctas3 row format serde "org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe" stored as RCFile as select key/2 half_key, concat(value, "_con") conb  from src sort by half_key, conb limit 10;
+
+create table nzhang_ctas3 row format serde "org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe" stored as RCFile as select key/2 half_key, concat(value, "_con") conb  from src sort by half_key, conb limit 10;
+
+select * from nzhang_ctas3;
+
+describe formatted nzhang_CTAS3;
+
+
+explain create table if not exists nzhang_ctas3 as select key, value from src sort by key, value limit 2;
+
+create table if not exists nzhang_ctas3 as select key, value from src sort by key, value limit 2;
+
+select * from nzhang_ctas3;
+
+describe formatted nzhang_CTAS3;
+
+
+explain create table nzhang_ctas4 row format delimited fields terminated by ',' stored as textfile as select key, value from src sort by key, value limit 10;
+
+create table nzhang_ctas4 row format delimited fields terminated by ',' stored as textfile as select key, value from src sort by key, value limit 10;
+
+select * from nzhang_ctas4;
+
+describe formatted nzhang_CTAS4;
+
+explain extended create table nzhang_ctas5 row format delimited fields terminated by ',' lines terminated by '\012' stored as textfile as select key, value from src sort by key, value limit 10;
+
+set mapred.job.tracker=localhost:58;
+set hive.exec.mode.local.auto=true;
+
+create table nzhang_ctas5 row format delimited fields terminated by ',' lines terminated by '\012' stored as textfile as select key, value from src sort by key, value limit 10;
+
+create table nzhang_ctas6 (key string, `to` string);
+insert overwrite table nzhang_ctas6 select key, value from src limit 10;
+create table nzhang_ctas7 as select key, `to` from nzhang_ctas6;

Modified: hive/branches/branch-0.9/ql/src/test/queries/clientpositive/groupby7_noskew_multi_single_reducer.q
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.9/ql/src/test/queries/clientpositive/groupby7_noskew_multi_single_reducer.q?rev=1409752&r1=1409751&r2=1409752&view=diff
==============================================================================
--- hive/branches/branch-0.9/ql/src/test/queries/clientpositive/groupby7_noskew_multi_single_reducer.q (original)
+++ hive/branches/branch-0.9/ql/src/test/queries/clientpositive/groupby7_noskew_multi_single_reducer.q Thu Nov 15 12:26:45 2012
@@ -6,7 +6,7 @@ CREATE TABLE DEST1(key INT, value STRING
 CREATE TABLE DEST2(key INT, value STRING) STORED AS TEXTFILE;
 
 SET hive.exec.compress.intermediate=true;
-SET hive.exec.compress.output=true; 
+SET hive.exec.compress.output=true;
 
 EXPLAIN
 FROM SRC
@@ -17,5 +17,5 @@ FROM SRC
 INSERT OVERWRITE TABLE DEST1 SELECT SRC.key, sum(SUBSTR(SRC.value,5)) GROUP BY SRC.key limit 10
 INSERT OVERWRITE TABLE DEST2 SELECT SRC.key, sum(SUBSTR(SRC.value,5)) GROUP BY SRC.key limit 10;
 
-SELECT DEST1.* FROM DEST1;
-SELECT DEST2.* FROM DEST2;
+SELECT DEST1.* FROM DEST1 ORDER BY key ASC, value ASC;
+SELECT DEST2.* FROM DEST2 ORDER BY key ASC, value ASC;

Modified: hive/branches/branch-0.9/ql/src/test/queries/clientpositive/groupby_complex_types_multi_single_reducer.q
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.9/ql/src/test/queries/clientpositive/groupby_complex_types_multi_single_reducer.q?rev=1409752&r1=1409751&r2=1409752&view=diff
==============================================================================
--- hive/branches/branch-0.9/ql/src/test/queries/clientpositive/groupby_complex_types_multi_single_reducer.q (original)
+++ hive/branches/branch-0.9/ql/src/test/queries/clientpositive/groupby_complex_types_multi_single_reducer.q Thu Nov 15 12:26:45 2012
@@ -12,6 +12,6 @@ FROM SRC
 INSERT OVERWRITE TABLE DEST1 SELECT ARRAY(SRC.key), COUNT(1) GROUP BY ARRAY(SRC.key) limit 10
 INSERT OVERWRITE TABLE DEST2 SELECT MAP(SRC.key, SRC.value), COUNT(1) GROUP BY MAP(SRC.key, SRC.value) limit 10;
 
-SELECT DEST1.* FROM DEST1;
-SELECT DEST2.* FROM DEST2;
+SELECT DEST1.* FROM DEST1 ORDER BY key[0] ASC, value ASC;
+SELECT DEST2.* FROM DEST2 ORDER BY 1 ASC, value ASC;
 

Modified: hive/branches/branch-0.9/ql/src/test/queries/clientpositive/groupby_multi_single_reducer.q
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.9/ql/src/test/queries/clientpositive/groupby_multi_single_reducer.q?rev=1409752&r1=1409751&r2=1409752&view=diff
==============================================================================
--- hive/branches/branch-0.9/ql/src/test/queries/clientpositive/groupby_multi_single_reducer.q (original)
+++ hive/branches/branch-0.9/ql/src/test/queries/clientpositive/groupby_multi_single_reducer.q Thu Nov 15 12:26:45 2012
@@ -17,9 +17,9 @@ INSERT OVERWRITE TABLE dest_g2 SELECT su
 INSERT OVERWRITE TABLE dest_g3 SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))), sum(DISTINCT substr(src.value, 5)), count(src.value) WHERE substr(src.key,1,1) < 5 GROUP BY substr(src.key,1,1)
 INSERT OVERWRITE TABLE dest_g4 SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))), sum(DISTINCT substr(src.value, 5)), count(src.value) GROUP BY substr(src.key,1,1);
 
-SELECT * FROM dest_g2;
-SELECT * FROM dest_g3;
-SELECT * FROM dest_g4;
+SELECT * FROM dest_g2 ORDER BY key ASC, c1 ASC, c2 ASC, c3 ASC, c4 ASC;
+SELECT * FROM dest_g3 ORDER BY key ASC, c1 ASC, c2 ASC, c3 ASC, c4 ASC;
+SELECT * FROM dest_g4 ORDER BY key ASC, c1 ASC, c2 ASC, c3 ASC, c4 ASC;
 
 EXPLAIN
 FROM src
@@ -36,11 +36,11 @@ INSERT OVERWRITE TABLE dest_g4 SELECT su
 INSERT OVERWRITE TABLE dest_h2 SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))), sum(substr(src.value, 5)), count(src.value) GROUP BY substr(src.key,1,1), substr(src.key,2,1) LIMIT 10
 INSERT OVERWRITE TABLE dest_h3 SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))), sum(substr(src.value, 5)), count(src.value) WHERE substr(src.key,1,1) >= 5 GROUP BY substr(src.key,1,1), substr(src.key,2,1);
 
-SELECT * FROM dest_g2;
-SELECT * FROM dest_g3;
-SELECT * FROM dest_g4;
-SELECT * FROM dest_h2;
-SELECT * FROM dest_h3;
+SELECT * FROM dest_g2 ORDER BY key ASC, c1 ASC, c2 ASC, c3 ASC, c4 ASC;
+SELECT * FROM dest_g3 ORDER BY key ASC, c1 ASC, c2 ASC, c3 ASC, c4 ASC;
+SELECT * FROM dest_g4 ORDER BY key ASC, c1 ASC, c2 ASC, c3 ASC, c4 ASC;
+SELECT * FROM dest_h2 ORDER BY key ASC, c1 ASC, c2 ASC, c3 ASC, c4 ASC;
+SELECT * FROM dest_h3 ORDER BY key ASC, c1 ASC, c2 ASC, c3 ASC, c4 ASC;
 
 DROP TABLE dest_g2;
 DROP TABLE dest_g3;

Modified: hive/branches/branch-0.9/ql/src/test/queries/clientpositive/input12.q
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.9/ql/src/test/queries/clientpositive/input12.q?rev=1409752&r1=1409751&r2=1409752&view=diff
==============================================================================
--- hive/branches/branch-0.9/ql/src/test/queries/clientpositive/input12.q (original)
+++ hive/branches/branch-0.9/ql/src/test/queries/clientpositive/input12.q Thu Nov 15 12:26:45 2012
@@ -1,6 +1,9 @@
-set mapred.job.tracker=does.notexist.com:666;
+set mapreduce.framework.name=yarn;
+set mapreduce.jobtracker.address=localhost:58;
 set hive.exec.mode.local.auto=true;
 
+-- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20)
+
 CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE;
 CREATE TABLE dest2(key INT, value STRING) STORED AS TEXTFILE;
 CREATE TABLE dest3(key INT) PARTITIONED BY(ds STRING, hr STRING) STORED AS TEXTFILE;

Added: hive/branches/branch-0.9/ql/src/test/queries/clientpositive/input12_hadoop20.q
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.9/ql/src/test/queries/clientpositive/input12_hadoop20.q?rev=1409752&view=auto
==============================================================================
--- hive/branches/branch-0.9/ql/src/test/queries/clientpositive/input12_hadoop20.q (added)
+++ hive/branches/branch-0.9/ql/src/test/queries/clientpositive/input12_hadoop20.q Thu Nov 15 12:26:45 2012
@@ -0,0 +1,23 @@
+set mapred.job.tracker=localhost:58;
+set hive.exec.mode.local.auto=true;
+
+-- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20)
+
+CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE;
+CREATE TABLE dest2(key INT, value STRING) STORED AS TEXTFILE;
+CREATE TABLE dest3(key INT) PARTITIONED BY(ds STRING, hr STRING) STORED AS TEXTFILE;
+
+EXPLAIN
+FROM src 
+INSERT OVERWRITE TABLE dest1 SELECT src.* WHERE src.key < 100
+INSERT OVERWRITE TABLE dest2 SELECT src.key, src.value WHERE src.key >= 100 and src.key < 200
+INSERT OVERWRITE TABLE dest3 PARTITION(ds='2008-04-08', hr='12') SELECT src.key WHERE src.key >= 200;
+
+FROM src 
+INSERT OVERWRITE TABLE dest1 SELECT src.* WHERE src.key < 100
+INSERT OVERWRITE TABLE dest2 SELECT src.key, src.value WHERE src.key >= 100 and src.key < 200
+INSERT OVERWRITE TABLE dest3 PARTITION(ds='2008-04-08', hr='12') SELECT src.key WHERE src.key >= 200;
+
+SELECT dest1.* FROM dest1;
+SELECT dest2.* FROM dest2;
+SELECT dest3.* FROM dest3;

Modified: hive/branches/branch-0.9/ql/src/test/queries/clientpositive/input39.q
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.9/ql/src/test/queries/clientpositive/input39.q?rev=1409752&r1=1409751&r2=1409752&view=diff
==============================================================================
--- hive/branches/branch-0.9/ql/src/test/queries/clientpositive/input39.q (original)
+++ hive/branches/branch-0.9/ql/src/test/queries/clientpositive/input39.q Thu Nov 15 12:26:45 2012
@@ -1,4 +1,4 @@
-
+-- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20)
 
 
 create table t1(key string, value string) partitioned by (ds string);
@@ -15,7 +15,8 @@ select key, value from src;
 
 set hive.test.mode=true;
 set hive.mapred.mode=strict;
-set mapred.job.tracker=does.notexist.com:666;
+set mapreduce.framework.name=yarn;
+set mapreduce.jobtracker.address=localhost:58;
 set hive.exec.mode.local.auto=true;
 
 explain
@@ -24,7 +25,5 @@ select count(1) from t1 join t2 on t1.ke
 select count(1) from t1 join t2 on t1.key=t2.key where t1.ds='1' and t2.ds='1';
 
 set hive.test.mode=false;
-set mapred.job.tracker;
-
-
-
+set mapreduce.framework.name;
+set mapreduce.jobtracker.address;

Added: hive/branches/branch-0.9/ql/src/test/queries/clientpositive/input39_hadoop20.q
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.9/ql/src/test/queries/clientpositive/input39_hadoop20.q?rev=1409752&view=auto
==============================================================================
--- hive/branches/branch-0.9/ql/src/test/queries/clientpositive/input39_hadoop20.q (added)
+++ hive/branches/branch-0.9/ql/src/test/queries/clientpositive/input39_hadoop20.q Thu Nov 15 12:26:45 2012
@@ -0,0 +1,30 @@
+-- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20)
+
+
+create table t1(key string, value string) partitioned by (ds string);
+create table t2(key string, value string) partitioned by (ds string);
+
+insert overwrite table t1 partition (ds='1')
+select key, value from src;
+
+insert overwrite table t1 partition (ds='2')
+select key, value from src;
+
+insert overwrite table t2 partition (ds='1')
+select key, value from src;
+
+set hive.test.mode=true;
+set hive.mapred.mode=strict;
+set mapred.job.tracker=localhost:58;
+set hive.exec.mode.local.auto=true;
+
+explain
+select count(1) from t1 join t2 on t1.key=t2.key where t1.ds='1' and t2.ds='1';
+
+select count(1) from t1 join t2 on t1.key=t2.key where t1.ds='1' and t2.ds='1';
+
+set hive.test.mode=false;
+set mapred.job.tracker;
+
+
+

Modified: hive/branches/branch-0.9/ql/src/test/queries/clientpositive/join14.q
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.9/ql/src/test/queries/clientpositive/join14.q?rev=1409752&r1=1409751&r2=1409752&view=diff
==============================================================================
--- hive/branches/branch-0.9/ql/src/test/queries/clientpositive/join14.q (original)
+++ hive/branches/branch-0.9/ql/src/test/queries/clientpositive/join14.q Thu Nov 15 12:26:45 2012
@@ -1,7 +1,11 @@
+-- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20)
+
 CREATE TABLE dest1(c1 INT, c2 STRING) STORED AS TEXTFILE;
 
-set mapred.job.tracker=does.notexist.com:666;
+set mapreduce.framework.name=yarn;
+set mapreduce.jobtracker.address=localhost:58;
 set hive.exec.mode.local.auto=true;
+set hive.exec.mode.local.auto.input.files.max=6;
 
 EXPLAIN
 FROM src JOIN srcpart ON src.key = srcpart.key AND srcpart.ds = '2008-04-08' and src.key > 100

Added: hive/branches/branch-0.9/ql/src/test/queries/clientpositive/join14_hadoop20.q
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.9/ql/src/test/queries/clientpositive/join14_hadoop20.q?rev=1409752&view=auto
==============================================================================
--- hive/branches/branch-0.9/ql/src/test/queries/clientpositive/join14_hadoop20.q (added)
+++ hive/branches/branch-0.9/ql/src/test/queries/clientpositive/join14_hadoop20.q Thu Nov 15 12:26:45 2012
@@ -0,0 +1,15 @@
+-- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20)
+
+CREATE TABLE dest1(c1 INT, c2 STRING) STORED AS TEXTFILE;
+
+set mapred.job.tracker=localhost:58;
+set hive.exec.mode.local.auto=true;
+
+EXPLAIN
+FROM src JOIN srcpart ON src.key = srcpart.key AND srcpart.ds = '2008-04-08' and src.key > 100
+INSERT OVERWRITE TABLE dest1 SELECT src.key, srcpart.value;
+
+FROM src JOIN srcpart ON src.key = srcpart.key AND srcpart.ds = '2008-04-08' and src.key > 100
+INSERT OVERWRITE TABLE dest1 SELECT src.key, srcpart.value;
+
+select dest1.* from dest1;

Modified: hive/branches/branch-0.9/ql/src/test/queries/clientpositive/leftsemijoin.q
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.9/ql/src/test/queries/clientpositive/leftsemijoin.q?rev=1409752&r1=1409751&r2=1409752&view=diff
==============================================================================
--- hive/branches/branch-0.9/ql/src/test/queries/clientpositive/leftsemijoin.q (original)
+++ hive/branches/branch-0.9/ql/src/test/queries/clientpositive/leftsemijoin.q Thu Nov 15 12:26:45 2012
@@ -13,11 +13,11 @@ load data local inpath '../data/files/sa
 load data local inpath '../data/files/things.txt' INTO TABLE things partition(ds='2011-10-23');
 load data local inpath '../data/files/things2.txt' INTO TABLE things partition(ds='2011-10-24');
 
-SELECT name,id FROM sales;
+SELECT name,id FROM sales ORDER BY name ASC, id ASC;
 
-SELECT id,name FROM things;
+SELECT id,name FROM things ORDER BY id ASC, name ASC;
 
-SELECT name,id FROM sales LEFT SEMI JOIN things ON (sales.id = things.id);
+SELECT name,id FROM sales LEFT SEMI JOIN things ON (sales.id = things.id) ORDER BY name ASC, id ASC;
 
 drop table sales;
 drop table things;
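
The ORDER BY clauses added above look like a determinism fix rather than a behavior change: without an explicit ordering, the row order captured in the golden .q.out depends on split and shuffle ordering, which differs between Hadoop 0.20 and 0.23 builds. Minimal illustration, using the same tables as the test:

    -- stable golden output: pin the ordering explicitly
    SELECT name,id FROM sales LEFT SEMI JOIN things ON (sales.id = things.id) ORDER BY name ASC, id ASC;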

Modified: hive/branches/branch-0.9/ql/src/test/queries/clientpositive/query_properties.q
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.9/ql/src/test/queries/clientpositive/query_properties.q?rev=1409752&r1=1409751&r2=1409752&view=diff
==============================================================================
--- hive/branches/branch-0.9/ql/src/test/queries/clientpositive/query_properties.q (original)
+++ hive/branches/branch-0.9/ql/src/test/queries/clientpositive/query_properties.q Thu Nov 15 12:26:45 2012
@@ -1,14 +1,14 @@
 set hive.exec.post.hooks = org.apache.hadoop.hive.ql.hooks.CheckQueryPropertiesHook;
 
-select * from src a join src b on a.key = b.key limit 1;
-select * from src group by src.key, src.value limit 1;
-select * from src order by src.key limit 1;
-select * from src sort by src.key limit 1;
-select a.key, sum(b.value) from src a join src b on a.key = b.key group by a.key limit 1;
-select transform(*) using 'cat' from src limit 1;
-select * from src distribute by src.key limit 1;
-select * from src cluster by src.key limit 1;
+select * from src a join src b on a.key = b.key limit 0;
+select * from src group by src.key, src.value limit 0;
+select * from src order by src.key limit 0;
+select * from src sort by src.key limit 0;
+select a.key, sum(b.value) from src a join src b on a.key = b.key group by a.key limit 0;
+select transform(*) using 'cat' from src limit 0;
+select * from src distribute by src.key limit 0;
+select * from src cluster by src.key limit 0;
 
-select key, sum(value) from (select a.key as key, b.value as value from src a join src b on a.key = b.key) c group by key limit 1;
-select * from src a join src b on a.key = b.key order by a.key limit 1;
-select * from src a join src b on a.key = b.key distribute by a.key sort by a.key, b.value limit 1;
\ No newline at end of file
+select key, sum(value) from (select a.key as key, b.value as value from src a join src b on a.key = b.key) c group by key limit 0;
+select * from src a join src b on a.key = b.key order by a.key limit 0;
+select * from src a join src b on a.key = b.key distribute by a.key sort by a.key, b.value limit 0;
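
Switching LIMIT 1 to LIMIT 0 in query_properties.q presumably keeps CheckQueryPropertiesHook exercising each query shape while removing any dependence on which row happens to come back first, so the golden output stays identical across Hadoop versions. For example:

    set hive.exec.post.hooks = org.apache.hadoop.hive.ql.hooks.CheckQueryPropertiesHook;
    -- limit 0 still compiles and runs the join for the hook to inspect, but emits no (order-dependent) rows
    select * from src a join src b on a.key = b.key limit 0;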

Modified: hive/branches/branch-0.9/ql/src/test/queries/clientpositive/repair.q
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.9/ql/src/test/queries/clientpositive/repair.q?rev=1409752&r1=1409751&r2=1409752&view=diff
==============================================================================
--- hive/branches/branch-0.9/ql/src/test/queries/clientpositive/repair.q (original)
+++ hive/branches/branch-0.9/ql/src/test/queries/clientpositive/repair.q Thu Nov 15 12:26:45 2012
@@ -1,11 +1,11 @@
-
+-- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20)
 
 CREATE TABLE repairtable(col STRING) PARTITIONED BY (p1 STRING, p2 STRING);
 
 MSCK TABLE repairtable;
 
-dfs -mkdir ../build/ql/test/data/warehouse/repairtable/p1=a/p2=a;
-dfs -mkdir ../build/ql/test/data/warehouse/repairtable/p1=b/p2=a;
+dfs -mkdir -p ../build/ql/test/data/warehouse/repairtable/p1=a/p2=a;
+dfs -mkdir -p ../build/ql/test/data/warehouse/repairtable/p1=b/p2=a;
 
 MSCK TABLE repairtable;
 

Added: hive/branches/branch-0.9/ql/src/test/queries/clientpositive/repair_hadoop20.q
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.9/ql/src/test/queries/clientpositive/repair_hadoop20.q?rev=1409752&view=auto
==============================================================================
--- hive/branches/branch-0.9/ql/src/test/queries/clientpositive/repair_hadoop20.q (added)
+++ hive/branches/branch-0.9/ql/src/test/queries/clientpositive/repair_hadoop20.q Thu Nov 15 12:26:45 2012
@@ -0,0 +1,16 @@
+-- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20)
+
+CREATE TABLE repairtable(col STRING) PARTITIONED BY (p1 STRING, p2 STRING);
+
+MSCK TABLE repairtable;
+
+dfs -mkdir ../build/ql/test/data/warehouse/repairtable/p1=a/p2=a;
+dfs -mkdir ../build/ql/test/data/warehouse/repairtable/p1=b/p2=a;
+
+MSCK TABLE repairtable;
+
+MSCK REPAIR TABLE repairtable;
+
+MSCK TABLE repairtable;
+
+
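
Apart from the version marker, the difference between repair.q and this repair_hadoop20.q copy is the dfs -mkdir flag. This appears to be because `hadoop fs -mkdir` on 0.23 no longer creates missing parent directories unless -p is given, whereas the 0.20 shell creates them implicitly, so the shared test uses -p and the 0.20 variant keeps the bare form. Side by side:

    dfs -mkdir ../build/ql/test/data/warehouse/repairtable/p1=a/p2=a;      -- Hadoop 0.20 form
    dfs -mkdir -p ../build/ql/test/data/warehouse/repairtable/p1=a/p2=a;   -- Hadoop 0.23 form (-p creates parents)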

Modified: hive/branches/branch-0.9/ql/src/test/queries/clientpositive/sample_islocalmode_hook.q
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.9/ql/src/test/queries/clientpositive/sample_islocalmode_hook.q?rev=1409752&r1=1409751&r2=1409752&view=diff
==============================================================================
--- hive/branches/branch-0.9/ql/src/test/queries/clientpositive/sample_islocalmode_hook.q (original)
+++ hive/branches/branch-0.9/ql/src/test/queries/clientpositive/sample_islocalmode_hook.q Thu Nov 15 12:26:45 2012
@@ -10,6 +10,8 @@ set mapred.min.split.size.per.rack=300;
 set hive.exec.mode.local.auto=true;
 set hive.merge.smallfiles.avgsize=1;
 
+-- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20)
+
 -- create file inputs
 create table sih_i_part (key int, value string) partitioned by (p string);
 insert overwrite table sih_i_part partition (p='1') select key, value from src;
@@ -18,21 +20,24 @@ insert overwrite table sih_i_part partit
 create table sih_src as select key, value from sih_i_part order by key, value;
 create table sih_src2 as select key, value from sih_src order by key, value;
 
-set hive.exec.post.hooks = org.apache.hadoop.hive.ql.hooks.VerifyIsLocalModeHook ;
-set mapred.job.tracker=does.notexist.com:666;
-set hive.exec.mode.local.auto.input.files.max=1;
+set hive.exec.post.hooks = org.apache.hadoop.hive.ql.hooks.VerifyIsLocalModeHook;
+set mapreduce.framework.name=yarn;
+set mapreduce.jobtracker.address=localhost:58;
+set hive.sample.seednumber=7;
+
+-- Relaxing hive.exec.mode.local.auto.input.files.max=1.
+-- Hadoop20 will not generate more splits than there are files (one).
+-- Hadoop23 generates splits correctly (four), hence the max needs to be adjusted to ensure the query still runs in local mode.
+-- The default value is hive.exec.mode.local.auto.input.files.max=4, which produces the expected behavior on Hadoop23.
+-- hive.sample.seednumber is required because Hadoop23 generates multiple splits and tablesample is non-repeatable without it.
 
 -- sample split, running locally limited by num tasks
 select count(1) from sih_src tablesample(1 percent);
 
-set mapred.job.tracker=does.notexist.com:666;
-
 -- sample two tables
-select count(1) from sih_src tablesample(1 percent)a join sih_src2 tablesample(1 percent)b on a.key = b.key;
+select count(1) from sih_src tablesample(1 percent) a join sih_src2 tablesample(1 percent) b on a.key = b.key;
 
 set hive.exec.mode.local.auto.inputbytes.max=1000;
-set hive.exec.mode.local.auto.input.files.max=4;
-set mapred.job.tracker=does.notexist.com:666;
 
 -- sample split, running locally limited by max bytes
 select count(1) from sih_src tablesample(1 percent);
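
The comments added above explain most of this hunk; the other visible change is that hive.sample.seednumber is pinned once up front instead of re-setting mapred.job.tracker before every query. Per the patch's own comment, 0.23 produces several splits here, and TABLESAMPLE(n PERCENT) is not repeatable across runs unless the sampling seed is fixed. Roughly:

    set hive.sample.seednumber=7;   -- fixed seed, so TABLESAMPLE selects the same splits on every run
    select count(1) from sih_src tablesample(1 percent);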

Added: hive/branches/branch-0.9/ql/src/test/queries/clientpositive/sample_islocalmode_hook_hadoop20.q
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.9/ql/src/test/queries/clientpositive/sample_islocalmode_hook_hadoop20.q?rev=1409752&view=auto
==============================================================================
--- hive/branches/branch-0.9/ql/src/test/queries/clientpositive/sample_islocalmode_hook_hadoop20.q (added)
+++ hive/branches/branch-0.9/ql/src/test/queries/clientpositive/sample_islocalmode_hook_hadoop20.q Thu Nov 15 12:26:45 2012
@@ -0,0 +1,47 @@
+drop table if exists sih_i_part;
+drop table if exists sih_src;
+drop table if exists sih_src2;
+
+set hive.input.format=org.apache.hadoop.hive.ql.io.CombineHiveInputFormat;
+set mapred.max.split.size=300;
+set mapred.min.split.size=300;
+set mapred.min.split.size.per.node=300;
+set mapred.min.split.size.per.rack=300;
+set hive.exec.mode.local.auto=true;
+set hive.merge.smallfiles.avgsize=1;
+
+-- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20)
+
+-- create file inputs
+create table sih_i_part (key int, value string) partitioned by (p string);
+insert overwrite table sih_i_part partition (p='1') select key, value from src;
+insert overwrite table sih_i_part partition (p='2') select key+10000, value from src;
+insert overwrite table sih_i_part partition (p='3') select key+20000, value from src;
+create table sih_src as select key, value from sih_i_part order by key, value;
+create table sih_src2 as select key, value from sih_src order by key, value;
+
+set hive.exec.post.hooks = org.apache.hadoop.hive.ql.hooks.VerifyIsLocalModeHook ;
+set mapred.job.tracker=localhost:58;
+set hive.exec.mode.local.auto.input.files.max=1;
+set hive.sample.seednumber=7;
+
+-- sample split, running locally limited by num tasks
+select count(1) from sih_src tablesample(1 percent);
+
+set mapred.job.tracker=localhost:58;
+set hive.sample.seednumber=7;
+
+-- sample two tables
+select count(1) from sih_src tablesample(1 percent)a join sih_src2 tablesample(1 percent)b on a.key = b.key;
+
+set hive.exec.mode.local.auto.inputbytes.max=1000;
+set hive.exec.mode.local.auto.input.files.max=4;
+set mapred.job.tracker=localhost:58;
+set hive.sample.seednumber=7;
+
+-- sample split, running locally limited by max bytes
+select count(1) from sih_src tablesample(1 percent);
+
+drop table sih_i_part;
+drop table sih_src;
+drop table sih_src2;

Modified: hive/branches/branch-0.9/ql/src/test/queries/clientpositive/split_sample.q
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.9/ql/src/test/queries/clientpositive/split_sample.q?rev=1409752&r1=1409751&r2=1409752&view=diff
==============================================================================
--- hive/branches/branch-0.9/ql/src/test/queries/clientpositive/split_sample.q (original)
+++ hive/branches/branch-0.9/ql/src/test/queries/clientpositive/split_sample.q Thu Nov 15 12:26:45 2012
@@ -12,6 +12,15 @@ set mapred.min.split.size=300;
 set mapred.min.split.size.per.node=300;
 set mapred.min.split.size.per.rack=300;
 set hive.merge.smallfiles.avgsize=1;
+set hive.sample.seednumber=7;
+
+-- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20)
+-- This test sets mapred.max.split.size=300 and hive.merge.smallfiles.avgsize=1
+-- in an attempt to force the generation of multiple splits and multiple output files.
+-- However, Hadoop 0.20 is incapable of generating splits smaller than the block size
+-- when using CombineFileInputFormat, so only one split is generated. This has a
+-- significant impact on the results of the TABLESAMPLE(x PERCENT). This issue was
+-- fixed in MAPREDUCE-2046 which is included in 0.22.
 
 -- create multiple file inputs (to enable multiple splits)
 create table ss_i_part (key int, value string) partitioned by (p string);
@@ -40,6 +49,7 @@ set hive.sample.seednumber=5;
 create table ss_t5 as select sum(key) % 397 as s from ss_src3 tablesample(1 percent) limit 10;
 select sum(s) from (select s from ss_t3 union all select s from ss_t4 union all select s from ss_t5) t;
 
+set hive.sample.seednumber=7;
 -- sample more than one split
 explain select count(distinct key) from ss_src2 tablesample(70 percent) limit 10;
 select count(distinct key) from ss_src2 tablesample(70 percent) limit 10;
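
split_sample.q stays 0.20-only (INCLUDE marker) and gains a comment explaining why: with CombineFileInputFormat, 0.20 cannot produce splits smaller than a block, so the tiny mapred.max.split.size settings yield a single split and TABLESAMPLE(x PERCENT) behaves differently than on 0.22+ (MAPREDUCE-2046). The added `set hive.sample.seednumber=7;` before the multi-split sample resets the seed after the earlier seednumber=5 block, mirroring the pattern used in the other sampling tests:

    set hive.sample.seednumber=7;   -- reset the seed before each percent-based sample so the result is repeatable
    select count(distinct key) from ss_src2 tablesample(70 percent) limit 10;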

Added: hive/branches/branch-0.9/ql/src/test/resources/core-site.xml
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.9/ql/src/test/resources/core-site.xml?rev=1409752&view=auto
==============================================================================
--- hive/branches/branch-0.9/ql/src/test/resources/core-site.xml (added)
+++ hive/branches/branch-0.9/ql/src/test/resources/core-site.xml Thu Nov 15 12:26:45 2012
@@ -0,0 +1,27 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<!-- Test file for TestHiveConf -->
+
+<configuration>
+  <property>
+    <name>yarn.app.mapreduce.am.job.node-blacklisting.enable</name>
+    <value>false</value>
+  </property>
+</configuration>

Propchange: hive/branches/branch-0.9/ql/src/test/resources/core-site.xml
------------------------------------------------------------------------------
    svn:eol-style = native

Modified: hive/branches/branch-0.9/ql/src/test/results/clientnegative/autolocal1.q.out
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.9/ql/src/test/results/clientnegative/autolocal1.q.out?rev=1409752&r1=1409751&r2=1409752&view=diff
==============================================================================
--- hive/branches/branch-0.9/ql/src/test/results/clientnegative/autolocal1.q.out (original)
+++ hive/branches/branch-0.9/ql/src/test/results/clientnegative/autolocal1.q.out Thu Nov 15 12:26:45 2012
@@ -1,4 +1,14 @@
-PREHOOK: query: SELECT key FROM src
+PREHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20)
+-- hadoop0.23 changes the behavior of JobClient initialization
+-- in hadoop0.20, JobClient initialization tries to get JobTracker's address
+-- this throws the expected IllegalArgumentException
+-- in hadoop0.23, JobClient initialization only initializes the cluster
+-- and gets the user group information,
+-- and does not attempt to get the JobTracker's address,
+-- so no IllegalArgumentException is thrown during JobClient initialization;
+-- instead, an exception is thrown when JobClient submits the job
+
+SELECT key FROM src
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 #### A masked pattern was here ####

Modified: hive/branches/branch-0.9/ql/src/test/results/clientnegative/local_mapred_error_cache.q.out
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.9/ql/src/test/results/clientnegative/local_mapred_error_cache.q.out?rev=1409752&r1=1409751&r2=1409752&view=diff
==============================================================================
--- hive/branches/branch-0.9/ql/src/test/results/clientnegative/local_mapred_error_cache.q.out (original)
+++ hive/branches/branch-0.9/ql/src/test/results/clientnegative/local_mapred_error_cache.q.out Thu Nov 15 12:26:45 2012
@@ -1,4 +1,6 @@
-PREHOOK: query: FROM src SELECT TRANSFORM(key, value) USING 'python ../data/scripts/cat_error.py' AS (key, value)
+PREHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20)
+
+FROM src SELECT TRANSFORM(key, value) USING 'python ../data/scripts/cat_error.py' AS (key, value)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 #### A masked pattern was here ####

Added: hive/branches/branch-0.9/ql/src/test/results/clientnegative/local_mapred_error_cache_hadoop20.q.out
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.9/ql/src/test/results/clientnegative/local_mapred_error_cache_hadoop20.q.out?rev=1409752&view=auto
==============================================================================
--- hive/branches/branch-0.9/ql/src/test/results/clientnegative/local_mapred_error_cache_hadoop20.q.out (added)
+++ hive/branches/branch-0.9/ql/src/test/results/clientnegative/local_mapred_error_cache_hadoop20.q.out Thu Nov 15 12:26:45 2012
@@ -0,0 +1,26 @@
+PREHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20)
+
+FROM src SELECT TRANSFORM(key, value) USING 'python ../data/scripts/cat_error.py' AS (key, value)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+Execution failed with exit status: 2
+Obtaining error information
+
+Task failed!
+Task ID:
+  Stage-1
+
+Logs:
+
+#### A masked pattern was here ####
+ID: Stage-1
+org.apache.hadoop.hive.ql.metadata.HiveException: Hit error while closing ..
+#### A masked pattern was here ####
+org.apache.hadoop.hive.ql.metadata.HiveException: Hit error while closing ..
+#### A masked pattern was here ####
+org.apache.hadoop.hive.ql.metadata.HiveException: Hit error while closing ..
+#### A masked pattern was here ####
+Ended Job = job_local_0001 with errors
+Error during job, obtaining debugging information...
+FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask

Modified: hive/branches/branch-0.9/ql/src/test/results/clientnegative/mapreduce_stack_trace.q.out
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.9/ql/src/test/results/clientnegative/mapreduce_stack_trace.q.out?rev=1409752&r1=1409751&r2=1409752&view=diff
==============================================================================
--- hive/branches/branch-0.9/ql/src/test/results/clientnegative/mapreduce_stack_trace.q.out (original)
+++ hive/branches/branch-0.9/ql/src/test/results/clientnegative/mapreduce_stack_trace.q.out Thu Nov 15 12:26:45 2012
@@ -1,13 +1,7 @@
-PREHOOK: query: FROM src SELECT TRANSFORM(key, value) USING 'script_does_not_exist' AS (key, value)
+PREHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20)
+
+FROM src SELECT TRANSFORM(key, value) USING 'script_does_not_exist' AS (key, value)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 #### A masked pattern was here ####
-FATAL ExecMapper: org.apache.hadoop.hive.ql.metadata.HiveException: Hive Runtime Error while processing row {"key":"238","value":"val_238"}
-Hive Runtime Error while processing row {"key":"238","value":"val_238"}
-FATAL ExecMapper: org.apache.hadoop.hive.ql.metadata.HiveException: Hive Runtime Error while processing row {"key":"238","value":"val_238"}
-Hive Runtime Error while processing row {"key":"238","value":"val_238"}
-FATAL ExecMapper: org.apache.hadoop.hive.ql.metadata.HiveException: Hive Runtime Error while processing row {"key":"238","value":"val_238"}
-Hive Runtime Error while processing row {"key":"238","value":"val_238"}
-FATAL ExecMapper: org.apache.hadoop.hive.ql.metadata.HiveException: Hive Runtime Error while processing row {"key":"238","value":"val_238"}
-Hive Runtime Error while processing row {"key":"238","value":"val_238"}
 FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask

Added: hive/branches/branch-0.9/ql/src/test/results/clientnegative/mapreduce_stack_trace_hadoop20.q.out
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.9/ql/src/test/results/clientnegative/mapreduce_stack_trace_hadoop20.q.out?rev=1409752&view=auto
==============================================================================
--- hive/branches/branch-0.9/ql/src/test/results/clientnegative/mapreduce_stack_trace_hadoop20.q.out (added)
+++ hive/branches/branch-0.9/ql/src/test/results/clientnegative/mapreduce_stack_trace_hadoop20.q.out Thu Nov 15 12:26:45 2012
@@ -0,0 +1,15 @@
+PREHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20)
+
+FROM src SELECT TRANSFORM(key, value) USING 'script_does_not_exist' AS (key, value)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+FATAL ExecMapper: org.apache.hadoop.hive.ql.metadata.HiveException: Hive Runtime Error while processing row {"key":"238","value":"val_238"}
+Hive Runtime Error while processing row {"key":"238","value":"val_238"}
+FATAL ExecMapper: org.apache.hadoop.hive.ql.metadata.HiveException: Hive Runtime Error while processing row {"key":"238","value":"val_238"}
+Hive Runtime Error while processing row {"key":"238","value":"val_238"}
+FATAL ExecMapper: org.apache.hadoop.hive.ql.metadata.HiveException: Hive Runtime Error while processing row {"key":"238","value":"val_238"}
+Hive Runtime Error while processing row {"key":"238","value":"val_238"}
+FATAL ExecMapper: org.apache.hadoop.hive.ql.metadata.HiveException: Hive Runtime Error while processing row {"key":"238","value":"val_238"}
+Hive Runtime Error while processing row {"key":"238","value":"val_238"}
+FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask

Modified: hive/branches/branch-0.9/ql/src/test/results/clientpositive/auto_join14.q.out
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.9/ql/src/test/results/clientpositive/auto_join14.q.out?rev=1409752&r1=1409751&r2=1409752&view=diff
==============================================================================
--- hive/branches/branch-0.9/ql/src/test/results/clientpositive/auto_join14.q.out (original)
+++ hive/branches/branch-0.9/ql/src/test/results/clientpositive/auto_join14.q.out Thu Nov 15 12:26:45 2012
@@ -1,6 +1,10 @@
-PREHOOK: query: CREATE TABLE dest1(c1 INT, c2 STRING) STORED AS TEXTFILE
+PREHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20)
+
+CREATE TABLE dest1(c1 INT, c2 STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
-POSTHOOK: query: CREATE TABLE dest1(c1 INT, c2 STRING) STORED AS TEXTFILE
+POSTHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20)
+
+CREATE TABLE dest1(c1 INT, c2 STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@dest1
 PREHOOK: query: explain

Added: hive/branches/branch-0.9/ql/src/test/results/clientpositive/auto_join14_hadoop20.q.out
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.9/ql/src/test/results/clientpositive/auto_join14_hadoop20.q.out?rev=1409752&view=auto
==============================================================================
--- hive/branches/branch-0.9/ql/src/test/results/clientpositive/auto_join14_hadoop20.q.out (added)
+++ hive/branches/branch-0.9/ql/src/test/results/clientpositive/auto_join14_hadoop20.q.out Thu Nov 15 12:26:45 2012
@@ -0,0 +1,278 @@
+PREHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20)
+
+CREATE TABLE dest1(c1 INT, c2 STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20)
+
+CREATE TABLE dest1(c1 INT, c2 STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dest1
+PREHOOK: query: explain
+FROM src JOIN srcpart ON src.key = srcpart.key AND srcpart.ds = '2008-04-08' and src.key > 100
+INSERT OVERWRITE TABLE dest1 SELECT src.key, srcpart.value
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+FROM src JOIN srcpart ON src.key = srcpart.key AND srcpart.ds = '2008-04-08' and src.key > 100
+INSERT OVERWRITE TABLE dest1 SELECT src.key, srcpart.value
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF (TOK_TABNAME src)) (TOK_TABREF (TOK_TABNAME srcpart)) (and (AND (= (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL srcpart) key)) (= (. (TOK_TABLE_OR_COL srcpart) ds) '2008-04-08')) (> (. (TOK_TABLE_OR_COL src) key) 100)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME dest1))) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL srcpart) value)))))
+
+STAGE DEPENDENCIES:
+  Stage-6 is a root stage , consists of Stage-7, Stage-8, Stage-1
+  Stage-7 has a backup stage: Stage-1
+  Stage-4 depends on stages: Stage-7
+  Stage-0 depends on stages: Stage-1, Stage-4, Stage-5
+  Stage-2 depends on stages: Stage-0
+  Stage-8 has a backup stage: Stage-1
+  Stage-5 depends on stages: Stage-8
+  Stage-1
+
+STAGE PLANS:
+  Stage: Stage-6
+    Conditional Operator
+
+  Stage: Stage-7
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        srcpart 
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        srcpart 
+          TableScan
+            alias: srcpart
+            HashTable Sink Operator
+              condition expressions:
+                0 {key}
+                1 {value}
+              handleSkewJoin: false
+              keys:
+                0 [Column[key]]
+                1 [Column[key]]
+              Position of Big Table: 0
+
+  Stage: Stage-4
+    Map Reduce
+      Alias -> Map Operator Tree:
+        src 
+          TableScan
+            alias: src
+            Filter Operator
+              predicate:
+                  expr: (key > 100.0)
+                  type: boolean
+              Map Join Operator
+                condition map:
+                     Inner Join 0 to 1
+                condition expressions:
+                  0 {key}
+                  1 {value}
+                handleSkewJoin: false
+                keys:
+                  0 [Column[key]]
+                  1 [Column[key]]
+                outputColumnNames: _col0, _col5
+                Position of Big Table: 0
+                Select Operator
+                  expressions:
+                        expr: _col0
+                        type: string
+                        expr: _col5
+                        type: string
+                  outputColumnNames: _col0, _col1
+                  Select Operator
+                    expressions:
+                          expr: UDFToInteger(_col0)
+                          type: int
+                          expr: _col1
+                          type: string
+                    outputColumnNames: _col0, _col1
+                    File Output Operator
+                      compressed: false
+                      GlobalTableId: 1
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          name: default.dest1
+      Local Work:
+        Map Reduce Local Work
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.dest1
+
+  Stage: Stage-2
+    Stats-Aggr Operator
+
+  Stage: Stage-8
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        src 
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        src 
+          TableScan
+            alias: src
+            Filter Operator
+              predicate:
+                  expr: (key > 100.0)
+                  type: boolean
+              HashTable Sink Operator
+                condition expressions:
+                  0 {key}
+                  1 {value}
+                handleSkewJoin: false
+                keys:
+                  0 [Column[key]]
+                  1 [Column[key]]
+                Position of Big Table: 1
+
+  Stage: Stage-5
+    Map Reduce
+      Alias -> Map Operator Tree:
+        srcpart 
+          TableScan
+            alias: srcpart
+            Map Join Operator
+              condition map:
+                   Inner Join 0 to 1
+              condition expressions:
+                0 {key}
+                1 {value}
+              handleSkewJoin: false
+              keys:
+                0 [Column[key]]
+                1 [Column[key]]
+              outputColumnNames: _col0, _col5
+              Position of Big Table: 1
+              Select Operator
+                expressions:
+                      expr: _col0
+                      type: string
+                      expr: _col5
+                      type: string
+                outputColumnNames: _col0, _col1
+                Select Operator
+                  expressions:
+                        expr: UDFToInteger(_col0)
+                        type: int
+                        expr: _col1
+                        type: string
+                  outputColumnNames: _col0, _col1
+                  File Output Operator
+                    compressed: false
+                    GlobalTableId: 1
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        name: default.dest1
+      Local Work:
+        Map Reduce Local Work
+
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        src 
+          TableScan
+            alias: src
+            Filter Operator
+              predicate:
+                  expr: (key > 100.0)
+                  type: boolean
+              Reduce Output Operator
+                key expressions:
+                      expr: key
+                      type: string
+                sort order: +
+                Map-reduce partition columns:
+                      expr: key
+                      type: string
+                tag: 0
+                value expressions:
+                      expr: key
+                      type: string
+        srcpart 
+          TableScan
+            alias: srcpart
+            Reduce Output Operator
+              key expressions:
+                    expr: key
+                    type: string
+              sort order: +
+              Map-reduce partition columns:
+                    expr: key
+                    type: string
+              tag: 1
+              value expressions:
+                    expr: value
+                    type: string
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+          condition expressions:
+            0 {VALUE._col0}
+            1 {VALUE._col1}
+          handleSkewJoin: false
+          outputColumnNames: _col0, _col5
+          Select Operator
+            expressions:
+                  expr: _col0
+                  type: string
+                  expr: _col5
+                  type: string
+            outputColumnNames: _col0, _col1
+            Select Operator
+              expressions:
+                    expr: UDFToInteger(_col0)
+                    type: int
+                    expr: _col1
+                    type: string
+              outputColumnNames: _col0, _col1
+              File Output Operator
+                compressed: false
+                GlobalTableId: 1
+                table:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.dest1
+
+
+PREHOOK: query: FROM src JOIN srcpart ON src.key = srcpart.key AND srcpart.ds = '2008-04-08' and src.key > 100
+INSERT OVERWRITE TABLE dest1 SELECT src.key, srcpart.value
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+PREHOOK: Output: default@dest1
+POSTHOOK: query: FROM src JOIN srcpart ON src.key = srcpart.key AND srcpart.ds = '2008-04-08' and src.key > 100
+INSERT OVERWRITE TABLE dest1 SELECT src.key, srcpart.value
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.c1 EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c2 SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: SELECT sum(hash(dest1.c1,dest1.c2)) FROM dest1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT sum(hash(dest1.c1,dest1.c2)) FROM dest1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+#### A masked pattern was here ####
+POSTHOOK: Lineage: dest1.c1 EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c2 SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+404554174174


