hadoop-hive-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From na...@apache.org
Subject svn commit: r955419 - in /hadoop/hive/trunk: ./ cli/src/java/org/apache/hadoop/hive/cli/ common/src/java/org/apache/hadoop/hive/common/ common/src/java/org/apache/hadoop/hive/conf/ conf/ ql/src/java/org/apache/hadoop/hive/ql/ ql/src/java/org/apache/had...
Date Wed, 16 Jun 2010 22:50:37 GMT
Author: namit
Date: Wed Jun 16 22:50:36 2010
New Revision: 955419

URL: http://svn.apache.org/viewvc?rev=955419&view=rev
Log:
HIVE-543. Add local mode execution in hive
(Joydeep Sen Sarma via namit)


Added:
    hadoop/hive/trunk/conf/hive-exec-log4j.properties
Modified:
    hadoop/hive/trunk/CHANGES.txt
    hadoop/hive/trunk/build-common.xml
    hadoop/hive/trunk/build.xml
    hadoop/hive/trunk/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
    hadoop/hive/trunk/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
    hadoop/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Context.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
    hadoop/hive/trunk/ql/src/test/results/clientpositive/filter_join_breaktask.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join33.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/union22.q.out

Modified: hadoop/hive/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/CHANGES.txt?rev=955419&r1=955418&r2=955419&view=diff
==============================================================================
--- hadoop/hive/trunk/CHANGES.txt (original)
+++ hadoop/hive/trunk/CHANGES.txt Wed Jun 16 22:50:36 2010
@@ -93,6 +93,9 @@ Trunk -  Unreleased
     HIVE-1179. Add UDF array_contains
     (Arvind Prabhakar via namit)
 
+    HIVE-543. Add local mode execution in hive
+    (Joydeep Sen Sarma via namit)
+
     HIVE-1255. Add mathematical UDFs PI, E, degrees, radians, tan,
     sign, and atan.  (Edward Capriolo via jvs)
 

Modified: hadoop/hive/trunk/build-common.xml
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/build-common.xml?rev=955419&r1=955418&r2=955419&view=diff
==============================================================================
--- hadoop/hive/trunk/build-common.xml (original)
+++ hadoop/hive/trunk/build-common.xml Wed Jun 16 22:50:36 2010
@@ -52,6 +52,7 @@
   <property name="test.junit.output.usefile" value="true"/>
   <property name="minimr.query.files" value="join1.q,groupby1.q"/>
   <property name="test.silent" value="true"/>
+  <property name="test.serialize.qplan" value="true"/>
 
   <path id="test.classpath">
     <pathelement location="${test.build.classes}" />
@@ -208,7 +209,7 @@
     <fileset dir="${build.dir.hive}" includes="*/*.jar"/>
     <fileset dir="${hive.root}/lib" includes="*.jar"/>
     <fileset dir="${hive.root}/ql/lib" includes="*.jar"/>
-    <fileset dir="${build.dir.hive}/ivy/lib/metastore/default" includes="*.jar"  excludes="*hadoop*.jar" 
+    <fileset dir="${build.dir.hive}/ivy/lib/metastore/default" includes="*.jar"  excludes="*hadoop*.jar"
 	erroronmissingdir="false" />
   </path>
 

Modified: hadoop/hive/trunk/build.xml
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/build.xml?rev=955419&r1=955418&r2=955419&view=diff
==============================================================================
--- hadoop/hive/trunk/build.xml (original)
+++ hadoop/hive/trunk/build.xml Wed Jun 16 22:50:36 2010
@@ -115,7 +115,7 @@
     <fileset dir="${hive.root}" includes="hive-*.jar"/>
     <fileset dir="${hive.root}/lib" includes="*.jar"/>
     <fileset dir="${hive.root}/ql/lib" includes="*.jar"/>
-    <fileset dir="${build.dir.hive}/ivy/lib/metastore/default" includes="*.jar" 
+    <fileset dir="${build.dir.hive}/ivy/lib/metastore/default" includes="*.jar"
 	excludes="*hadoop*.jar" erroronmissingdir="false"/>
   </path>
 
@@ -253,7 +253,9 @@
         <filter token="VERSION" value="${version}"/>
       </filterset>
     </copy>
-    <copy file="${basedir}/conf/hive-log4j.properties" todir="${target.conf.dir}"/>
+    <copy todir="${target.conf.dir}">
+     <fileset dir="${basedir}/conf" includes="*.properties"/>
+    </copy>
     <!-- Create php thrift package -->
     <copy todir="${target.lib.dir}/php">
      <fileset dir="${hive.root}/service/lib/php" excludes="**.svn"/>
@@ -516,14 +518,14 @@
   <!-- ================================================================== -->
 
   <import file="${hive.root}/build-common.xml"/>
-  
+
   <target name="checkstyle" depends="ivy-retrieve-checkstyle,check-for-checkstyle"
           if="checkstyle.present"
           description="Run Checkstyle on source files">
     <taskdef resource="checkstyletask.properties">
       <classpath refid="checkstyle-classpath"/>
     </taskdef>
-    
+
     <mkdir dir="${checkstyle.build.dir}"/>
 
     <checkstyle config="${checkstyle.conf.dir}/checkstyle.xml"
@@ -542,11 +544,11 @@
           out="${checkstyle.build.dir}/checkstyle-errors.html"/>
 
   </target>
-  
+
   <target name="check-for-checkstyle">
     <available property="checkstyle.present" resource="checkstyletask.properties">
       <classpath refid="checkstyle-classpath"/>
-    </available>  	
+    </available>
   </target>
 
 </project>

Modified: hadoop/hive/trunk/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java?rev=955419&r1=955418&r2=955419&view=diff
==============================================================================
--- hadoop/hive/trunk/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java (original)
+++ hadoop/hive/trunk/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java Wed Jun 16 22:50:36 2010
@@ -53,7 +53,7 @@ import org.apache.hadoop.hive.shims.Shim
 
 /**
  * CliDriver.
- * 
+ *
  */
 public class CliDriver {
 
@@ -224,8 +224,7 @@ public class CliDriver {
     }
 
     // NOTE: It is critical to do this here so that log4j is reinitialized
-    // before
-    // any of the other core hive classes are loaded
+    // before any of the other core hive classes are loaded
     SessionState.initHiveLog4j();
 
     CliSessionState ss = new CliSessionState(new HiveConf(SessionState.class));

Modified: hadoop/hive/trunk/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/common/src/java/org/apache/hadoop/hive/common/FileUtils.java?rev=955419&r1=955418&r2=955419&view=diff
==============================================================================
--- hadoop/hive/trunk/common/src/java/org/apache/hadoop/hive/common/FileUtils.java (original)
+++ hadoop/hive/trunk/common/src/java/org/apache/hadoop/hive/common/FileUtils.java Wed Jun 16 22:50:36 2010
@@ -63,18 +63,22 @@ public final class FileUtils {
     String scheme = pathUri.getScheme();
     String authority = pathUri.getAuthority();
 
-    if (scheme != null && (authority != null || fsUri.getAuthority() == null)) {
-      return path;
-    }
+    // validate/fill-in scheme and authority. this follows logic
+    // identical to FileSystem.get(URI, conf) - but doesn't actually
+    // obtain a file system handle
 
     if (scheme == null) {
+      // no scheme - use default file system uri
       scheme = fsUri.getScheme();
-    }
-
-    if (authority == null) {
       authority = fsUri.getAuthority();
-      if (authority == null) {
-        authority = "";
+    } else {
+      if(authority == null) {
+        // no authority - use default one if it applies
+        if(scheme.equals(fsUri.getScheme()) &&
+           fsUri.getAuthority() != null)
+          authority = fsUri.getAuthority();
+        else
+          authority = "";
       }
     }
 

Modified: hadoop/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=955419&r1=955418&r2=955419&view=diff
==============================================================================
--- hadoop/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (original)
+++ hadoop/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Wed Jun 16 22:50:36 2010
@@ -145,6 +145,8 @@ public class HiveConf extends Configurat
 
     // session identifier
     HIVESESSIONID("hive.session.id", ""),
+    // whether session is running in silent mode or not
+    HIVESESSIONSILENT("hive.session.silent", false),
 
     // query being executed (multiple per session)
     HIVEQUERYSTRING("hive.query.string", ""),

Added: hadoop/hive/trunk/conf/hive-exec-log4j.properties
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/conf/hive-exec-log4j.properties?rev=955419&view=auto
==============================================================================
--- hadoop/hive/trunk/conf/hive-exec-log4j.properties (added)
+++ hadoop/hive/trunk/conf/hive-exec-log4j.properties Wed Jun 16 22:50:36 2010
@@ -0,0 +1,55 @@
+# Define some default values that can be overridden by system properties
+hive.root.logger=INFO,FA
+hive.log.dir=/tmp/${user.name}
+hive.log.file=${hive.query.id}.log
+
+# Define the root logger to the system property "hive.root.logger".
+log4j.rootLogger=${hive.root.logger}, EventCounter
+
+# Logging Threshold
+log4j.threshold=WARN
+
+#
+# File Appender
+#
+
+log4j.appender.FA=org.apache.log4j.FileAppender
+log4j.appender.FA.File=${hive.log.dir}/${hive.log.file}
+log4j.appender.FA.layout=org.apache.log4j.PatternLayout
+
+# Pattern format: Date LogLevel LoggerName LogMessage
+#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+# Debugging Pattern format
+log4j.appender.FA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
+
+
+#
+# console
+# Add "console" to rootlogger above if you want to use this
+#
+
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.target=System.err
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
+
+#custom logging levels
+#log4j.logger.xxx=DEBUG
+
+#
+# Event Counter Appender
+# Sends counts of logging messages at different severity levels to Hadoop Metrics.
+#
+log4j.appender.EventCounter=org.apache.hadoop.metrics.jvm.EventCounter
+
+
+log4j.category.DataNucleus=ERROR,FA
+log4j.category.Datastore=ERROR,FA
+log4j.category.Datastore.Schema=ERROR,FA
+log4j.category.JPOX.Datastore=ERROR,FA
+log4j.category.JPOX.Plugin=ERROR,FA
+log4j.category.JPOX.MetaData=ERROR,FA
+log4j.category.JPOX.Query=ERROR,FA
+log4j.category.JPOX.General=ERROR,FA
+log4j.category.JPOX.Enhancer=ERROR,FA
+

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Context.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Context.java?rev=955419&r1=955418&r2=955419&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Context.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Context.java Wed Jun 16 22:50:36 2010
@@ -51,26 +51,27 @@ public class Context {
   private int resDirFilesNum;
   boolean initialized;
 
+  // all query specific directories are created as sub-directories of queryPath
+  // this applies to all non-local (ie. hdfs) file system tmp folders
+  private Path queryScratchPath;
+
+
   // Path without a file system
-  // hive.exec.scratchdir: default: "/tmp/"+System.getProperty("user.name")+"/hive"
-  // Used for creating temporary path on external file systems
-  private String scratchPath;
-  // Path on the local file system
+  // Used for creating temporary directory on local file system
+  private String localScratchPath;
+
+
+  // Fully Qualified path on the local file system
   // System.getProperty("java.io.tmpdir") + Path.SEPARATOR
   // + System.getProperty("user.name") + Path.SEPARATOR + executionId
   private Path localScratchDir;
+
   // On the default FileSystem (usually HDFS):
   // also based on hive.exec.scratchdir which by default is
   // "/tmp/"+System.getProperty("user.name")+"/hive"
   private Path MRScratchDir;
 
-  // all query specific directories are created as sub-directories of queryPath
-  private Path queryPath;
-
-
-  // allScratchDirs contains all scratch directories including
-  // localScratchDir and MRScratchDir.
-  // The external scratch dirs will be also based on hive.exec.scratchdir.
+  // Keeps track of scratch directories created for different scheme/authority
   private final Map<String, Path> externalScratchDirs = new HashMap<String, Path>();
 
   private HiveConf conf;
@@ -91,9 +92,12 @@ public class Context {
   public Context(HiveConf conf, String executionId) throws IOException {
     this.conf = conf;
     this.executionId = executionId;
-    Path tmpPath = new Path(conf.getVar(HiveConf.ConfVars.SCRATCHDIR));
-    scratchPath = tmpPath.toUri().getPath();
-    queryPath = new Path(conf.getVar(HiveConf.ConfVars.SCRATCHDIR), executionId);
+
+    localScratchPath = System.getProperty("java.io.tmpdir")
+      + Path.SEPARATOR + System.getProperty("user.name") + Path.SEPARATOR
+      + executionId;
+
+    queryScratchPath = new Path(conf.getVar(HiveConf.ConfVars.SCRATCHDIR), executionId);
   }
 
   /**
@@ -125,7 +129,7 @@ public class Context {
   private Path makeMRScratchDir(HiveConf conf, boolean mkdir)
       throws IOException {
 
-    Path dir = FileUtils.makeQualified(queryPath, conf);
+    Path dir = FileUtils.makeQualified(queryScratchPath, conf);
 
     if (mkdir) {
       FileSystem fs = dir.getFileSystem(conf);
@@ -143,7 +147,8 @@ public class Context {
   private Path makeExternalScratchDir(HiveConf conf, boolean mkdir, URI extURI)
     throws IOException {
 
-    Path dir = new Path(extURI.getScheme(), extURI.getAuthority(), queryPath.toString());
+    Path dir = new Path(extURI.getScheme(), extURI.getAuthority(),
+                        queryScratchPath.toUri().getPath());
 
     if (mkdir) {
       FileSystem fs = dir.getFileSystem(conf);
@@ -159,13 +164,11 @@ public class Context {
    *
    * @param mkdir  if true, will make the directory. Will throw IOException if that fails.
    */
-  private static Path makeLocalScratchDir(HiveConf conf, String executionId, boolean mkdir)
+  private Path makeLocalScratchDir(boolean mkdir)
       throws IOException {
 
     FileSystem fs = FileSystem.getLocal(conf);
-    Path dir = fs.makeQualified(new Path(System.getProperty("java.io.tmpdir")
-        + Path.SEPARATOR + System.getProperty("user.name") + Path.SEPARATOR
-        + executionId));
+    Path dir = fs.makeQualified(new Path(localScratchPath));
 
     if (mkdir) {
       if (!fs.mkdirs(dir)) {
@@ -198,6 +201,11 @@ public class Context {
    */
   public String getMRScratchDir() {
     try {
+      // if we are executing entirely on the client side - then
+      // just (re)use the local scratch directory
+      if(isLocalOnlyExecutionMode())
+        return getLocalScratchDir();
+
       if (MRScratchDir == null) {
         MRScratchDir = makeMRScratchDir(conf, !explain);
       }
@@ -216,7 +224,7 @@ public class Context {
   public String getLocalScratchDir() {
     try {
       if (localScratchDir == null) {
-        localScratchDir = makeLocalScratchDir(conf, executionId, true);
+        localScratchDir = makeLocalScratchDir(true);
       }
       return localScratchDir.toString();
     } catch (IOException e) {
@@ -272,7 +280,7 @@ public class Context {
    *         false otherwise
    */
   public boolean isMRTmpFileURI(String uriStr) {
-    return (uriStr.indexOf(scratchPath) != -1);
+    return (uriStr.indexOf(executionId) != -1);
   }
 
   /**
@@ -466,10 +474,16 @@ public class Context {
   }
 
   public Path getQueryPath() {
-    return queryPath;
+    return queryScratchPath;
   }
 
-  public void setQueryPath(Path queryPath) {
-    this.queryPath = queryPath;
+  /**
+   * Does Hive want to run tasks entirely on the local machine
+   * (where the query is being compiled)?
+   *
+   * Today this translates into running hadoop jobs locally
+   */
+  public boolean isLocalOnlyExecutionMode() {
+    return conf.getVar(HiveConf.ConfVars.HADOOPJT).equals("local");
   }
 }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java?rev=955419&r1=955418&r2=955419&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java Wed Jun 16 22:50:36 2010
@@ -316,25 +316,27 @@ public class Driver implements CommandPr
       // get the output schema
       schema = getSchema(sem, conf);
 
-      // Serialize the query plan
-      //   get temp file name and remove file:
-      String queryPlanFileName = ctx.getLocalScratchDir() + Path.SEPARATOR_CHAR
+      // test Only - serialize the query plan and deserialize it
+      if("true".equalsIgnoreCase(System.getProperty("test.serialize.qplan"))) {
+
+        String queryPlanFileName = ctx.getLocalScratchDir() + Path.SEPARATOR_CHAR
           + "queryplan.xml";
-      LOG.info("query plan = " + queryPlanFileName);
-      queryPlanFileName = new Path(queryPlanFileName).toUri().getPath();
+        LOG.info("query plan = " + queryPlanFileName);
+        queryPlanFileName = new Path(queryPlanFileName).toUri().getPath();
 
-      //   serialize the queryPlan
-      FileOutputStream fos = new FileOutputStream(queryPlanFileName);
-      Utilities.serializeQueryPlan(plan, fos);
-      fos.close();
-
-      //   deserialize the queryPlan
-      FileInputStream fis = new FileInputStream(queryPlanFileName);
-      QueryPlan newPlan = Utilities.deserializeQueryPlan(fis, conf);
-      fis.close();
+        //   serialize the queryPlan
+        FileOutputStream fos = new FileOutputStream(queryPlanFileName);
+        Utilities.serializeQueryPlan(plan, fos);
+        fos.close();
+
+        //   deserialize the queryPlan
+        FileInputStream fis = new FileInputStream(queryPlanFileName);
+        QueryPlan newPlan = Utilities.deserializeQueryPlan(fis, conf);
+        fis.close();
 
-      // Use the deserialized plan
-      plan = newPlan;
+        // Use the deserialized plan
+        plan = newPlan;
+      }
 
       // initialize FetchTask right here
       if (plan.getFetchTask() != null) {

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java?rev=955419&r1=955418&r2=955419&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java Wed Jun 16 22:50:36 2010
@@ -26,6 +26,7 @@ import java.io.Serializable;
 import java.io.UnsupportedEncodingException;
 import java.net.URI;
 import java.net.URLDecoder;
+import java.net.URL;
 import java.net.URLEncoder;
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
@@ -42,6 +43,7 @@ import java.util.Set;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.LogFactory;
+import org.apache.commons.logging.Log;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.fs.FileStatus;
@@ -78,6 +80,8 @@ import org.apache.hadoop.mapred.RunningJ
 import org.apache.hadoop.mapred.TaskCompletionEvent;
 import org.apache.log4j.BasicConfigurator;
 import org.apache.log4j.varia.NullAppender;
+import org.apache.log4j.LogManager;
+import org.apache.log4j.PropertyConfigurator;
 
 /**
  * ExecDriver.
@@ -955,28 +959,63 @@ public class ExecDriver extends Task<Map
   }
 
   private static void printUsage() {
-    System.out
+    System.err
         .println("ExecDriver -plan <plan-file> [-jobconf k1=v1 [-jobconf k2=v2] ...] "
         + "[-files <file1>[,<file2>] ...]");
     System.exit(1);
   }
 
+  /**
+   * We are running the hadoop job via a sub-command. This typically
+   * happens when we are running jobs in local mode. The log4j in this
+   * mode is controlled as follows:
+   * 1. if the admin provides a log4j properties file especially for
+   *    execution mode - then we pick that up
+   * 2. otherwise - we default to the regular hive log4j properties if
+   *    one is supplied
+   * 3. if none of the above two apply - we don't do anything - the log4j
+   *    properties would likely be determined by hadoop.
+   *
+   * The intention behind providing a separate option #1 is to be able to
+   * collect hive run time logs generated in local mode in a separate
+   * (centralized) location if desired. This mimics the behavior of hive
+   * run time logs when running against a hadoop cluster where they are available
+   * on the tasktracker nodes.
+   */
+
+  private static void setupChildLog4j(Configuration conf) {
+    URL hive_l4j = ExecDriver.class.getClassLoader().getResource
+      (SessionState.HIVE_EXEC_L4J);
+    if(hive_l4j == null)
+      hive_l4j = ExecDriver.class.getClassLoader().getResource
+      (SessionState.HIVE_L4J);
+
+    if (hive_l4j != null) {
+        // setting queryid so that log4j configuration can use it to generate
+        // per query log file
+        System.setProperty
+          (HiveConf.ConfVars.HIVEQUERYID.toString(),
+           HiveConf.getVar(conf, HiveConf.ConfVars.HIVEQUERYID));
+        LogManager.resetConfiguration();
+        PropertyConfigurator.configure(hive_l4j);
+      }
+  }
+
   public static void main(String[] args) throws IOException, HiveException {
 
     String planFileName = null;
     ArrayList<String> jobConfArgs = new ArrayList<String>();
-    boolean isSilent = false;
+    boolean noLog = false;
     String files = null;
 
     try {
       for (int i = 0; i < args.length; i++) {
         if (args[i].equals("-plan")) {
           planFileName = args[++i];
-          System.out.println("plan = " + planFileName);
         } else if (args[i].equals("-jobconf")) {
           jobConfArgs.add(args[++i]);
-        } else if (args[i].equals("-silent")) {
-          isSilent = true;
+        } else if (args[i].equals("-nolog")) {
+          noLog = true;
         } else if (args[i].equals("-files")) {
           files = args[++i];
         }
@@ -986,26 +1025,18 @@ public class ExecDriver extends Task<Map
       printUsage();
     }
 
-    // If started from main(), and isSilent is on, we should not output
-    // any logs.
-    // To turn the error log on, please set -Dtest.silent=false
-    if (isSilent) {
-      BasicConfigurator.resetConfiguration();
-      BasicConfigurator.configure(new NullAppender());
-    }
-
-    if (planFileName == null) {
-      System.err.println("Must specify Plan File Name");
-      printUsage();
-    }
-
     JobConf conf = new JobConf(ExecDriver.class);
+    StringBuilder sb = new StringBuilder("JobConf:\n");
+
     for (String one : jobConfArgs) {
       int eqIndex = one.indexOf('=');
       if (eqIndex != -1) {
         try {
-          conf.set(one.substring(0, eqIndex), URLDecoder.decode(one
-              .substring(eqIndex + 1), "UTF-8"));
+          String key = one.substring(0, eqIndex);
+          String value = URLDecoder.decode(one.substring(eqIndex + 1),
+                                           "UTF-8");
+          conf.set(key, value);
+          sb.append(key).append("=").append(value).append("\n");
         } catch (UnsupportedEncodingException e) {
           System.err.println("Unexpected error " + e.getMessage()
               + " while encoding " + one.substring(eqIndex + 1));
@@ -1018,6 +1049,30 @@ public class ExecDriver extends Task<Map
       conf.set("tmpfiles", files);
     }
 
+    boolean isSilent = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVESESSIONSILENT);
+
+    if (noLog) {
+      // If started from main(), and noLog is on, we should not output
+      // any logs. To turn the log on, please set -Dtest.silent=false
+      BasicConfigurator.resetConfiguration();
+      BasicConfigurator.configure(new NullAppender());
+    } else {
+      setupChildLog4j(conf);
+    }
+
+    Log LOG = LogFactory.getLog(ExecDriver.class.getName());
+    LogHelper console = new LogHelper(LOG, isSilent);
+
+    if (planFileName == null) {
+      console.printError("Must specify Plan File Name");
+      printUsage();
+    }
+
+    console.printInfo("plan = " + planFileName);
+
+    // log the list of job conf parameters for reference
+    LOG.info(sb.toString());
+
     URI pathURI = (new Path(planFileName)).toUri();
     InputStream pathData;
     if (StringUtils.isEmpty(pathURI.getScheme())) {
@@ -1029,43 +1084,39 @@ public class ExecDriver extends Task<Map
       pathData = fs.open(new Path(planFileName));
     }
 
-    // workaround for hadoop-17 - libjars are not added to classpath. this
-    // affects local
-    // mode execution
-    boolean localMode = HiveConf.getVar(conf, HiveConf.ConfVars.HADOOPJT)
-        .equals("local");
-    if (localMode) {
-      String auxJars = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEAUXJARS);
-      String addedJars = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEADDEDJARS);
-      try {
-        // see also - code in CliDriver.java
-        ClassLoader loader = conf.getClassLoader();
-        if (StringUtils.isNotBlank(auxJars)) {
-          loader = Utilities.addToClassPath(loader, StringUtils.split(auxJars,
-              ","));
-        }
-        if (StringUtils.isNotBlank(addedJars)) {
-          loader = Utilities.addToClassPath(loader, StringUtils.split(
-              addedJars, ","));
-        }
-        conf.setClassLoader(loader);
-        // Also set this to the Thread ContextClassLoader, so new threads will
-        // inherit
-        // this class loader, and propagate into newly created Configurations by
-        // those
-        // new threads.
-        Thread.currentThread().setContextClassLoader(loader);
-      } catch (Exception e) {
-        throw new HiveException(e.getMessage(), e);
-      }
+    // this is workaround for hadoop-17 - libjars are not added to classpath of the
+    // child process. so we add it here explicitly
+
+    String auxJars = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEAUXJARS);
+    String addedJars = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEADDEDJARS);
+    try {
+      // see also - code in CliDriver.java
+      ClassLoader loader = conf.getClassLoader();
+      if (StringUtils.isNotBlank(auxJars)) {
+        loader = Utilities.addToClassPath(loader, StringUtils.split(auxJars,
+                                                                    ","));
+      }
+      if (StringUtils.isNotBlank(addedJars)) {
+        loader = Utilities.addToClassPath(loader, StringUtils.split(
+                                                                    addedJars, ","));
+      }
+      conf.setClassLoader(loader);
+      // Also set this to the Thread ContextClassLoader, so new threads will
+      // inherit
+      // this class loader, and propagate into newly created Configurations by
+      // those
+      // new threads.
+      Thread.currentThread().setContextClassLoader(loader);
+    } catch (Exception e) {
+      throw new HiveException(e.getMessage(), e);
     }
 
+
     MapredWork plan = Utilities.deserializeMapRedWork(pathData, conf);
     ExecDriver ed = new ExecDriver(plan, conf, isSilent);
 
     int ret = ed.execute(new DriverContext());
     if (ret != 0) {
-      System.out.println("Job Failed");
       System.exit(2);
     }
   }
@@ -1078,7 +1129,7 @@ public class ExecDriver extends Task<Map
     try {
       StringBuilder sb = new StringBuilder();
       Properties deltaP = hconf.getChangedProperties();
-      boolean localMode = hconf.getVar(HiveConf.ConfVars.HADOOPJT).equals(
+      boolean hadoopLocalMode = hconf.getVar(HiveConf.ConfVars.HADOOPJT).equals(
           "local");
       String hadoopSysDir = "mapred.system.dir";
       String hadoopWorkDir = "mapred.local.dir";
@@ -1086,7 +1137,7 @@ public class ExecDriver extends Task<Map
       for (Object one : deltaP.keySet()) {
         String oneProp = (String) one;
 
-        if (localMode
+        if (hadoopLocalMode
             && (oneProp.equals(hadoopSysDir) || oneProp.equals(hadoopWorkDir))) {
           continue;
         }
@@ -1102,9 +1153,9 @@ public class ExecDriver extends Task<Map
 
       // Multiple concurrent local mode job submissions can cause collisions in
       // working dirs
-      // Workaround is to rename map red working dir to a temp dir in such a
-      // case
-      if (localMode) {
+      // Workaround is to rename map red working dir to a temp dir in such cases
+
+      if (hadoopLocalMode) {
         sb.append("-jobconf ");
         sb.append(hadoopSysDir);
         sb.append("=");

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java?rev=955419&r1=955418&r2=955419&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java Wed Jun 16 22:50:36 2010
@@ -117,7 +117,7 @@ public class MapRedTask extends Task<Map
       Utilities.serializeMapRedWork(plan, out);
 
       String isSilent = "true".equalsIgnoreCase(System
-          .getProperty("test.silent")) ? "-silent" : "";
+          .getProperty("test.silent")) ? "-nolog" : "";
 
       String jarCmd;
       if (ShimLoader.getHadoopShims().usesJobShell()) {

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java?rev=955419&r1=955418&r2=955419&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java Wed Jun 16 22:50:36 2010
@@ -139,32 +139,22 @@ public final class Utilities {
     } finally {
       // where a single process works with multiple plans - we must clear
       // the cache before working with the next plan.
-      synchronized (gWorkMap) {
-        gWorkMap.remove(getJobName(job));
-      }
+      gWorkMap.remove(getJobName(job));
     }
   }
 
   public static MapredWork getMapRedWork(Configuration job) {
     MapredWork gWork = null;
     try {
-      synchronized (gWorkMap) {
-        gWork = gWorkMap.get(getJobName(job));
-      }
+      gWork = gWorkMap.get(getJobName(job));
       if (gWork == null) {
-        synchronized (Utilities.class) {
-          if (gWork != null) {
-            return (gWork);
-          }
-          InputStream in = new FileInputStream("HIVE_PLAN"
-              + sanitizedJobId(job));
-          MapredWork ret = deserializeMapRedWork(in, job);
-          gWork = ret;
-          gWork.initialize();
-          gWorkMap.put(getJobName(job), gWork);
-        }
+        InputStream in = new FileInputStream("HIVE_PLAN"
+                                             + sanitizedJobId(job));
+        MapredWork ret = deserializeMapRedWork(in, job);
+        gWork = ret;
+        gWork.initialize();
+        gWorkMap.put(getJobName(job), gWork);
       }
-
       return (gWork);
     } catch (Exception e) {
       e.printStackTrace();
@@ -281,23 +271,28 @@ public final class Utilities {
 
   public static void setMapRedWork(Configuration job, MapredWork w, String hiveScratchDir) {
     try {
-      // use the default file system of the job
-      FileSystem fs = FileSystem.get(job);
-      Path planPath = new Path(hiveScratchDir, "plan." + randGen.nextInt());
-      FSDataOutputStream out = fs.create(planPath);
-      serializeMapRedWork(w, out);
-      HiveConf.setVar(job, HiveConf.ConfVars.PLAN, planPath.toString());
-      // Set up distributed cache
-      DistributedCache.createSymlink(job);
-      String uriWithLink = planPath.toUri().toString() + "#HIVE_PLAN"
+
+      // Serialize the plan to the default hdfs instance
+      // Except for hadoop local mode execution where we should be
+      // able to get the plan directly from the cache
+
+      if(!HiveConf.getVar(job, HiveConf.ConfVars.HADOOPJT).equals("local")) {
+        // use the default file system of the job
+        FileSystem fs = FileSystem.get(job);
+        Path planPath = new Path(hiveScratchDir, "plan." + randGen.nextInt());
+        FSDataOutputStream out = fs.create(planPath);
+        serializeMapRedWork(w, out);
+        HiveConf.setVar(job, HiveConf.ConfVars.PLAN, planPath.toString());
+        // Set up distributed cache
+        DistributedCache.createSymlink(job);
+        String uriWithLink = planPath.toUri().toString() + "#HIVE_PLAN"
           + sanitizedJobId(job);
-      DistributedCache.addCacheFile(new URI(uriWithLink), job);
-      // Cache the object in this process too so lookups don't hit the file
-      // system
-      synchronized (Utilities.class) {
-        w.initialize();
-        gWorkMap.put(getJobName(job), w);
+        DistributedCache.addCacheFile(new URI(uriWithLink), job);
       }
+
+      // Cache the plan in this process
+      w.initialize();
+      gWorkMap.put(getJobName(job), w);
     } catch (Exception e) {
       e.printStackTrace();
       throw new RuntimeException(e);

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java?rev=955419&r1=955418&r2=955419&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java Wed Jun 16 22:50:36 2010
@@ -97,10 +97,16 @@ public class SessionState {
   }
 
   public boolean getIsSilent() {
-    return isSilent;
+    if(conf != null)
+      return conf.getBoolVar(HiveConf.ConfVars.HIVESESSIONSILENT);
+    else
+      return isSilent;
   }
 
   public void setIsSilent(boolean isSilent) {
+    if(conf != null) {
+      conf.setBoolVar(HiveConf.ConfVars.HIVESESSIONSILENT, isSilent);
+    }
     this.isSilent = isSilent;
   }
 
@@ -110,6 +116,7 @@ public class SessionState {
 
   public SessionState(HiveConf conf) {
     this.conf = conf;
+    isSilent = conf.getBoolVar(HiveConf.ConfVars.HIVESESSIONSILENT);
     ls = new LineageState();
   }
 
@@ -194,6 +201,7 @@ public class SessionState {
   }
 
   public static final String HIVE_L4J = "hive-log4j.properties";
+  public static final String HIVE_EXEC_L4J = "hive-exec-log4j.properties";
 
   public static void initHiveLog4j() {
     // allow hive log4j to override any normal initialized one

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/filter_join_breaktask.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/filter_join_breaktask.q.out?rev=955419&r1=955418&r2=955419&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/filter_join_breaktask.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/filter_join_breaktask.q.out Wed Jun 16 22:50:36 2010
@@ -102,9 +102,9 @@ STAGE PLANS:
                         type: string
       Needs Tagging: true
       Path -> Alias:
-        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_0/build/ql/test/data/warehouse/filter_join_breaktask/ds=2008-04-08 [f, m]
+        file:/mnt/vol/devrs004.snc1/jssarma/projects/hive_trunk/build/ql/test/data/warehouse/filter_join_breaktask/ds=2008-04-08 [f, m]
       Path -> Partition:
-        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_0/build/ql/test/data/warehouse/filter_join_breaktask/ds=2008-04-08 
+        file:/mnt/vol/devrs004.snc1/jssarma/projects/hive_trunk/build/ql/test/data/warehouse/filter_join_breaktask/ds=2008-04-08 
           Partition
             base file name: ds=2008-04-08
             input format: org.apache.hadoop.mapred.TextInputFormat
@@ -117,13 +117,13 @@ STAGE PLANS:
               columns.types int:string
               file.inputformat org.apache.hadoop.mapred.TextInputFormat
               file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_0/build/ql/test/data/warehouse/filter_join_breaktask
+              location file:/mnt/vol/devrs004.snc1/jssarma/projects/hive_trunk/build/ql/test/data/warehouse/filter_join_breaktask
               name filter_join_breaktask
               partition_columns ds
               serialization.ddl struct filter_join_breaktask { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              transient_lastDdlTime 1270516061
+              transient_lastDdlTime 1276212172
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
@@ -134,13 +134,13 @@ STAGE PLANS:
                 columns.types int:string
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_0/build/ql/test/data/warehouse/filter_join_breaktask
+                location file:/mnt/vol/devrs004.snc1/jssarma/projects/hive_trunk/build/ql/test/data/warehouse/filter_join_breaktask
                 name filter_join_breaktask
                 partition_columns ds
                 serialization.ddl struct filter_join_breaktask { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                transient_lastDdlTime 1270516061
+                transient_lastDdlTime 1276212172
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: filter_join_breaktask
             name: filter_join_breaktask
@@ -171,7 +171,7 @@ STAGE PLANS:
                 File Output Operator
                   compressed: false
                   GlobalTableId: 0
-                  directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_0/build/ql/scratchdir/hive_2010-04-05_18-07-46_959_4055362695656574819/10002
+                  directory: file:/tmp/jssarma/hive_2010-06-10_16-22-55_084_1486227885854106344/10002
                   NumFilesPerFileSink: 1
                   table:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
@@ -226,26 +226,10 @@ STAGE PLANS:
                         type: string
       Needs Tagging: true
       Path -> Alias:
-        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_0/build/ql/scratchdir/hive_2010-04-05_18-07-46_959_4055362695656574819/10002 [$INTNAME]
-        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_0/build/ql/test/data/warehouse/filter_join_breaktask/ds=2008-04-08 [g]
+        file:/mnt/vol/devrs004.snc1/jssarma/projects/hive_trunk/build/ql/test/data/warehouse/filter_join_breaktask/ds=2008-04-08 [g]
+        file:/tmp/jssarma/hive_2010-06-10_16-22-55_084_1486227885854106344/10002 [$INTNAME]
       Path -> Partition:
-        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_0/build/ql/scratchdir/hive_2010-04-05_18-07-46_959_4055362695656574819/10002 
-          Partition
-            base file name: 10002
-            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-            properties:
-              columns _col0,_col4
-              columns.types int,string
-              escape.delim \
-          
-              input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-              properties:
-                columns _col0,_col4
-                columns.types int,string
-                escape.delim \
-        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_0/build/ql/test/data/warehouse/filter_join_breaktask/ds=2008-04-08 
+        file:/mnt/vol/devrs004.snc1/jssarma/projects/hive_trunk/build/ql/test/data/warehouse/filter_join_breaktask/ds=2008-04-08 
           Partition
             base file name: ds=2008-04-08
             input format: org.apache.hadoop.mapred.TextInputFormat
@@ -258,13 +242,13 @@ STAGE PLANS:
               columns.types int:string
               file.inputformat org.apache.hadoop.mapred.TextInputFormat
               file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_0/build/ql/test/data/warehouse/filter_join_breaktask
+              location file:/mnt/vol/devrs004.snc1/jssarma/projects/hive_trunk/build/ql/test/data/warehouse/filter_join_breaktask
               name filter_join_breaktask
               partition_columns ds
               serialization.ddl struct filter_join_breaktask { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              transient_lastDdlTime 1270516061
+              transient_lastDdlTime 1276212172
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
@@ -275,16 +259,32 @@ STAGE PLANS:
                 columns.types int:string
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_0/build/ql/test/data/warehouse/filter_join_breaktask
+                location file:/mnt/vol/devrs004.snc1/jssarma/projects/hive_trunk/build/ql/test/data/warehouse/filter_join_breaktask
                 name filter_join_breaktask
                 partition_columns ds
                 serialization.ddl struct filter_join_breaktask { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                transient_lastDdlTime 1270516061
+                transient_lastDdlTime 1276212172
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: filter_join_breaktask
             name: filter_join_breaktask
+        file:/tmp/jssarma/hive_2010-06-10_16-22-55_084_1486227885854106344/10002 
+          Partition
+            base file name: 10002
+            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+            properties:
+              columns _col0,_col4
+              columns.types int,string
+              escape.delim \
+          
+              input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+              properties:
+                columns _col0,_col4
+                columns.types int,string
+                escape.delim \
       Reduce Operator Tree:
         Join Operator
           condition map:
@@ -304,7 +304,7 @@ STAGE PLANS:
             File Output Operator
               compressed: false
               GlobalTableId: 0
-              directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_0/build/ql/scratchdir/hive_2010-04-05_18-07-46_959_4055362695656574819/10001
+              directory: file:/mnt/vol/devrs004.snc1/jssarma/projects/hive_trunk/build/ql/scratchdir/hive_2010-06-10_16-22-55_084_1486227885854106344/10001
               NumFilesPerFileSink: 1
               table:
                   input format: org.apache.hadoop.mapred.TextInputFormat
@@ -326,13 +326,13 @@ FROM filter_join_breaktask f JOIN filter
 JOIN filter_join_breaktask g ON(g.value = m.value AND g.ds='2008-04-08' AND m.ds='2008-04-08' AND m.value is not null AND m.value !='')
 PREHOOK: type: QUERY
 PREHOOK: Input: default@filter_join_breaktask@ds=2008-04-08
-PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_0/build/ql/scratchdir/hive_2010-04-05_18-07-47_676_7695840532447756618/10000
+PREHOOK: Output: file:/tmp/jssarma/hive_2010-06-10_16-22-55_434_9085757094579036575/10000
 POSTHOOK: query: SELECT f.key, g.value 
 FROM filter_join_breaktask f JOIN filter_join_breaktask m ON( f.key = m.key AND f.ds='2008-04-08' AND m.ds='2008-04-08' AND f.key is not null) 
 JOIN filter_join_breaktask g ON(g.value = m.value AND g.ds='2008-04-08' AND m.ds='2008-04-08' AND m.value is not null AND m.value !='')
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@filter_join_breaktask@ds=2008-04-08
-POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_0/build/ql/scratchdir/hive_2010-04-05_18-07-47_676_7695840532447756618/10000
+POSTHOOK: Output: file:/tmp/jssarma/hive_2010-06-10_16-22-55_434_9085757094579036575/10000
 POSTHOOK: Lineage: filter_join_breaktask PARTITION(ds=2008-04-08).key EXPRESSION [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: filter_join_breaktask PARTITION(ds=2008-04-08).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
 146	val_146

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/join33.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/join33.q.out?rev=955419&r1=955418&r2=955419&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/join33.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/join33.q.out Wed Jun 16 22:50:36 2010
@@ -45,7 +45,7 @@ STAGE PLANS:
               File Output Operator
                 compressed: false
                 GlobalTableId: 0
-                directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/scratchdir/hive_2010-04-05_18-19-24_388_7208657809969674493/10002
+                directory: file:/tmp/jssarma/hive_2010-06-10_16-21-45_664_3974168394039456921/10002
                 NumFilesPerFileSink: 1
                 table:
                     input format: org.apache.hadoop.mapred.SequenceFileInputFormat
@@ -81,7 +81,7 @@ STAGE PLANS:
                   File Output Operator
                     compressed: false
                     GlobalTableId: 0
-                    directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/scratchdir/hive_2010-04-05_18-19-24_388_7208657809969674493/10002
+                    directory: file:/tmp/jssarma/hive_2010-06-10_16-21-45_664_3974168394039456921/10002
                     NumFilesPerFileSink: 1
                     table:
                         input format: org.apache.hadoop.mapred.SequenceFileInputFormat
@@ -94,9 +94,9 @@ STAGE PLANS:
                     MultiFileSpray: false
       Needs Tagging: false
       Path -> Alias:
-        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/test/data/warehouse/src [y]
+        file:/mnt/vol/devrs004.snc1/jssarma/projects/hive_trunk/build/ql/test/data/warehouse/src [y]
       Path -> Partition:
-        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/test/data/warehouse/src 
+        file:/mnt/vol/devrs004.snc1/jssarma/projects/hive_trunk/build/ql/test/data/warehouse/src 
           Partition
             base file name: src
             input format: org.apache.hadoop.mapred.TextInputFormat
@@ -107,12 +107,12 @@ STAGE PLANS:
               columns.types string:string
               file.inputformat org.apache.hadoop.mapred.TextInputFormat
               file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/test/data/warehouse/src
+              location file:/mnt/vol/devrs004.snc1/jssarma/projects/hive_trunk/build/ql/test/data/warehouse/src
               name src
               serialization.ddl struct src { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              transient_lastDdlTime 1270516764
+              transient_lastDdlTime 1276212105
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
@@ -123,12 +123,12 @@ STAGE PLANS:
                 columns.types string:string
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/test/data/warehouse/src
+                location file:/mnt/vol/devrs004.snc1/jssarma/projects/hive_trunk/build/ql/test/data/warehouse/src
                 name src
                 serialization.ddl struct src { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                transient_lastDdlTime 1270516764
+                transient_lastDdlTime 1276212105
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: src
             name: src
@@ -136,7 +136,7 @@ STAGE PLANS:
   Stage: Stage-1
     Map Reduce
       Alias -> Map Operator Tree:
-        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/scratchdir/hive_2010-04-05_18-19-24_388_7208657809969674493/10002 
+        file:/tmp/jssarma/hive_2010-06-10_16-21-45_664_3974168394039456921/10002 
           Select Operator
             expressions:
                   expr: _col0
@@ -192,26 +192,10 @@ STAGE PLANS:
                           type: string
       Needs Tagging: true
       Path -> Alias:
-        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/scratchdir/hive_2010-04-05_18-19-24_388_7208657809969674493/10002 [file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/scratchdir/hive_2010-04-05_18-19-24_388_7208657809969674493/10002]
-        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [z]
+        file:/mnt/vol/devrs004.snc1/jssarma/projects/hive_trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [z]
+        file:/tmp/jssarma/hive_2010-06-10_16-21-45_664_3974168394039456921/10002 [file:/tmp/jssarma/hive_2010-06-10_16-21-45_664_3974168394039456921/10002]
       Path -> Partition:
-        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/scratchdir/hive_2010-04-05_18-19-24_388_7208657809969674493/10002 
-          Partition
-            base file name: 10002
-            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-            properties:
-              columns _col0,_col1,_col3
-              columns.types string,string,string
-              escape.delim \
-          
-              input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-              properties:
-                columns _col0,_col1,_col3
-                columns.types string,string,string
-                escape.delim \
-        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 
+        file:/mnt/vol/devrs004.snc1/jssarma/projects/hive_trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 
           Partition
             base file name: hr=11
             input format: org.apache.hadoop.mapred.TextInputFormat
@@ -225,13 +209,13 @@ STAGE PLANS:
               columns.types string:string
               file.inputformat org.apache.hadoop.mapred.TextInputFormat
               file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/test/data/warehouse/srcpart
+              location file:/mnt/vol/devrs004.snc1/jssarma/projects/hive_trunk/build/ql/test/data/warehouse/srcpart
               name srcpart
               partition_columns ds/hr
               serialization.ddl struct srcpart { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              transient_lastDdlTime 1270516762
+              transient_lastDdlTime 1276212103
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
@@ -242,16 +226,32 @@ STAGE PLANS:
                 columns.types string:string
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/test/data/warehouse/srcpart
+                location file:/mnt/vol/devrs004.snc1/jssarma/projects/hive_trunk/build/ql/test/data/warehouse/srcpart
                 name srcpart
                 partition_columns ds/hr
                 serialization.ddl struct srcpart { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                transient_lastDdlTime 1270516762
+                transient_lastDdlTime 1276212103
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: srcpart
             name: srcpart
+        file:/tmp/jssarma/hive_2010-06-10_16-21-45_664_3974168394039456921/10002 
+          Partition
+            base file name: 10002
+            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+            properties:
+              columns _col0,_col1,_col3
+              columns.types string,string,string
+              escape.delim \
+          
+              input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+              properties:
+                columns _col0,_col1,_col3
+                columns.types string,string,string
+                escape.delim \
       Reduce Operator Tree:
         Join Operator
           condition map:
@@ -273,7 +273,7 @@ STAGE PLANS:
             File Output Operator
               compressed: false
               GlobalTableId: 1
-              directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/scratchdir/hive_2010-04-05_18-19-24_388_7208657809969674493/10000
+              directory: file:/mnt/vol/devrs004.snc1/jssarma/projects/hive_trunk/build/ql/scratchdir/hive_2010-06-10_16-21-45_664_3974168394039456921/10000
               NumFilesPerFileSink: 1
               table:
                   input format: org.apache.hadoop.mapred.TextInputFormat
@@ -284,12 +284,12 @@ STAGE PLANS:
                     columns.types string:string:string
                     file.inputformat org.apache.hadoop.mapred.TextInputFormat
                     file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/test/data/warehouse/dest_j1
+                    location file:/mnt/vol/devrs004.snc1/jssarma/projects/hive_trunk/build/ql/test/data/warehouse/dest_j1
                     name dest_j1
                     serialization.ddl struct dest_j1 { string key, string value, string val2}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    transient_lastDdlTime 1270516764
+                    transient_lastDdlTime 1276212105
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: dest_j1
               TotalFiles: 1
@@ -299,7 +299,7 @@ STAGE PLANS:
     Move Operator
       tables:
           replace: true
-          source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/scratchdir/hive_2010-04-05_18-19-24_388_7208657809969674493/10000
+          source: file:/mnt/vol/devrs004.snc1/jssarma/projects/hive_trunk/build/ql/scratchdir/hive_2010-06-10_16-21-45_664_3974168394039456921/10000
           table:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -309,15 +309,15 @@ STAGE PLANS:
                 columns.types string:string:string
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/test/data/warehouse/dest_j1
+                location file:/mnt/vol/devrs004.snc1/jssarma/projects/hive_trunk/build/ql/test/data/warehouse/dest_j1
                 name dest_j1
                 serialization.ddl struct dest_j1 { string key, string value, string val2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                transient_lastDdlTime 1270516764
+                transient_lastDdlTime 1276212105
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: dest_j1
-          tmp directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/scratchdir/hive_2010-04-05_18-19-24_388_7208657809969674493/10001
+          tmp directory: file:/mnt/vol/devrs004.snc1/jssarma/projects/hive_trunk/build/ql/scratchdir/hive_2010-06-10_16-21-45_664_3974168394039456921/10001
 
 
 PREHOOK: query: INSERT OVERWRITE TABLE dest_j1
@@ -344,11 +344,11 @@ POSTHOOK: Lineage: dest_j1.value SIMPLE 
 PREHOOK: query: select * from dest_j1 x order by x.key
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest_j1
-PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/scratchdir/hive_2010-04-05_18-19-34_205_4586174573474743594/10000
+PREHOOK: Output: file:/tmp/jssarma/hive_2010-06-10_16-21-51_616_8853310441674539967/10000
 POSTHOOK: query: select * from dest_j1 x order by x.key
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest_j1
-POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/scratchdir/hive_2010-04-05_18-19-34_205_4586174573474743594/10000
+POSTHOOK: Output: file:/tmp/jssarma/hive_2010-06-10_16-21-51_616_8853310441674539967/10000
 POSTHOOK: Lineage: dest_j1.key SIMPLE [(src1)x.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: dest_j1.val2 EXPRESSION [(src)y.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: dest_j1.value SIMPLE [(srcpart)z.FieldSchema(name:hr, type:string, comment:null), ]

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/union22.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/union22.q.out?rev=955419&r1=955418&r2=955419&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/union22.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/union22.q.out Wed Jun 16 22:50:36 2010
@@ -126,7 +126,7 @@ STAGE PLANS:
                   File Output Operator
                     compressed: false
                     GlobalTableId: 0
-                    directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_2/build/ql/scratchdir/hive_2010-04-05_18-37-37_435_2882687629534670108/10002
+                    directory: file:/tmp/jssarma/hive_2010-06-10_16-21-01_038_1201658161362559492/10002
                     NumFilesPerFileSink: 1
                     table:
                         input format: org.apache.hadoop.mapred.SequenceFileInputFormat
@@ -181,7 +181,7 @@ STAGE PLANS:
                         File Output Operator
                           compressed: false
                           GlobalTableId: 0
-                          directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_2/build/ql/scratchdir/hive_2010-04-05_18-37-37_435_2882687629534670108/10002
+                          directory: file:/tmp/jssarma/hive_2010-06-10_16-21-01_038_1201658161362559492/10002
                           NumFilesPerFileSink: 1
                           table:
                               input format: org.apache.hadoop.mapred.SequenceFileInputFormat
@@ -194,9 +194,9 @@ STAGE PLANS:
                           MultiFileSpray: false
       Needs Tagging: false
       Path -> Alias:
-        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_2/build/ql/test/data/warehouse/dst_union22/ds=1 [null-subquery2:subq-subquery2:a]
+        file:/mnt/vol/devrs004.snc1/jssarma/projects/hive_trunk/build/ql/test/data/warehouse/dst_union22/ds=1 [null-subquery2:subq-subquery2:a]
       Path -> Partition:
-        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_2/build/ql/test/data/warehouse/dst_union22/ds=1 
+        file:/mnt/vol/devrs004.snc1/jssarma/projects/hive_trunk/build/ql/test/data/warehouse/dst_union22/ds=1 
           Partition
             base file name: ds=1
             input format: org.apache.hadoop.mapred.TextInputFormat
@@ -209,13 +209,13 @@ STAGE PLANS:
               columns.types string:string:string:string
               file.inputformat org.apache.hadoop.mapred.TextInputFormat
               file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_2/build/ql/test/data/warehouse/dst_union22
+              location file:/mnt/vol/devrs004.snc1/jssarma/projects/hive_trunk/build/ql/test/data/warehouse/dst_union22
               name dst_union22
               partition_columns ds
               serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              transient_lastDdlTime 1270517850
+              transient_lastDdlTime 1276212055
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
@@ -226,13 +226,13 @@ STAGE PLANS:
                 columns.types string:string:string:string
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_2/build/ql/test/data/warehouse/dst_union22
+                location file:/mnt/vol/devrs004.snc1/jssarma/projects/hive_trunk/build/ql/test/data/warehouse/dst_union22
                 name dst_union22
                 partition_columns ds
                 serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                transient_lastDdlTime 1270517850
+                transient_lastDdlTime 1276212055
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: dst_union22
             name: dst_union22
@@ -240,7 +240,7 @@ STAGE PLANS:
   Stage: Stage-3
     Map Reduce
       Alias -> Map Operator Tree:
-        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_2/build/ql/scratchdir/hive_2010-04-05_18-37-37_435_2882687629534670108/10002 
+        file:/tmp/jssarma/hive_2010-06-10_16-21-01_038_1201658161362559492/10002 
           Select Operator
             expressions:
                   expr: _col0
@@ -283,7 +283,7 @@ STAGE PLANS:
                     File Output Operator
                       compressed: false
                       GlobalTableId: 1
-                      directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_2/build/ql/scratchdir/hive_2010-04-05_18-37-37_435_2882687629534670108/10000
+                      directory: file:/mnt/vol/devrs004.snc1/jssarma/projects/hive_trunk/build/ql/scratchdir/hive_2010-06-10_16-21-01_038_1201658161362559492/10000
                       NumFilesPerFileSink: 1
                       table:
                           input format: org.apache.hadoop.mapred.TextInputFormat
@@ -294,13 +294,13 @@ STAGE PLANS:
                             columns.types string:string:string:string
                             file.inputformat org.apache.hadoop.mapred.TextInputFormat
                             file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                            location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_2/build/ql/test/data/warehouse/dst_union22
+                            location file:/mnt/vol/devrs004.snc1/jssarma/projects/hive_trunk/build/ql/test/data/warehouse/dst_union22
                             name dst_union22
                             partition_columns ds
                             serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4}
                             serialization.format 1
                             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                            transient_lastDdlTime 1270517850
+                            transient_lastDdlTime 1276212055
                           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                           name: dst_union22
                       TotalFiles: 1
@@ -344,7 +344,7 @@ STAGE PLANS:
                       File Output Operator
                         compressed: false
                         GlobalTableId: 1
-                        directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_2/build/ql/scratchdir/hive_2010-04-05_18-37-37_435_2882687629534670108/10000
+                        directory: file:/mnt/vol/devrs004.snc1/jssarma/projects/hive_trunk/build/ql/scratchdir/hive_2010-06-10_16-21-01_038_1201658161362559492/10000
                         NumFilesPerFileSink: 1
                         table:
                             input format: org.apache.hadoop.mapred.TextInputFormat
@@ -355,39 +355,23 @@ STAGE PLANS:
                               columns.types string:string:string:string
                               file.inputformat org.apache.hadoop.mapred.TextInputFormat
                               file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                              location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_2/build/ql/test/data/warehouse/dst_union22
+                              location file:/mnt/vol/devrs004.snc1/jssarma/projects/hive_trunk/build/ql/test/data/warehouse/dst_union22
                               name dst_union22
                               partition_columns ds
                               serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4}
                               serialization.format 1
                               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                              transient_lastDdlTime 1270517850
+                              transient_lastDdlTime 1276212055
                             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                             name: dst_union22
                         TotalFiles: 1
                         MultiFileSpray: false
       Needs Tagging: false
       Path -> Alias:
-        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_2/build/ql/scratchdir/hive_2010-04-05_18-37-37_435_2882687629534670108/10002 [file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_2/build/ql/scratchdir/hive_2010-04-05_18-37-37_435_2882687629534670108/10002]
-        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_2/build/ql/test/data/warehouse/dst_union22_delta/ds=1 [null-subquery1:subq-subquery1:dst_union22_delta]
+        file:/mnt/vol/devrs004.snc1/jssarma/projects/hive_trunk/build/ql/test/data/warehouse/dst_union22_delta/ds=1 [null-subquery1:subq-subquery1:dst_union22_delta]
+        file:/tmp/jssarma/hive_2010-06-10_16-21-01_038_1201658161362559492/10002 [file:/tmp/jssarma/hive_2010-06-10_16-21-01_038_1201658161362559492/10002]
       Path -> Partition:
-        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_2/build/ql/scratchdir/hive_2010-04-05_18-37-37_435_2882687629534670108/10002 
-          Partition
-            base file name: 10002
-            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-            properties:
-              columns _col0,_col1,_col8,_col9
-              columns.types string,string,string,string
-              escape.delim \
-          
-              input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-              properties:
-                columns _col0,_col1,_col8,_col9
-                columns.types string,string,string,string
-                escape.delim \
-        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_2/build/ql/test/data/warehouse/dst_union22_delta/ds=1 
+        file:/mnt/vol/devrs004.snc1/jssarma/projects/hive_trunk/build/ql/test/data/warehouse/dst_union22_delta/ds=1 
           Partition
             base file name: ds=1
             input format: org.apache.hadoop.mapred.TextInputFormat
@@ -400,13 +384,13 @@ STAGE PLANS:
               columns.types string:string:string:string:string:string
               file.inputformat org.apache.hadoop.mapred.TextInputFormat
               file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_2/build/ql/test/data/warehouse/dst_union22_delta
+              location file:/mnt/vol/devrs004.snc1/jssarma/projects/hive_trunk/build/ql/test/data/warehouse/dst_union22_delta
               name dst_union22_delta
               partition_columns ds
               serialization.ddl struct dst_union22_delta { string k0, string k1, string k2, string k3, string k4, string k5}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              transient_lastDdlTime 1270517850
+              transient_lastDdlTime 1276212055
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
@@ -417,16 +401,32 @@ STAGE PLANS:
                 columns.types string:string:string:string:string:string
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_2/build/ql/test/data/warehouse/dst_union22_delta
+                location file:/mnt/vol/devrs004.snc1/jssarma/projects/hive_trunk/build/ql/test/data/warehouse/dst_union22_delta
                 name dst_union22_delta
                 partition_columns ds
                 serialization.ddl struct dst_union22_delta { string k0, string k1, string k2, string k3, string k4, string k5}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                transient_lastDdlTime 1270517850
+                transient_lastDdlTime 1276212055
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: dst_union22_delta
             name: dst_union22_delta
+        file:/tmp/jssarma/hive_2010-06-10_16-21-01_038_1201658161362559492/10002 
+          Partition
+            base file name: 10002
+            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+            properties:
+              columns _col0,_col1,_col8,_col9
+              columns.types string,string,string,string
+              escape.delim \
+          
+              input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+              properties:
+                columns _col0,_col1,_col8,_col9
+                columns.types string,string,string,string
+                escape.delim \
 
   Stage: Stage-0
     Move Operator
@@ -434,7 +434,7 @@ STAGE PLANS:
           partition:
             ds 2
           replace: true
-          source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_2/build/ql/scratchdir/hive_2010-04-05_18-37-37_435_2882687629534670108/10000
+          source: file:/mnt/vol/devrs004.snc1/jssarma/projects/hive_trunk/build/ql/scratchdir/hive_2010-06-10_16-21-01_038_1201658161362559492/10000
           table:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -444,16 +444,16 @@ STAGE PLANS:
                 columns.types string:string:string:string
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_2/build/ql/test/data/warehouse/dst_union22
+                location file:/mnt/vol/devrs004.snc1/jssarma/projects/hive_trunk/build/ql/test/data/warehouse/dst_union22
                 name dst_union22
                 partition_columns ds
                 serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                transient_lastDdlTime 1270517850
+                transient_lastDdlTime 1276212055
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: dst_union22
-          tmp directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_2/build/ql/scratchdir/hive_2010-04-05_18-37-37_435_2882687629534670108/10001
+          tmp directory: file:/mnt/vol/devrs004.snc1/jssarma/projects/hive_trunk/build/ql/scratchdir/hive_2010-06-10_16-21-01_038_1201658161362559492/10001
 
 
 PREHOOK: query: insert overwrite table dst_union22 partition (ds='2')
@@ -503,11 +503,11 @@ POSTHOOK: Lineage: dst_union22_delta PAR
 PREHOOK: query: select * from dst_union22 where ds = '2' order by k1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dst_union22@ds=2
-PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_2/build/ql/scratchdir/hive_2010-04-05_18-37-46_228_5981306163392766995/10000
+PREHOOK: Output: file:/tmp/jssarma/hive_2010-06-10_16-21-06_919_5619601517950726872/10000
 POSTHOOK: query: select * from dst_union22 where ds = '2' order by k1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dst_union22@ds=2
-POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_2/build/ql/scratchdir/hive_2010-04-05_18-37-46_228_5981306163392766995/10000
+POSTHOOK: Output: file:/tmp/jssarma/hive_2010-06-10_16-21-06_919_5619601517950726872/10000
 POSTHOOK: Lineage: dst_union22 PARTITION(ds=1).k1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: dst_union22 PARTITION(ds=1).k2 SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: dst_union22 PARTITION(ds=1).k3 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]



Mime
View raw message