hadoop-hive-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From nzh...@apache.org
Subject svn commit: r935119 - in /hadoop/hive/trunk: ./ ql/src/java/org/apache/hadoop/hive/ql/ ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/test/org/apache/hadoop/hive/ql/exec/ ql/src/test/results/clientnegative/
Date Sat, 17 Apr 2010 03:50:43 GMT
Author: nzhang
Date: Sat Apr 17 03:50:43 2010
New Revision: 935119

URL: http://svn.apache.org/viewvc?rev=935119&view=rev
Log:
HIVE-1306. Clean up the JobScratchDir (Namit Jain via Ning Zhang)

Modified:
    hadoop/hive/trunk/CHANGES.txt
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Context.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/DriverContext.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ConditionalTask.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
    hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java
    hadoop/hive/trunk/ql/src/test/results/clientnegative/fs_default_name1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/fs_default_name2.q.out

Modified: hadoop/hive/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/CHANGES.txt?rev=935119&r1=935118&r2=935119&view=diff
==============================================================================
--- hadoop/hive/trunk/CHANGES.txt (original)
+++ hadoop/hive/trunk/CHANGES.txt Sat Apr 17 03:50:43 2010
@@ -181,6 +181,9 @@ Trunk -  Unreleased
     HIVE-1296. CLI set and set -v commands should dump properties in
     alphabetical order (John Sichi via Ning Zhang)
 
+    HIVE-1306. cleanup the JobScratchDir
+    (Namit Jain via Ning Zhang)
+
   OPTIMIZATIONS
 
   BUG FIXES

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Context.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Context.java?rev=935119&r1=935118&r2=935119&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Context.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Context.java Sat Apr 17 03:50:43
2010
@@ -64,6 +64,10 @@ public class Context {
   // "/tmp/"+System.getProperty("user.name")+"/hive"
   private Path MRScratchDir;
 
+  // all query specific directories are created as sub-directories of queryPath
+  private Path queryPath;
+
+
   // allScratchDirs contains all scratch directories including
   // localScratchDir and MRScratchDir.
   // The external scratch dirs will be also based on hive.exec.scratchdir.
@@ -82,18 +86,19 @@ public class Context {
 
   /**
    * Create a Context with a given executionId.  ExecutionId, together with
-   * user name and conf, will determine the temporary directory locations. 
+   * user name and conf, will determine the temporary directory locations.
    */
   public Context(HiveConf conf, String executionId) throws IOException {
     this.conf = conf;
     this.executionId = executionId;
     Path tmpPath = new Path(conf.getVar(HiveConf.ConfVars.SCRATCHDIR));
     scratchPath = tmpPath.toUri().getPath();
+    queryPath = new Path(conf.getVar(HiveConf.ConfVars.SCRATCHDIR), executionId);
   }
 
   /**
    * Set the context on whether the current query is an explain query.
-   * 
+   *
    * @param value
    *          true if the query is an explain query, false if not
    */
@@ -103,7 +108,7 @@ public class Context {
 
   /**
    * Find out whether the current query is an explain query.
-   * 
+   *
    * @return true if the query is an explain query, false if not
    */
   public boolean getExplain() {
@@ -114,14 +119,13 @@ public class Context {
    * Make a tmp directory for MR intermediate data If URI/Scheme are not
    * supplied - those implied by the default filesystem will be used (which will
    * typically correspond to hdfs instance on hadoop cluster).
-   * 
+   *
    * @param mkdir  if true, will make the directory. Will throw IOException if that fails.
    */
-  private static Path makeMRScratchDir(HiveConf conf, String executionId, boolean mkdir)
+  private Path makeMRScratchDir(HiveConf conf, boolean mkdir)
       throws IOException {
 
-    Path dir = FileUtils.makeQualified(
-        new Path(conf.getVar(HiveConf.ConfVars.SCRATCHDIR), executionId), conf);
+    Path dir = FileUtils.makeQualified(queryPath, conf);
 
     if (mkdir) {
       FileSystem fs = dir.getFileSystem(conf);
@@ -136,11 +140,10 @@ public class Context {
    * Make a tmp directory on specified URI Currently will use the same path as
    * implied by SCRATCHDIR config variable.
    */
-  private static Path makeExternalScratchDir(HiveConf conf, String executionId,
-      boolean mkdir, URI extURI) throws IOException {
+  private Path makeExternalScratchDir(HiveConf conf, boolean mkdir, URI extURI)
+    throws IOException {
 
-    Path dir = new Path(extURI.getScheme(), extURI.getAuthority(),
-        conf.getVar(HiveConf.ConfVars.SCRATCHDIR) + Path.SEPARATOR + executionId);
+    Path dir = new Path(extURI.getScheme(), extURI.getAuthority(), queryPath.toString());
 
     if (mkdir) {
       FileSystem fs = dir.getFileSystem(conf);
@@ -153,7 +156,7 @@ public class Context {
 
   /**
    * Make a tmp directory for local file system.
-   * 
+   *
    * @param mkdir  if true, will make the directory. Will throw IOException if that fails.
    */
   private static Path makeLocalScratchDir(HiveConf conf, String executionId, boolean mkdir)
@@ -181,7 +184,7 @@ public class Context {
       String fileSystem = extURI.getScheme() + ":" + extURI.getAuthority();
       Path dir = externalScratchDirs.get(fileSystem);
       if (dir == null) {
-        dir = makeExternalScratchDir(conf, executionId, !explain, extURI);
+        dir = makeExternalScratchDir(conf, !explain, extURI);
         externalScratchDirs.put(fileSystem, dir);
       }
       return dir.toString();
@@ -196,7 +199,7 @@ public class Context {
   public String getMRScratchDir() {
     try {
       if (MRScratchDir == null) {
-        MRScratchDir = makeMRScratchDir(conf, executionId, !explain);
+        MRScratchDir = makeMRScratchDir(conf, !explain);
       }
       return MRScratchDir.toString();
     } catch (IOException e) {
@@ -264,7 +267,7 @@ public class Context {
   /**
    * Check if path is tmp path. the assumption is that all uri's relative to
    * scratchdir are temporary.
-   * 
+   *
    * @return true if a uri is a temporary uri for map-reduce intermediate data,
    *         false otherwise
    */
@@ -274,7 +277,7 @@ public class Context {
 
   /**
    * Get a path to store map-reduce intermediate data in.
-   * 
+   *
    * @return next available path for map-red intermediate data
    */
   public String getMRTmpFileURI() {
@@ -283,7 +286,7 @@ public class Context {
 
   /**
    * Get a tmp path on local host to store intermediate data.
-   * 
+   *
    * @return next available tmp path on local fs
    */
   public String getLocalTmpFileURI() {
@@ -292,7 +295,7 @@ public class Context {
 
   /**
    * Get a path to store tmp data destined for external URI.
-   * 
+   *
    * @param extURI
    *          external URI to which the tmp data has to be eventually moved
    * @return next available tmp path on the file system corresponding extURI
@@ -429,7 +432,7 @@ public class Context {
    * activities; for example, when we encounter a reference to a view, we switch
    * to a new stream for parsing the stored view definition from the catalog,
    * but we don't clobber the top-level stream in the context.
-   * 
+   *
    * @param tokenRewriteStream
    *          the stream being used
    */
@@ -451,7 +454,7 @@ public class Context {
    * Generate a unique executionId.  An executionId, together with user name and
    * the configuration, will determine the temporary locations of all intermediate
    * files.
-   * 
+   *
    * In the future, users can use the executionId to resume a query.
    */
   public static String generateExecutionId() {
@@ -462,4 +465,11 @@ public class Context {
     return executionId;
   }
 
+  public Path getQueryPath() {
+    return queryPath;
+  }
+
+  public void setQueryPath(Path queryPath) {
+    this.queryPath = queryPath;
+  }
 }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java?rev=935119&r1=935118&r2=935119&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java Sat Apr 17 03:50:43
2010
@@ -487,7 +487,7 @@ public class Driver implements CommandPr
      Queue<Task<? extends Serializable>> runnable = new LinkedList<Task<? extends Serializable>>();
       Map<TaskResult, TaskRunner> running = new HashMap<TaskResult, TaskRunner>();
 
-      DriverContext driverCxt = new DriverContext(runnable);
+      DriverContext driverCxt = new DriverContext(runnable, ctx);
 
       // Add root Tasks to runnable
 
@@ -731,7 +731,9 @@ public class Driver implements CommandPr
 
   public int close() {
     try {
-      ctx.clear();
+      if (ctx != null) {
+        ctx.clear();
+      }
     } catch (Exception e) {
      console.printError("FAILED: Hive Internal Error: " + Utilities.getNameMessage(e) + "\n"
           + org.apache.hadoop.util.StringUtils.stringifyException(e));

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/DriverContext.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/DriverContext.java?rev=935119&r1=935118&r2=935119&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/DriverContext.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/DriverContext.java Sat Apr 17
03:50:43 2010
@@ -22,6 +22,7 @@ import java.io.Serializable;
 import java.util.LinkedList;
 import java.util.Queue;
 
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.Task;
 
 /**
@@ -35,8 +36,16 @@ public class DriverContext {
   // how many jobs have been started
   int curJobNo;
 
-  public DriverContext(Queue<Task<? extends Serializable>> runnable) {
+  Context ctx;
+
+  public DriverContext() {
+    this.runnable = null;
+    this.ctx = null;
+  }
+
+  public DriverContext(Queue<Task<? extends Serializable>> runnable, Context ctx) {
     this.runnable = runnable;
+    this.ctx = ctx;
   }
 
   public Queue<Task<? extends Serializable>> getRunnable() {
@@ -45,7 +54,7 @@ public class DriverContext {
 
   /**
    * Checks if a task can be launched.
-   * 
+   *
    * @param tsk
    *          the task to be checked
    * @return true if the task is launchable, false otherwise
@@ -66,8 +75,11 @@ public class DriverContext {
     return curJobNo;
   }
 
+  public Context getCtx() {
+    return ctx;
+  }
+
   public void incCurJobNo(int amount) {
     this.curJobNo = this.curJobNo + amount;
   }
-
 }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ConditionalTask.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ConditionalTask.java?rev=935119&r1=935118&r2=935119&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ConditionalTask.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ConditionalTask.java Sat
Apr 17 03:50:43 2010
@@ -76,7 +76,7 @@ public class ConditionalTask extends Tas
   }
 
   @Override
-  public int execute() {
+  public int execute(DriverContext driverContext) {
     resTasks = resolver.getTasks(conf, resolverCtx);
     resolved = true;
     for (Task<? extends Serializable> tsk : getListTasks()) {
@@ -86,7 +86,7 @@ public class ConditionalTask extends Tas
             + Utilities.randGen.nextInt())
             + ", job is filtered out (removed at runtime).");
         if(tsk.isMapRedTask()) {
-          driverContext.incCurJobNo(1);          
+          driverContext.incCurJobNo(1);
         }
         if (tsk.getChildTasks() != null) {
           for (Task<? extends Serializable> child : tsk.getChildTasks()) {
@@ -186,7 +186,7 @@ public class ConditionalTask extends Tas
    * Add a dependent task on the current conditional task. The task will not be
    * a direct child of conditional task. Actually it will be added as child task
    * of associated tasks.
-   * 
+   *
    * @return true if the task got added false if it already existed
    */
   @Override

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java?rev=935119&r1=935118&r2=935119&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java Sat Apr 17
03:50:43 2010
@@ -28,6 +28,7 @@ import org.apache.hadoop.hive.ql.parse.L
 import org.apache.hadoop.hive.ql.plan.CopyWork;
 import org.apache.hadoop.hive.ql.plan.api.StageType;
 import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.hive.ql.DriverContext;
 
 /**
  * CopyTask implementation.
@@ -41,7 +42,7 @@ public class CopyTask extends Task<CopyW
   }
 
   @Override
-  public int execute() {
+  public int execute(DriverContext driverContext) {
     FileSystem dstFs = null;
     Path toPath = null;
     try {

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java?rev=935119&r1=935118&r2=935119&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java Sat Apr 17 03:50:43
2010
@@ -113,7 +113,7 @@ public class DDLTask extends Task<DDLWor
   }
 
   @Override
-  public int execute() {
+  public int execute(DriverContext driverContext) {
 
     // Create the db
     Hive db;

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java?rev=935119&r1=935118&r2=935119&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java Sat Apr 17
03:50:43 2010
@@ -509,7 +509,7 @@ public class ExecDriver extends Task<Map
    * Execute a query plan using Hadoop.
    */
   @Override
-  public int execute() {
+  public int execute(DriverContext driverContext) {
 
     success = true;
 
@@ -528,7 +528,12 @@ public class ExecDriver extends Task<Map
       throw new RuntimeException("Plan invalid, Reason: " + invalidReason);
     }
 
-    String hiveScratchDir = HiveConf.getVar(job, HiveConf.ConfVars.SCRATCHDIR);
+    String hiveScratchDir;
+    if (driverContext.getCtx() != null && driverContext.getCtx().getQueryPath() != null)
+      hiveScratchDir = driverContext.getCtx().getQueryPath().toString();
+    else
+      hiveScratchDir = HiveConf.getVar(job, HiveConf.ConfVars.SCRATCHDIR);
+
     String jobScratchDirStr = hiveScratchDir + File.separator
         + Utilities.randGen.nextInt();
     Path jobScratchDir = new Path(jobScratchDirStr);
@@ -647,7 +652,7 @@ public class ExecDriver extends Task<Map
     try {
       addInputPaths(job, work, emptyScratchDirStr);
 
-      Utilities.setMapRedWork(job, work);
+      Utilities.setMapRedWork(job, work, hiveScratchDir);
 
       // remove the pwd from conf file so that job tracker doesn't show this
       // logs
@@ -1044,7 +1049,7 @@ public class ExecDriver extends Task<Map
     MapredWork plan = Utilities.deserializeMapRedWork(pathData, conf);
     ExecDriver ed = new ExecDriver(plan, conf, isSilent);
 
-    int ret = ed.execute();
+    int ret = ed.execute(new DriverContext());
     if (ret != 0) {
       System.out.println("Job Failed");
       System.exit(2);

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java?rev=935119&r1=935118&r2=935119&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java Sat Apr
17 03:50:43 2010
@@ -37,6 +37,8 @@ import org.apache.hadoop.hive.ql.plan.Ex
 import org.apache.hadoop.hive.ql.plan.ExplainWork;
 import org.apache.hadoop.hive.ql.plan.api.StageType;
 import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.hive.ql.DriverContext;
+
 
 /**
  * ExplainTask implementation.
@@ -50,7 +52,7 @@ public class ExplainTask extends Task<Ex
   }
 
   @Override
-  public int execute() {
+  public int execute(DriverContext driverContext) {
 
     try {
       Path resFile = new Path(work.getResFile());

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java?rev=935119&r1=935118&r2=935119&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java Sat Apr 17
03:50:43 2010
@@ -77,7 +77,7 @@ public class FetchTask extends Task<Fetc
   }
 
   @Override
-  public int execute() {
+  public int execute(DriverContext driverContext) {
     assert false;
     return 0;
   }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java?rev=935119&r1=935118&r2=935119&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java Sat Apr
17 03:50:43 2010
@@ -56,7 +56,7 @@ public class FunctionTask extends Task<F
   }
 
   @Override
-  public int execute() {
+  public int execute(DriverContext driverContext) {
     CreateFunctionDesc createFunctionDesc = work.getCreateFunctionDesc();
     if (createFunctionDesc != null) {
       return createFunction(createFunctionDesc);

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java?rev=935119&r1=935118&r2=935119&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java Sat Apr 17
03:50:43 2010
@@ -33,6 +33,7 @@ import org.apache.hadoop.hive.ql.plan.Ma
 import org.apache.hadoop.hive.ql.plan.api.StageType;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.ql.DriverContext;
 
 /**
  * Alternate implementation (to ExecDriver) of spawning a mapreduce task that
@@ -52,7 +53,7 @@ public class MapRedTask extends Task<Map
   }
 
   @Override
-  public int execute() {
+  public int execute(DriverContext driverContext) {
 
     try {
       // enable assertion
@@ -80,7 +81,13 @@ public class MapRedTask extends Task<Map
       }
       // Generate the hiveConfArgs after potentially adding the jars
       String hiveConfArgs = ExecDriver.generateCmdLine(conf);
-      File scratchDir = new File(conf.getVar(HiveConf.ConfVars.SCRATCHDIR));
+      String hiveScratchDir;
+      if (driverContext.getCtx() != null && driverContext.getCtx().getQueryPath() != null)
+        hiveScratchDir = driverContext.getCtx().getQueryPath().toString();
+      else
+        hiveScratchDir = conf.getVar(HiveConf.ConfVars.SCRATCHDIR);
+
+      File scratchDir = new File(hiveScratchDir);
 
       // Check if the scratch directory exists. If not, create it.
       if (!scratchDir.exists()) {

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java?rev=935119&r1=935118&r2=935119&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java Sat Apr 17
03:50:43 2010
@@ -45,6 +45,7 @@ import org.apache.hadoop.hive.ql.plan.Lo
 import org.apache.hadoop.hive.ql.plan.MoveWork;
 import org.apache.hadoop.hive.ql.plan.api.StageType;
 import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.util.StringUtils;
 
 /**
@@ -59,7 +60,7 @@ public class MoveTask extends Task<MoveW
   }
 
   @Override
-  public int execute() {
+  public int execute(DriverContext driverContext) {
 
     try {
       // Do any hive related operations like moving tables and files

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java?rev=935119&r1=935118&r2=935119&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java Sat Apr 17 03:50:43
2010
@@ -94,7 +94,7 @@ public abstract class Task<T extends Ser
   /**
    * This method is called in the Driver on every task. It updates counters and
    * calls execute(), which is overridden in each task
-   * 
+   *
    * @return return value of execute()
    */
   public int executeTask() {
@@ -104,7 +104,7 @@ public abstract class Task<T extends Ser
       if (ss != null) {
         ss.getHiveHistory().logPlanProgress(queryPlan);
       }
-      int retval = execute();
+      int retval = execute(driverContext);
       this.setDone();
       if (ss != null) {
         ss.getHiveHistory().logPlanProgress(queryPlan);
@@ -118,15 +118,15 @@ public abstract class Task<T extends Ser
   /**
    * This method is overridden in each Task. TODO execute should return a
    * TaskHandle.
-   * 
+   *
    * @return status of executing the task
    */
-  protected abstract int execute();
+  protected abstract int execute(DriverContext driverContext);
 
   /**
    * Update the progress of the task within taskHandle and also dump the
    * progress information to the history file.
-   * 
+   *
    * @param taskHandle
    *          task handle returned by execute
    * @throws IOException
@@ -164,7 +164,7 @@ public abstract class Task<T extends Ser
   /**
    * Add a dependent task on the current task. Return if the dependency already
    * existed or is this a new one
-   * 
+   *
    * @return true if the task got added false if it already existed
    */
   public boolean addDependentTask(Task<? extends Serializable> dependent) {
@@ -187,7 +187,7 @@ public abstract class Task<T extends Ser
 
   /**
    * Remove the dependent task.
-   * 
+   *
    * @param dependent
    *          the task to remove
    */
@@ -284,7 +284,7 @@ public abstract class Task<T extends Ser
   /**
    * Should be overridden to return the type of the specific task among the
    * types in TaskType.
-   * 
+   *
    * @return TaskTypeType.* or -1 if not overridden
    */
   public int getType() {

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java?rev=935119&r1=935118&r2=935119&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java Sat Apr 17
03:50:43 2010
@@ -279,12 +279,11 @@ public final class Utilities {
 
   }
 
-  public static void setMapRedWork(Configuration job, MapredWork w) {
+  public static void setMapRedWork(Configuration job, MapredWork w, String hiveScratchDir) {
     try {
       // use the default file system of the job
       FileSystem fs = FileSystem.get(job);
-      Path planPath = new Path(HiveConf.getVar(job,
-          HiveConf.ConfVars.SCRATCHDIR), "plan." + randGen.nextInt());
+      Path planPath = new Path(hiveScratchDir, "plan." + randGen.nextInt());
       FSDataOutputStream out = fs.create(planPath);
       serializeMapRedWork(w, out);
       HiveConf.setVar(job, HiveConf.ConfVars.PLAN, planPath.toString());

Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java?rev=935119&r1=935118&r2=935119&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java Sat Apr 17
03:50:43 2010
@@ -87,7 +87,7 @@ public class TestPlan extends TestCase {
       // store into configuration
       JobConf job = new JobConf(TestPlan.class);
       job.set("fs.default.name", "file:///");
-      Utilities.setMapRedWork(job, mrwork);
+      Utilities.setMapRedWork(job, mrwork,"/tmp/" + System.getProperty("user.name") + "/hive");
       MapredWork mrwork2 = Utilities.getMapRedWork(job);
       Utilities.clearMapRedWork(job);
 

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/fs_default_name1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/fs_default_name1.q.out?rev=935119&r1=935118&r2=935119&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/fs_default_name1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/fs_default_name1.q.out Sat Apr 17
03:50:43 2010
@@ -1,15 +1,15 @@
 FAILED: Hive Internal Error: java.lang.RuntimeException(Error while making local scratch
directory - check filesystem config (java.net.URISyntaxException: Illegal character in scheme
name at index 0: 'http://www.example.com))
 java.lang.RuntimeException: Error while making local scratch directory - check filesystem
config (java.net.URISyntaxException: Illegal character in scheme name at index 0: 'http://www.example.com)
-	at org.apache.hadoop.hive.ql.Context.getLocalScratchDir(Context.java:222)
-	at org.apache.hadoop.hive.ql.Context.getLocalTmpFileURI(Context.java:290)
-	at org.apache.hadoop.hive.ql.parse.DDLSemanticAnalyzer.analyzeInternal(DDLSemanticAnalyzer.java:101)
-	at org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.analyze(BaseSemanticAnalyzer.java:128)
+	at org.apache.hadoop.hive.ql.Context.getLocalScratchDir(Context.java:225)
+	at org.apache.hadoop.hive.ql.Context.getLocalTmpFileURI(Context.java:293)
+	at org.apache.hadoop.hive.ql.parse.DDLSemanticAnalyzer.analyzeInternal(DDLSemanticAnalyzer.java:102)
+	at org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.analyze(BaseSemanticAnalyzer.java:126)
 	at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:304)
 	at org.apache.hadoop.hive.ql.Driver.run(Driver.java:377)
 	at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:138)
 	at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:197)
-	at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:502)
-	at org.apache.hadoop.hive.cli.TestNegativeCliDriver.testNegativeCliDriver_fs_default_name1(TestNegativeCliDriver.java:1564)
+	at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:504)
+	at org.apache.hadoop.hive.cli.TestNegativeCliDriver.testNegativeCliDriver_fs_default_name1(TestNegativeCliDriver.java:54)
 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
@@ -27,11 +27,11 @@ java.lang.RuntimeException: Error while 
 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.main(JUnitTestRunner.java:768)
 Caused by: java.lang.IllegalArgumentException
 	at java.net.URI.create(URI.java:842)
-	at org.apache.hadoop.fs.FileSystem.getDefaultUri(FileSystem.java:116)
-	at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:196)
-	at org.apache.hadoop.fs.FileSystem.getLocal(FileSystem.java:179)
-	at org.apache.hadoop.hive.ql.Context.makeLocalScratchDir(Context.java:162)
-	at org.apache.hadoop.hive.ql.Context.getLocalScratchDir(Context.java:216)
+	at org.apache.hadoop.fs.FileSystem.getDefaultUri(FileSystem.java:103)
+	at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:184)
+	at org.apache.hadoop.fs.FileSystem.getLocal(FileSystem.java:167)
+	at org.apache.hadoop.hive.ql.Context.makeLocalScratchDir(Context.java:165)
+	at org.apache.hadoop.hive.ql.Context.getLocalScratchDir(Context.java:219)
 	... 24 more
 Caused by: java.net.URISyntaxException: Illegal character in scheme name at index 0: 'http://www.example.com
 	at java.net.URI$Parser.fail(URI.java:2809)

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/fs_default_name2.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/fs_default_name2.q.out?rev=935119&r1=935118&r2=935119&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/fs_default_name2.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/fs_default_name2.q.out Sat Apr 17
03:50:43 2010
@@ -1,16 +1,16 @@
 FAILED: Hive Internal Error: java.lang.RuntimeException(Error while making MR scratch directory
- check filesystem config (java.net.URISyntaxException: Illegal character in scheme name at
index 0: 'http://www.example.com))
 java.lang.RuntimeException: Error while making MR scratch directory - check filesystem config
(java.net.URISyntaxException: Illegal character in scheme name at index 0: 'http://www.example.com)
-	at org.apache.hadoop.hive.ql.Context.getMRScratchDir(Context.java:205)
-	at org.apache.hadoop.hive.ql.Context.getMRTmpFileURI(Context.java:281)
-	at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.getMetaData(SemanticAnalyzer.java:795)
-	at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.analyzeInternal(SemanticAnalyzer.java:5912)
-	at org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.analyze(BaseSemanticAnalyzer.java:128)
+	at org.apache.hadoop.hive.ql.Context.getMRScratchDir(Context.java:208)
+	at org.apache.hadoop.hive.ql.Context.getMRTmpFileURI(Context.java:284)
+	at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.getMetaData(SemanticAnalyzer.java:806)
+	at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.analyzeInternal(SemanticAnalyzer.java:6041)
+	at org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.analyze(BaseSemanticAnalyzer.java:126)
 	at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:304)
 	at org.apache.hadoop.hive.ql.Driver.run(Driver.java:377)
 	at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:138)
 	at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:197)
-	at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:502)
-	at org.apache.hadoop.hive.cli.TestNegativeCliDriver.testNegativeCliDriver_fs_default_name2(TestNegativeCliDriver.java:1594)
+	at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:504)
+	at org.apache.hadoop.hive.cli.TestNegativeCliDriver.testNegativeCliDriver_fs_default_name2(TestNegativeCliDriver.java:54)
 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
@@ -28,10 +28,10 @@ java.lang.RuntimeException: Error while 
 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.main(JUnitTestRunner.java:768)
 Caused by: java.lang.IllegalArgumentException
 	at java.net.URI.create(URI.java:842)
-	at org.apache.hadoop.fs.FileSystem.getDefaultUri(FileSystem.java:116)
-	at org.apache.hadoop.hive.common.FileUtils.makeQualified(FileUtils.java:56)
-	at org.apache.hadoop.hive.ql.Context.makeMRScratchDir(Context.java:123)
-	at org.apache.hadoop.hive.ql.Context.getMRScratchDir(Context.java:199)
+	at org.apache.hadoop.fs.FileSystem.getDefaultUri(FileSystem.java:103)
+	at org.apache.hadoop.hive.common.FileUtils.makeQualified(FileUtils.java:58)
+	at org.apache.hadoop.hive.ql.Context.makeMRScratchDir(Context.java:128)
+	at org.apache.hadoop.hive.ql.Context.getMRScratchDir(Context.java:202)
 	... 25 more
 Caused by: java.net.URISyntaxException: Illegal character in scheme name at index 0: 'http://www.example.com
 	at java.net.URI$Parser.fail(URI.java:2809)



Mime
View raw message