hadoop-hive-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From zs...@apache.org
Subject svn commit: r777853 - in /hadoop/hive/trunk: CHANGES.txt ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java
Date Sat, 23 May 2009 09:07:33 GMT
Author: zshao
Date: Sat May 23 09:07:33 2009
New Revision: 777853

URL: http://svn.apache.org/viewvc?rev=777853&view=rev
Log:
HIVE-500. Fix select from newly created table. (Yongqiang He via zshao)

Modified:
    hadoop/hive/trunk/CHANGES.txt
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java

Modified: hadoop/hive/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/CHANGES.txt?rev=777853&r1=777852&r2=777853&view=diff
==============================================================================
--- hadoop/hive/trunk/CHANGES.txt (original)
+++ hadoop/hive/trunk/CHANGES.txt Sat May 23 09:07:33 2009
@@ -163,6 +163,9 @@
     HIVE-497. Fix predicate pushdowns when all the columns are not
     selected in the query. (Prasad Chakka via athusoo)
 
+    HIVE-500. Fix select from newly created table.
+    (Yongqiang He via zshao)
+
 Release 0.3.1 - Unreleased
 
   INCOMPATIBLE CHANGES

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java?rev=777853&r1=777852&r2=777853&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java Sat May 23 09:07:33 2009
@@ -332,7 +332,6 @@
       throw new RuntimeException("Plan invalid, Reason: " + invalidReason);
     }
 
-    Utilities.setMapRedWork(job, work);
 
     String hiveScratchDir = HiveConf.getVar(job, HiveConf.ConfVars.SCRATCHDIR);
     Path jobScratchDir = new Path(hiveScratchDir + Utilities.randGen.nextInt());
@@ -365,6 +364,7 @@
 
     try {
       addInputPaths(job, work, hiveScratchDir);
+      Utilities.setMapRedWork(job, work);
 
       // remove the pwd from conf file so that job tracker doesn't show this logs
       String pwd = job.get(HiveConf.ConfVars.METASTOREPWD.varname);
@@ -688,13 +688,15 @@
         LOG.info("Changed input file to " + newPath.toString());
         
         // toggle the work
-        Map<String, ArrayList<String>> pathToAliases = work.getPathToAliases();
-        pathToAliases.put(newPath.toString(), pathToAliases.get(emptyFile));
+        LinkedHashMap<String, ArrayList<String>> pathToAliases = work.getPathToAliases();
+        pathToAliases.put(newPath.toUri().toString(), pathToAliases.get(emptyFile));
         pathToAliases.remove(emptyFile);
+        work.setPathToAliases(pathToAliases);
         
-        Map<String,partitionDesc> pathToPartitionInfo = work.getPathToPartitionInfo();
-        pathToPartitionInfo.put(newPath.toString(), pathToPartitionInfo.get(emptyFile));
+        LinkedHashMap<String,partitionDesc> pathToPartitionInfo = work.getPathToPartitionInfo();
+        pathToPartitionInfo.put(newPath.toUri().toString(), pathToPartitionInfo.get(emptyFile));
         pathToPartitionInfo.remove(emptyFile);
+        work.setPathToPartitionInfo(pathToPartitionInfo);
         
         String onefile = newPath.toString();
         RecordWriter recWriter = outFileFormat.newInstance().getHiveRecordWriter(job, newPath, Text.class, false, new Properties(), null);



Mime
View raw message