hive-commits mailing list archives
From: gunt...@apache.org
Subject: svn commit: r1554722 [1/3] - in /hive/branches/tez: ./ common/src/java/org/apache/hadoop/hive/conf/ conf/ hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/ hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/ ql/src/java/org/apache/...
Date: Thu, 02 Jan 2014 02:42:18 GMT
Author: gunther
Date: Thu Jan  2 02:42:15 2014
New Revision: 1554722

URL: http://svn.apache.org/r1554722
Log:
Merge latest trunk into branch. (Gunther Hagleitner)

Modified:
    hive/branches/tez/   (props changed)
    hive/branches/tez/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
    hive/branches/tez/conf/hive-default.xml.template
    hive/branches/tez/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatMultiOutputFormat.java
    hive/branches/tez/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java
    hive/branches/tez/pom.xml
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/BlockMergeTask.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/MergeWork.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanWork.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateMapper.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateWork.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/ZooKeeperHiveLockManager.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SimpleFetchOptimizer.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SortMergeJoinTaskDispatcher.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/AlterTablePartMergeFilesDesc.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/ArchiveWork.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverMergeFiles.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/CopyWork.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/FetchWork.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadDesc.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadMultiFilesDesc.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadTableDesc.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/MoveWork.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/TruncateTableDesc.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/session/LineageState.java
    hive/branches/tez/ql/src/test/results/clientpositive/binary_output_format.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/bucket1.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/bucket2.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/bucket3.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/bucket4.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/bucket5.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/bucketmapjoin1.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/bucketmapjoin2.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/bucketmapjoin3.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/bucketmapjoin4.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/bucketmapjoin5.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/bucketmapjoin_negative2.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/disable_merge_for_bucketing.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/groupby_map_ppr.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/groupby_map_ppr_multi_distinct.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/groupby_ppr.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/groupby_ppr_multi_distinct.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/groupby_sort_1.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/groupby_sort_6.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/groupby_sort_skew_1.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/input_part1.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/input_part2.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/join17.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/join26.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/join32.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/join32_lessSize.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/join33.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/join34.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/join35.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/join9.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/join_map_ppr.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/list_bucket_dml_10.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/load_dyn_part8.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/merge3.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/pcr.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/rand_partitionpruner2.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/reduce_deduplicate.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/sample1.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/sample2.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/sample4.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/sample5.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/sample6.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/sample7.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/smb_mapjoin_11.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/smb_mapjoin_12.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/stats0.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/stats11.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/stats3.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/union22.q.out
    hive/branches/tez/ql/src/test/results/compiler/plan/case_sensitivity.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/groupby1.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/input1.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/input2.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/input3.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/input4.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/input5.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/input6.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/input7.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/input9.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/input_testsequencefile.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/join1.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/join2.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/join3.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/sample2.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/sample3.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/sample4.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/sample5.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/sample6.q.xml
    hive/branches/tez/ql/src/test/results/compiler/plan/sample7.q.xml
    hive/branches/tez/service/src/java/org/apache/hive/service/server/HiveServer2.java

Propchange: hive/branches/tez/
------------------------------------------------------------------------------
  Merged /hive/trunk:r1554299-1554719

Modified: hive/branches/tez/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1554722&r1=1554721&r2=1554722&view=diff
==============================================================================
--- hive/branches/tez/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (original)
+++ hive/branches/tez/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Thu Jan  2 02:42:15 2014
@@ -768,6 +768,9 @@ public class HiveConf extends Configurat
     HIVE_DDL_OUTPUT_FORMAT("hive.ddl.output.format", null),
     HIVE_ENTITY_SEPARATOR("hive.entity.separator", "@"),
 
+    HIVE_SERVER2_MAX_START_ATTEMPTS("hive.server2.max.start.attempts", 30L,
+        new LongRangeValidator(0L, Long.MAX_VALUE)),
+
     // binary or http
     HIVE_SERVER2_TRANSPORT_MODE("hive.server2.transport.mode", "binary",
         new StringsValidator("binary", "http")),
@@ -1405,6 +1408,32 @@ public class HiveConf extends Configurat
     }
   }
 
+  public static class LongRangeValidator implements Validator {
+    private final long lower, upper;
+
+    public LongRangeValidator(long lower, long upper) {
+      this.lower = lower;
+      this.upper = upper;
+    }
+
+    @Override
+    public String validate(String value) {
+      try {
+        if(value == null) {
+          return "Value cannot be null";
+        }
+        value = value.trim();
+        long lvalue = Long.parseLong(value);
+        if (lvalue < lower || lvalue > upper) {
+          return "Invalid value  " + value + ", which should be in between " + lower + " and " + upper;
+        }
+      } catch (NumberFormatException e) {
+        return e.toString();
+      }
+      return null;
+    }
+  }
+
   public static class PatternValidator implements Validator {
     private final List<Pattern> expected = new ArrayList<Pattern>();
     private PatternValidator(String... values) {

Modified: hive/branches/tez/conf/hive-default.xml.template
URL: http://svn.apache.org/viewvc/hive/branches/tez/conf/hive-default.xml.template?rev=1554722&r1=1554721&r2=1554722&view=diff
==============================================================================
--- hive/branches/tez/conf/hive-default.xml.template (original)
+++ hive/branches/tez/conf/hive-default.xml.template Thu Jan  2 02:42:15 2014
@@ -877,12 +877,18 @@
   <description>Read from a binary stream and treat each hive.binary.record.max.length bytes as a record.
   The last record before the end of stream can have less than hive.binary.record.max.length bytes</description>
 </property>
-    
+
+<property>
+  <name>hive.server2.max.start.attempts</name>
+  <value>30</value>
+  <description>The number of times HiveServer2 will attempt to start before exiting. It sleeps 60 seconds between retries, so the default of 30 keeps trying for 30 minutes.</description>
+</property>
+
 <property>
   <name>hive.server2.transport.mode</name>
   <value>binary</value>
   <description>Server transport mode. "binary" or "http".</description>
-</property>    
+</property>
 
 <property>
   <name>hive.server2.thrift.http.port</name>
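
The HiveServer2.java change that reads this property appears in a later part of this commit email; below is a minimal sketch of the retry semantics the description implies, where startServer() is a hypothetical stand-in for the real startup call:

    // Sketch only: retry startup up to the configured number of attempts,
    // sleeping 60 seconds between tries. startServer() is hypothetical.
    static void startWithRetries(HiveConf hiveConf) throws Exception {
      long maxAttempts = hiveConf.getLongVar(HiveConf.ConfVars.HIVE_SERVER2_MAX_START_ATTEMPTS);
      for (long attempt = 1; ; ++attempt) {
        try {
          startServer();           // hypothetical: try to bring the server up
          return;                  // started successfully
        } catch (Exception e) {
          if (attempt >= maxAttempts) {
            throw e;               // give up after the configured number of attempts
          }
          Thread.sleep(60 * 1000); // 60-second pause between retries, per the description
        }
      }
    }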

Modified: hive/branches/tez/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatMultiOutputFormat.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatMultiOutputFormat.java?rev=1554722&r1=1554721&r2=1554722&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatMultiOutputFormat.java (original)
+++ hive/branches/tez/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatMultiOutputFormat.java Thu Jan  2 02:42:15 2014
@@ -377,13 +377,13 @@ public class TestHCatMultiOutputFormat {
     org.apache.hadoop.hive.ql.metadata.Table tbl = hive.getTable(database, table);
     FetchWork work;
     if (tbl.getPartCols().isEmpty()) {
-      work = new FetchWork(tbl.getDataLocation().toString(), Utilities.getTableDesc(tbl));
+      work = new FetchWork(new Path(tbl.getDataLocation()), Utilities.getTableDesc(tbl));
     } else {
       List<Partition> partitions = hive.getPartitions(tbl);
       List<PartitionDesc> partDesc = new ArrayList<PartitionDesc>();
-      List<String> partLocs = new ArrayList<String>();
+      List<Path> partLocs = new ArrayList<Path>();
       for (Partition part : partitions) {
-        partLocs.add(part.getLocation());
+        partLocs.add(part.getPartitionPath());
         partDesc.add(Utilities.getPartitionDesc(part));
       }
       work = new FetchWork(partLocs, partDesc, Utilities.getTableDesc(tbl));

Modified: hive/branches/tez/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java?rev=1554722&r1=1554721&r2=1554722&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java (original)
+++ hive/branches/tez/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java Thu Jan  2 02:42:15 2014
@@ -379,15 +379,15 @@ public class TestHCatMultiOutputFormat {
     if (!tbl.getPartCols().isEmpty()) {
       List<Partition> partitions = hive.getPartitions(tbl);
       List<PartitionDesc> partDesc = new ArrayList<PartitionDesc>();
-      List<String> partLocs = new ArrayList<String>();
+      List<Path> partLocs = new ArrayList<Path>();
       for (Partition part : partitions) {
-        partLocs.add(part.getLocation());
+        partLocs.add(part.getPartitionPath());
         partDesc.add(Utilities.getPartitionDesc(part));
       }
       work = new FetchWork(partLocs, partDesc, Utilities.getTableDesc(tbl));
       work.setLimit(100);
     } else {
-      work = new FetchWork(tbl.getDataLocation().toString(), Utilities.getTableDesc(tbl));
+      work = new FetchWork(new Path(tbl.getDataLocation()), Utilities.getTableDesc(tbl));
     }
     FetchTask task = new FetchTask();
     task.setWork(work);
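
The same String-to-Path migration recurs in most of the files below; for orientation, a hedged sketch of the two FetchWork shapes the updated call sites use (the paths are placeholders, and tableDesc/partDescs are assumed to be in scope):

    // Non-partitioned table: a single Path plus its TableDesc.
    FetchWork nonPartitioned =
        new FetchWork(new Path("hdfs://nn:8020/warehouse/t"), tableDesc);

    // Partitioned table: parallel lists of partition Paths and PartitionDescs.
    List<Path> partDirs = new ArrayList<Path>();
    partDirs.add(new Path("hdfs://nn:8020/warehouse/t/ds=2014-01-01"));
    FetchWork partitioned = new FetchWork(partDirs, partDescs, tableDesc);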

Modified: hive/branches/tez/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/pom.xml?rev=1554722&r1=1554721&r2=1554722&view=diff
==============================================================================
--- hive/branches/tez/pom.xml (original)
+++ hive/branches/tez/pom.xml Thu Jan  2 02:42:15 2014
@@ -93,7 +93,7 @@
     <commons-lang.version>2.4</commons-lang.version>
     <commons-lang3.version>3.1</commons-lang3.version>
     <commons-logging.version>1.1.3</commons-logging.version>
-    <derby.version>10.4.2.0</derby.version>
+    <derby.version>10.10.1.1</derby.version>
     <guava.version>11.0.2</guava.version>
     <groovy.version>2.1.6</groovy.version>
     <hadoop-20.version>0.20.2</hadoop-20.version>

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java?rev=1554722&r1=1554721&r2=1554722&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java Thu Jan  2 02:42:15 2014
@@ -193,7 +193,7 @@ public class ExplainTask extends Task<Ex
 
     PrintStream out = null;
     try {
-      Path resFile = new Path(work.getResFile());
+      Path resFile = work.getResFile();
       OutputStream outS = resFile.getFileSystem(conf).create(resFile);
       out = new PrintStream(outS);
 

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java?rev=1554722&r1=1554721&r2=1554722&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java Thu Jan  2 02:42:15 2014
@@ -299,7 +299,7 @@ public class FetchOperator implements Se
     if (iterPath == null) {
       if (work.isNotPartitioned()) {
         if (!tblDataDone) {
-          currPath = work.getTblDirPath();
+          currPath = work.getTblDir();
           currTbl = work.getTblDesc();
           if (isNativeTable) {
             FileSystem fs = currPath.getFileSystem(job);
@@ -326,7 +326,7 @@ public class FetchOperator implements Se
         }
         return;
       } else {
-        iterPath = FetchWork.convertStringToPathArray(work.getPartDir()).iterator();
+        iterPath = work.getPartDir().iterator();
         iterPartDesc = work.getPartDesc().iterator();
       }
     }

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java?rev=1554722&r1=1554721&r2=1554722&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java Thu Jan  2 02:42:15 2014
@@ -205,7 +205,7 @@ public class MoveTask extends Task<MoveW
       LoadFileDesc lfd = work.getLoadFileWork();
       if (lfd != null) {
         Path targetPath = lfd.getTargetDir();
-        Path sourcePath = new Path(lfd.getSourceDir());
+        Path sourcePath = lfd.getSourcePath();
         moveFile(sourcePath, targetPath, lfd.getIsDfsDir());
       }
 
@@ -216,7 +216,7 @@ public class MoveTask extends Task<MoveW
         boolean isDfsDir = lmfd.getIsDfsDir();
         int i = 0;
         while (i <lmfd.getSourceDirs().size()) {
-          Path srcPath = new Path(lmfd.getSourceDirs().get(i));
+          Path srcPath = lmfd.getSourceDirs().get(i);
           Path destPath = lmfd.getTargetDirs().get(i);
           FileSystem fs = destPath.getFileSystem(conf);
           if (!fs.exists(destPath.getParent())) {
@@ -241,7 +241,7 @@ public class MoveTask extends Task<MoveW
           mesg.setLength(mesg.length()-2);
           mesg.append(')');
         }
-        String mesg_detail = " from " + tbd.getSourceDir();
+        String mesg_detail = " from " + tbd.getSourcePath();
         console.printInfo(mesg.toString(), mesg_detail);
         Table table = db.getTable(tbd.getTable().getTableName());
 
@@ -281,7 +281,7 @@ public class MoveTask extends Task<MoveW
         DataContainer dc = null;
         if (tbd.getPartitionSpec().size() == 0) {
           dc = new DataContainer(table.getTTable());
-          db.loadTable(new Path(tbd.getSourceDir()), tbd.getTable()
+          db.loadTable(tbd.getSourcePath(), tbd.getTable()
               .getTableName(), tbd.getReplace(), tbd.getHoldDDLTime());
           if (work.getOutputs() != null) {
             work.getOutputs().add(new WriteEntity(table));
@@ -294,7 +294,7 @@ public class MoveTask extends Task<MoveW
           List<SortCol> sortCols = null;
           int numBuckets = -1;
           Task task = this;
-          String path = tbd.getSourceDir();
+          String path = tbd.getSourcePath().toUri().toString();
           // Find the first ancestor of this MoveTask which is some form of map reduce task
           // (Either standard, local, or a merge)
           while (task.getParentTasks() != null && task.getParentTasks().size() == 1) {
@@ -330,7 +330,7 @@ public class MoveTask extends Task<MoveW
             // condition for merging is not met, see GenMRFileSink1.
             if (task instanceof MoveTask) {
               if (((MoveTask)task).getWork().getLoadFileWork() != null) {
-                path = ((MoveTask)task).getWork().getLoadFileWork().getSourceDir();
+                path = ((MoveTask)task).getWork().getLoadFileWork().getSourcePath().toUri().toString();
               }
             }
           }
@@ -354,7 +354,7 @@ public class MoveTask extends Task<MoveW
             // want to isolate any potential issue it may introduce.
             ArrayList<LinkedHashMap<String, String>> dp =
               db.loadDynamicPartitions(
-                  new Path(tbd.getSourceDir()),
+                  tbd.getSourcePath(),
                   tbd.getTable().getTableName(),
                 	tbd.getPartitionSpec(),
                 	tbd.getReplace(),
@@ -394,7 +394,7 @@ public class MoveTask extends Task<MoveW
               dc = new DataContainer(table.getTTable(), partn.getTPartition());
 
               if (SessionState.get() != null) {
-                SessionState.get().getLineageState().setLineage(tbd.getSourceDir(), dc,
+                SessionState.get().getLineageState().setLineage(tbd.getSourcePath(), dc,
                     table.getCols());
               }
 
@@ -405,7 +405,7 @@ public class MoveTask extends Task<MoveW
             List<String> partVals = MetaStoreUtils.getPvals(table.getPartCols(),
                 tbd.getPartitionSpec());
             db.validatePartitionNameCharacters(partVals);
-            db.loadPartition(new Path(tbd.getSourceDir()), tbd.getTable().getTableName(),
+            db.loadPartition(tbd.getSourcePath(), tbd.getTable().getTableName(),
                 tbd.getPartitionSpec(), tbd.getReplace(), tbd.getHoldDDLTime(),
                 tbd.getInheritTableSpecs(), isSkewedStoredAsDirs(tbd));
           	Partition partn = db.getPartition(table, tbd.getPartitionSpec(), false);
@@ -422,7 +422,7 @@ public class MoveTask extends Task<MoveW
          }
         }
         if (SessionState.get() != null && dc != null) {
-          SessionState.get().getLineageState().setLineage(tbd.getSourceDir(), dc,
+          SessionState.get().getLineageState().setLineage(tbd.getSourcePath(), dc,
               table.getCols());
         }
         releaseLocks(tbd);

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java?rev=1554722&r1=1554721&r2=1554722&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java Thu Jan  2 02:42:15 2014
@@ -750,6 +750,20 @@ public final class Utilities {
       output.writeString(token.getText());
     }
   }
+
+  private static class PathSerializer extends com.esotericsoftware.kryo.Serializer<Path> {
+
+    @Override
+    public void write(Kryo kryo, Output output, Path path) {
+      output.writeString(path.toUri().toString());
+    }
+
+    @Override
+    public Path read(Kryo kryo, Input input, Class<Path> type) {
+      return new Path(URI.create(input.readString()));
+    }
+  }
+
   private static void serializePlan(Object plan, OutputStream out, Configuration conf, boolean cloningPlan) {
     PerfLogger perfLogger = PerfLogger.getPerfLogger();
     perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.SERIALIZE_PLAN);
@@ -891,6 +905,7 @@ public final class Utilities {
       kryo.setClassLoader(Thread.currentThread().getContextClassLoader());
       kryo.register(java.sql.Date.class, new SqlDateSerializer());
       kryo.register(java.sql.Timestamp.class, new TimestampSerializer());
+      kryo.register(Path.class, new PathSerializer());
       removeField(kryo, Operator.class, "colExprMap");
       removeField(kryo, ColumnInfo.class, "objectInspector");
       removeField(kryo, MapWork.class, "opParseCtxMap");
@@ -912,6 +927,7 @@ public final class Utilities {
       kryo.register(CommonToken.class, new CommonTokenSerializer());
       kryo.register(java.sql.Date.class, new SqlDateSerializer());
       kryo.register(java.sql.Timestamp.class, new TimestampSerializer());
+      kryo.register(Path.class, new PathSerializer());
       return kryo;
     };
   };
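
A hedged round-trip sketch of the Path serializer registered above. PathSerializer is private to Utilities, so the registration line is written as if it were visible; the Kryo calls are the standard 2.x API:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import com.esotericsoftware.kryo.Kryo;
    import com.esotericsoftware.kryo.io.Input;
    import com.esotericsoftware.kryo.io.Output;
    import org.apache.hadoop.fs.Path;

    Kryo kryo = new Kryo();
    kryo.register(Path.class, new PathSerializer());   // as if PathSerializer were visible

    // Serialize: the serializer writes the path's URI string.
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    Output out = new Output(bos);
    kryo.writeObject(out, new Path("hdfs://nn:8020/warehouse/t1"));
    out.close();

    // Deserialize: the serializer rebuilds the Path from the URI string.
    Input in = new Input(new ByteArrayInputStream(bos.toByteArray()));
    Path roundTripped = kryo.readObject(in, Path.class);
    in.close();
    assert roundTripped.equals(new Path("hdfs://nn:8020/warehouse/t1"));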

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java?rev=1554722&r1=1554721&r2=1554722&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java Thu Jan  2 02:42:15 2014
@@ -498,21 +498,22 @@ public class ExecDriver extends Task<Map
     ArrayList<String> paths = mWork.getPaths();
     ArrayList<PartitionDesc> parts = mWork.getPartitionDescs();
 
-    Path onePath = new Path(paths.get(0));
-    String tmpPath = context.getCtx().getExternalTmpFileURI(onePath.toUri());
-
+    List<Path> inputPaths = new ArrayList<Path>(paths.size());
+    for (String path : paths) {
+      inputPaths.add(new Path(path));
+    }
+    
+    String tmpPath = context.getCtx().getExternalTmpFileURI(inputPaths.get(0).toUri());
     Path partitionFile = new Path(tmpPath, ".partitions");
     ShimLoader.getHadoopShims().setTotalOrderPartitionFile(job, partitionFile);
-
     PartitionKeySampler sampler = new PartitionKeySampler();
 
     if (mWork.getSamplingType() == MapWork.SAMPLING_ON_PREV_MR) {
       console.printInfo("Use sampling data created in previous MR");
      // merges sampling data from previous MR and makes partition keys for total sort
-      for (String path : paths) {
-        Path inputPath = new Path(path);
-        FileSystem fs = inputPath.getFileSystem(job);
-        for (FileStatus status : fs.globStatus(new Path(inputPath, ".sampling*"))) {
+      for (Path path : inputPaths) {
+        FileSystem fs = path.getFileSystem(job);
+        for (FileStatus status : fs.globStatus(new Path(path, ".sampling*"))) {
           sampler.addSampleFile(status.getPath(), job);
         }
       }
@@ -524,9 +525,9 @@ public class ExecDriver extends Task<Map
       FetchWork fetchWork;
       if (!partDesc.isPartitioned()) {
         assert paths.size() == 1;
-        fetchWork = new FetchWork(paths.get(0), partDesc.getTableDesc());
+        fetchWork = new FetchWork(inputPaths.get(0), partDesc.getTableDesc());
       } else {
-        fetchWork = new FetchWork(paths, parts, partDesc.getTableDesc());
+        fetchWork = new FetchWork(inputPaths, parts, partDesc.getTableDesc());
       }
       fetchWork.setSource(ts);
 

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/BlockMergeTask.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/BlockMergeTask.java?rev=1554722&r1=1554721&r2=1554722&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/BlockMergeTask.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/BlockMergeTask.java Thu Jan  2 02:42:15 2014
@@ -254,8 +254,8 @@ public class BlockMergeTask extends Task
   }
 
   private void addInputPaths(JobConf job, MergeWork work) {
-    for (String path : work.getInputPaths()) {
-      FileInputFormat.addInputPath(job, new Path(path));
+    for (Path path : work.getInputPaths()) {
+      FileInputFormat.addInputPath(job, path);
     }
   }
 
@@ -291,7 +291,7 @@ public class BlockMergeTask extends Task
       printUsage();
     }
 
-    List<String> inputPaths = new ArrayList<String>();
+    List<Path> inputPaths = new ArrayList<Path>();
     String[] paths = inputPathStr.split(INPUT_SEPERATOR);
     if (paths == null || paths.length == 0) {
       printUsage();
@@ -309,10 +309,10 @@ public class BlockMergeTask extends Task
         if (fstatus.isDir()) {
           FileStatus[] fileStatus = fs.listStatus(pathObj);
           for (FileStatus st : fileStatus) {
-            inputPaths.add(st.getPath().toString());
+            inputPaths.add(st.getPath());
           }
         } else {
-          inputPaths.add(fstatus.getPath().toString());
+          inputPaths.add(fstatus.getPath());
         }
       } catch (IOException e) {
         e.printStackTrace(System.err);

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/MergeWork.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/MergeWork.java?rev=1554722&r1=1554721&r2=1554722&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/MergeWork.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/MergeWork.java Thu Jan  2 02:42:15 2014
@@ -44,7 +44,7 @@ public class MergeWork extends MapWork i
 
   private static final long serialVersionUID = 1L;
 
-  private List<String> inputPaths;
+  private transient List<Path> inputPaths;
   private transient Path outputDir;
   private boolean hasDynamicPartitions;
   private DynamicPartitionCtx dynPartCtx;
@@ -54,11 +54,11 @@ public class MergeWork extends MapWork i
   public MergeWork() {
   }
 
-  public MergeWork(List<String> inputPaths, Path outputDir) {
+  public MergeWork(List<Path> inputPaths, Path outputDir) {
     this(inputPaths, outputDir, false, null);
   }
 
-  public MergeWork(List<String> inputPaths, Path outputDir,
+  public MergeWork(List<Path> inputPaths, Path outputDir,
       boolean hasDynamicPartitions, DynamicPartitionCtx dynPartCtx) {
     super();
     this.inputPaths = inputPaths;
@@ -70,16 +70,16 @@ public class MergeWork extends MapWork i
     if(this.getPathToPartitionInfo() == null) {
       this.setPathToPartitionInfo(new LinkedHashMap<String, PartitionDesc>());
     }
-    for(String path: this.inputPaths) {
-      this.getPathToPartitionInfo().put(path, partDesc);
+    for(Path path: this.inputPaths) {
+      this.getPathToPartitionInfo().put(path.toString(), partDesc);
     }
   }
 
-  public List<String> getInputPaths() {
+  public List<Path> getInputPaths() {
     return inputPaths;
   }
 
-  public void setInputPaths(List<String> inputPaths) {
+  public void setInputPaths(List<Path> inputPaths) {
     this.inputPaths = inputPaths;
   }
 
@@ -133,7 +133,7 @@ public class MergeWork extends MapWork i
     super.resolveDynamicPartitionStoredAsSubDirsMerge(conf, path, tblDesc, aliases, partDesc);
 
     // Add the DP path to the list of input paths
-    inputPaths.add(path.toString());
+    inputPaths.add(path);
   }
 
   /**
@@ -148,18 +148,17 @@ public class MergeWork extends MapWork i
       // use sub-dir as inputpath.
       assert ((this.inputPaths != null) && (this.inputPaths.size() == 1)) :
         "alter table ... concatenate should only have one directory inside inputpaths";
-      String dirName = inputPaths.get(0);
-      Path dirPath = new Path(dirName);
+      Path dirPath = inputPaths.get(0);
       try {
         FileSystem inpFs = dirPath.getFileSystem(conf);
         FileStatus[] status = HiveStatsUtils.getFileStatusRecurse(dirPath, listBucketingCtx
             .getSkewedColNames().size(), inpFs);
-        List<String> newInputPath = new ArrayList<String>();
+        List<Path> newInputPath = new ArrayList<Path>();
         boolean succeed = true;
         for (int i = 0; i < status.length; ++i) {
            if (status[i].isDir()) {
              // Add the lb path to the list of input paths
-             newInputPath.add(status[i].getPath().toString());
+             newInputPath.add(status[i].getPath());
            } else {
              // find file instead of dir. dont change inputpath
              succeed = false;
@@ -173,7 +172,7 @@ public class MergeWork extends MapWork i
           inputPaths.addAll(newInputPath);
         }
       } catch (IOException e) {
-        String msg = "Fail to get filesystem for directory name : " + dirName;
+        String msg = "Fail to get filesystem for directory name : " + dirPath.toUri();
         throw new RuntimeException(msg, e);
       }
 

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java?rev=1554722&r1=1554721&r2=1554722&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java Thu Jan  2 02:42:15 2014
@@ -259,8 +259,8 @@ public class PartialScanTask extends Tas
   }
 
   private void addInputPaths(JobConf job, PartialScanWork work) {
-    for (String path : work.getInputPaths()) {
-      FileInputFormat.addInputPath(job, new Path(path));
+    for (Path path : work.getInputPaths()) {
+      FileInputFormat.addInputPath(job, path);
     }
   }
 
@@ -296,7 +296,7 @@ public class PartialScanTask extends Tas
       printUsage();
     }
 
-    List<String> inputPaths = new ArrayList<String>();
+    List<Path> inputPaths = new ArrayList<Path>();
     String[] paths = inputPathStr.split(INPUT_SEPERATOR);
     if (paths == null || paths.length == 0) {
       printUsage();
@@ -314,10 +314,10 @@ public class PartialScanTask extends Tas
         if (fstatus.isDir()) {
           FileStatus[] fileStatus = fs.listStatus(pathObj);
           for (FileStatus st : fileStatus) {
-            inputPaths.add(st.getPath().toString());
+            inputPaths.add(st.getPath());
           }
         } else {
-          inputPaths.add(fstatus.getPath().toString());
+          inputPaths.add(fstatus.getPath());
         }
       } catch (IOException e) {
         e.printStackTrace(System.err);

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanWork.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanWork.java?rev=1554722&r1=1554721&r2=1554722&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanWork.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanWork.java Thu Jan  2 02:42:15 2014
@@ -22,6 +22,7 @@ import java.io.Serializable;
 import java.util.LinkedHashMap;
 import java.util.List;
 
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.io.CombineHiveInputFormat;
 import org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileBlockMergeInputFormat;
 import org.apache.hadoop.hive.ql.plan.Explain;
@@ -38,13 +39,13 @@ public class PartialScanWork extends Map
 
   private static final long serialVersionUID = 1L;
 
-  private List<String> inputPaths;
+  private transient List<Path> inputPaths;
   private String aggKey;
 
   public PartialScanWork() {
   }
 
-  public PartialScanWork(List<String> inputPaths) {
+  public PartialScanWork(List<Path> inputPaths) {
     super();
     this.inputPaths = inputPaths;
     PartitionDesc partDesc = new PartitionDesc();
@@ -52,16 +53,16 @@ public class PartialScanWork extends Map
     if(this.getPathToPartitionInfo() == null) {
       this.setPathToPartitionInfo(new LinkedHashMap<String, PartitionDesc>());
     }
-    for(String path: this.inputPaths) {
-      this.getPathToPartitionInfo().put(path, partDesc);
+    for(Path path: this.inputPaths) {
+      this.getPathToPartitionInfo().put(path.toString(), partDesc);
     }
   }
 
-  public List<String> getInputPaths() {
+  public List<Path> getInputPaths() {
     return inputPaths;
   }
 
-  public void setInputPaths(List<String> inputPaths) {
+  public void setInputPaths(List<Path> inputPaths) {
     this.inputPaths = inputPaths;
   }
 

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateMapper.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateMapper.java?rev=1554722&r1=1554721&r2=1554722&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateMapper.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateMapper.java Thu Jan  2 02:42:15 2014
@@ -78,12 +78,12 @@ public class ColumnTruncateMapper extend
     jc = job;
     work = (ColumnTruncateWork) Utilities.getMapWork(job);
 
-    String specPath = work.getOutputDir();
+    Path specPath = work.getOutputDir();
     Path tmpPath = Utilities.toTempPath(specPath);
     Path taskTmpPath = Utilities.toTaskTempPath(specPath);
     updatePaths(tmpPath, taskTmpPath);
     try {
-      fs = (new Path(specPath)).getFileSystem(job);
+      fs = specPath.getFileSystem(job);
       autoDelete = fs.deleteOnExit(outPath);
     } catch (IOException e) {
       this.exception = true;
@@ -229,13 +229,12 @@ public class ColumnTruncateMapper extend
     }
   }
 
-  public static void jobClose(String outputPath, boolean success, JobConf job,
+  public static void jobClose(Path outputPath, boolean success, JobConf job,
       LogHelper console, DynamicPartitionCtx dynPartCtx, Reporter reporter
       ) throws HiveException, IOException {
-    Path outpath = new Path(outputPath);
-    FileSystem fs = outpath.getFileSystem(job);
-    Path backupPath = backupOutputPath(fs, outpath, job);
-    Utilities.mvFileToFinalPath(outputPath, job, success, LOG, dynPartCtx, null,
+    FileSystem fs = outputPath.getFileSystem(job);
+    Path backupPath = backupOutputPath(fs, outputPath, job);
+    Utilities.mvFileToFinalPath(outputPath.toUri().toString(), job, success, LOG, dynPartCtx, null,
       reporter);
     fs.delete(backupPath, true);
   }

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java?rev=1554722&r1=1554721&r2=1554722&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java Thu Jan  2 02:42:15 2014
@@ -130,8 +130,8 @@ public class ColumnTruncateTask extends 
       throw new RuntimeException(e.getMessage());
     }
 
-    String outputPath = this.work.getOutputDir();
-    Path tempOutPath = Utilities.toTempPath(new Path(outputPath));
+    Path outputPath = this.work.getOutputDir();
+    Path tempOutPath = Utilities.toTempPath(outputPath);
     try {
       FileSystem fs = tempOutPath.getFileSystem(job);
       if (!fs.exists(tempOutPath)) {
@@ -230,7 +230,7 @@ public class ColumnTruncateTask extends 
   }
 
   private void addInputPaths(JobConf job, ColumnTruncateWork work) {
-    FileInputFormat.addInputPath(job, new Path(work.getInputDir()));
+    FileInputFormat.addInputPath(job, work.getInputDir());
   }
 
   @Override

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateWork.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateWork.java?rev=1554722&r1=1554721&r2=1554722&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateWork.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateWork.java Thu Jan  2 02:42:15 2014
@@ -22,6 +22,7 @@ import java.io.Serializable;
 import java.util.LinkedHashMap;
 import java.util.List;
 
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat;
 import org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileBlockMergeInputFormat;
 import org.apache.hadoop.hive.ql.plan.DynamicPartitionCtx;
@@ -36,8 +37,8 @@ public class ColumnTruncateWork extends 
 
   private static final long serialVersionUID = 1L;
 
-  private String inputDir;
-  private String outputDir;
+  private transient Path inputDir;
+  private Path outputDir;
   private boolean hasDynamicPartitions;
   private DynamicPartitionCtx dynPartCtx;
   private boolean isListBucketingAlterTableConcatenate;
@@ -47,11 +48,11 @@ public class ColumnTruncateWork extends 
   public ColumnTruncateWork() {
   }
 
-  public ColumnTruncateWork(List<Integer> droppedColumns, String inputDir, String outputDir) {
+  public ColumnTruncateWork(List<Integer> droppedColumns, Path inputDir, Path outputDir) {
     this(droppedColumns, inputDir, outputDir, false, null);
   }
 
-  public ColumnTruncateWork(List<Integer> droppedColumns, String inputDir, String outputDir,
+  public ColumnTruncateWork(List<Integer> droppedColumns, Path inputDir, Path outputDir,
       boolean hasDynamicPartitions, DynamicPartitionCtx dynPartCtx) {
     super();
     this.droppedColumns = droppedColumns;
@@ -64,22 +65,22 @@ public class ColumnTruncateWork extends 
     if(this.getPathToPartitionInfo() == null) {
       this.setPathToPartitionInfo(new LinkedHashMap<String, PartitionDesc>());
     }
-    this.getPathToPartitionInfo().put(inputDir, partDesc);
+    this.getPathToPartitionInfo().put(inputDir.toString(), partDesc);
   }
 
-  public String getInputDir() {
+  public Path getInputDir() {
     return inputDir;
   }
 
-  public void setInputPaths(String inputDir) {
+  public void setInputPaths(Path inputDir) {
     this.inputDir = inputDir;
   }
 
-  public String getOutputDir() {
+  public Path getOutputDir() {
     return outputDir;
   }
 
-  public void setOutputDir(String outputDir) {
+  public void setOutputDir(Path outputDir) {
     this.outputDir = outputDir;
   }
 

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/ZooKeeperHiveLockManager.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/ZooKeeperHiveLockManager.java?rev=1554722&r1=1554721&r2=1554722&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/ZooKeeperHiveLockManager.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/ZooKeeperHiveLockManager.java Thu Jan  2 02:42:15 2014
@@ -442,6 +442,8 @@ public class ZooKeeperHiveLockManager im
         break;
       } catch (Exception e) {
         if (tryNum >= numRetriesForUnLock) {
+          String name = ((ZooKeeperHiveLock)hiveLock).getPath();
+          LOG.error("Node " + name + " cannot be deleted after " + numRetriesForUnLock + " attempts.");
           throw new LockException(e);
         }
       }
@@ -455,23 +457,28 @@ public class ZooKeeperHiveLockManager im
   static void unlockPrimitive(HiveConf conf, ZooKeeper zkpClient,
                              HiveLock hiveLock, String parent) throws LockException {
     ZooKeeperHiveLock zLock = (ZooKeeperHiveLock)hiveLock;
+    HiveLockObject obj = zLock.getHiveLockObject();
+    String name  = getLastObjectName(parent, obj);
     try {
-      // can throw KeeperException.NoNodeException, which might mean something is wrong
       zkpClient.delete(zLock.getPath(), -1);
 
       // Delete the parent node if all the children have been deleted
-      HiveLockObject obj = zLock.getHiveLockObject();
-      String name  = getLastObjectName(parent, obj);
-
-      try {
-        List<String> children = zkpClient.getChildren(name, false);
-        if (children == null || children.isEmpty()) {
-          zkpClient.delete(name, -1);
-        }
-      } catch (KeeperException.NoNodeException e) {
-        LOG.debug("Node " + name + " previously deleted when attempting to delete.");
-      }
+      List<String> children = zkpClient.getChildren(name, false);
+      if (children == null || children.isEmpty()) {
+        zkpClient.delete(name, -1);
+      }
+    } catch (KeeperException.NoNodeException nne) {
+      //can happen when retrying the zLock delete after an exception such as
+      //InterruptedException, or in a race where another process has already deleted the
+      //parent. Neither case should raise an error
+      LOG.debug("Node " + zLock.getPath() + " or its parent has already been deleted.");
+    } catch (KeeperException.NotEmptyException nee) {
+      //can happen in a race where another process adds a zLock under this parent just
+      //before it is deleted. This is not a problem, since the parent can eventually be
+      //deleted by the process that holds its last child zLock
+      LOG.debug("Node " + name + " to be deleted is not empty.");
     } catch (Exception e) {
+      //exceptions including InterruptedException and other KeeperExceptions
       LOG.error("Failed to release ZooKeeper lock: ", e);
       throw new LockException(e);
     }
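
The tolerant-delete pattern above, in isolation; zk, lockPath, and parentPath are assumed to be in scope, and this is a sketch rather than the committed code:

    try {
      zk.delete(lockPath, -1);                       // -1 matches any node version
      List<String> children = zk.getChildren(parentPath, false);
      if (children == null || children.isEmpty()) {
        zk.delete(parentPath, -1);                   // best-effort parent cleanup
      }
    } catch (KeeperException.NoNodeException e) {
      // benign: a retry after InterruptedException, or another process already deleted it
    } catch (KeeperException.NotEmptyException e) {
      // benign: another process added a child first; it will clean up the parent later
    }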

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java?rev=1554722&r1=1554721&r2=1554722&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java Thu Jan  2 02:42:15 2014
@@ -26,6 +26,7 @@ import java.util.Map;
 import java.util.Set;
 import java.util.Stack;
 
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.metastore.api.MetaException;
@@ -173,10 +174,10 @@ public class GenMRTableScan1 implements 
       Task<? extends Serializable> currTask, QBParseInfo parseInfo, StatsWork statsWork,
       Task<StatsWork> statsTask) throws SemanticException {
     String aggregationKey = op.getConf().getStatsAggPrefix();
-    List<String> inputPaths = new ArrayList<String>();
+    List<Path> inputPaths = new ArrayList<Path>();
     switch (parseInfo.getTableSpec().specType) {
     case TABLE_ONLY:
-      inputPaths.add(parseInfo.getTableSpec().tableHandle.getPath().toString());
+      inputPaths.add(parseInfo.getTableSpec().tableHandle.getPath());
       break;
     case STATIC_PARTITION:
       Partition part = parseInfo.getTableSpec().partHandle;
@@ -186,7 +187,7 @@ public class GenMRTableScan1 implements 
         throw new SemanticException(ErrorMsg.ANALYZE_TABLE_PARTIALSCAN_AGGKEY.getMsg(
             part.getPartitionPath().toString() + e.getMessage()));
       }
-      inputPaths.add(part.getPartitionPath().toString());
+      inputPaths.add(part.getPartitionPath());
       break;
     default:
       assert false;

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java?rev=1554722&r1=1554721&r2=1554722&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java Thu Jan  2 02:42:15 2014
@@ -732,10 +732,10 @@ public final class GenMapRedUtils {
         tblDesc = Utilities.getTableDesc(partsList.getSourceTable());
         localPlan.getAliasToFetchWork().put(
             alias_id,
-            new FetchWork(FetchWork.convertPathToStringArray(partDir), partDesc, tblDesc));
+            new FetchWork(partDir, partDesc, tblDesc));
       } else {
         localPlan.getAliasToFetchWork().put(alias_id,
-            new FetchWork(tblDir.toString(), tblDesc));
+            new FetchWork(tblDir, tblDesc));
       }
       plan.setMapLocalWork(localPlan);
     }
@@ -782,7 +782,7 @@ public final class GenMapRedUtils {
       assert localPlan.getAliasToWork().get(alias) == null;
       assert localPlan.getAliasToFetchWork().get(alias) == null;
       localPlan.getAliasToWork().put(alias, topOp);
-      localPlan.getAliasToFetchWork().put(alias, new FetchWork(alias, tt_desc));
+      localPlan.getAliasToFetchWork().put(alias, new FetchWork(new Path(alias), tt_desc));
       plan.setMapLocalWork(localPlan);
     }
   }
@@ -1487,17 +1487,19 @@ public final class GenMapRedUtils {
     TableDesc tblDesc = fsInputDesc.getTableInfo();
 
     if (tblDesc.getInputFileFormatClass().equals(RCFileInputFormat.class)) {
-      ArrayList<String> inputDirs = new ArrayList<String>();
+      ArrayList<Path> inputDirs = new ArrayList<Path>(1);
+      ArrayList<String> inputDirstr = new ArrayList<String>(1);
       if (!hasDynamicPartitions
           && !GenMapRedUtils.isSkewedStoredAsDirs(fsInputDesc)) {
-        inputDirs.add(inputDir);
+        inputDirs.add(new Path(inputDir));
+        inputDirstr.add(inputDir);
       }
 
       MergeWork work = new MergeWork(inputDirs, finalName,
           hasDynamicPartitions, fsInputDesc.getDynPartCtx());
       LinkedHashMap<String, ArrayList<String>> pathToAliases =
           new LinkedHashMap<String, ArrayList<String>>();
-      pathToAliases.put(inputDir, (ArrayList<String>) inputDirs.clone());
+      pathToAliases.put(inputDir, (ArrayList<String>) inputDirstr.clone());
       work.setMapperCannotSpanPartns(true);
       work.setPathToAliases(pathToAliases);
       work.setAliasToWork(
@@ -1593,16 +1595,15 @@ public final class GenMapRedUtils {
     // find the move task
     for (Task<MoveWork> mvTsk : mvTasks) {
       MoveWork mvWork = mvTsk.getWork();
-      String srcDir = null;
+      Path srcDir = null;
       if (mvWork.getLoadFileWork() != null) {
-        srcDir = mvWork.getLoadFileWork().getSourceDir();
+        srcDir = mvWork.getLoadFileWork().getSourcePath();
       } else if (mvWork.getLoadTableWork() != null) {
-        srcDir = mvWork.getLoadTableWork().getSourceDir();
+        srcDir = mvWork.getLoadTableWork().getSourcePath();
       }
 
-      String fsOpDirName = fsOp.getConf().getFinalDirName();
       if ((srcDir != null)
-          && (srcDir.equalsIgnoreCase(fsOpDirName))) {
+          && (srcDir.equals(new Path(fsOp.getConf().getFinalDirName())))) {
         return mvTsk;
       }
     }

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java?rev=1554722&r1=1554721&r2=1554722&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java Thu Jan  2 02:42:15 2014
@@ -31,6 +31,7 @@ import java.util.Stack;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.exec.AbstractMapJoinOperator;
@@ -172,9 +173,7 @@ public class MapJoinProcessor implements
         ArrayList<String> list = entry2.getValue();
         if (list.contains(alias)) {
           // add to path set
-          if (!pathSet.contains(path)) {
-            pathSet.add(path);
-          }
+          pathSet.add(path);
           //remove this alias from the alias list
           list.remove(alias);
           if(list.size() == 0) {
@@ -189,18 +188,18 @@ public class MapJoinProcessor implements
 
       // create fetch work
       FetchWork fetchWork = null;
-      List<String> partDir = new ArrayList<String>();
+      List<Path> partDir = new ArrayList<Path>();
       List<PartitionDesc> partDesc = new ArrayList<PartitionDesc>();
 
       for (String tablePath : pathSet) {
         PartitionDesc partitionDesc = newWork.getMapWork().getPathToPartitionInfo().get(tablePath);
         // create fetchwork for non partitioned table
         if (partitionDesc.getPartSpec() == null || partitionDesc.getPartSpec().size() == 0) {
-          fetchWork = new FetchWork(tablePath, partitionDesc.getTableDesc());
+          fetchWork = new FetchWork(new Path(tablePath), partitionDesc.getTableDesc());
           break;
         }
         // if table is partitioned,add partDir and partitionDesc
-        partDir.add(tablePath);
+        partDir.add(new Path(tablePath));
         partDesc.add(partitionDesc);
       }
       // create fetchwork for partitioned table

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SimpleFetchOptimizer.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SimpleFetchOptimizer.java?rev=1554722&r1=1554721&r2=1554722&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SimpleFetchOptimizer.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SimpleFetchOptimizer.java Thu Jan  2 02:42:15 2014
@@ -232,18 +232,17 @@ public class SimpleFetchOptimizer implem
       inputs.clear();
       if (!table.isPartitioned()) {
         inputs.add(new ReadEntity(table));
-        String path = table.getPath().toString();
-        FetchWork work = new FetchWork(path, Utilities.getTableDesc(table));
+        FetchWork work = new FetchWork(table.getPath(), Utilities.getTableDesc(table));
         PlanUtils.configureInputJobPropertiesForStorageHandler(work.getTblDesc());
         work.setSplitSample(splitSample);
         return work;
       }
-      List<String> listP = new ArrayList<String>();
+      List<Path> listP = new ArrayList<Path>();
       List<PartitionDesc> partP = new ArrayList<PartitionDesc>();
 
       for (Partition partition : partsList.getNotDeniedPartns()) {
         inputs.add(new ReadEntity(partition));
-        listP.add(partition.getPartitionPath().toString());
+        listP.add(partition.getPartitionPath());
         partP.add(Utilities.getPartitionDesc(partition));
       }
       Table sourceTable = partsList.getSourceTable();

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java?rev=1554722&r1=1554721&r2=1554722&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java Thu Jan  2 02:42:15 2014
@@ -308,7 +308,7 @@ public final class GenMRSkewJoinProcesso
         localPlan.getAliasToWork().put(small_alias.toString(), tblScan_op2);
         Path tblDir = new Path(smallTblDirs.get(small_alias));
         localPlan.getAliasToFetchWork().put(small_alias.toString(),
-            new FetchWork(tblDir.toString(), tableDescList.get(small_alias)));
+            new FetchWork(tblDir, tableDescList.get(small_alias)));
       }
 
       newPlan.setMapLocalWork(localPlan);

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SortMergeJoinTaskDispatcher.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SortMergeJoinTaskDispatcher.java?rev=1554722&r1=1554721&r2=1554722&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SortMergeJoinTaskDispatcher.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SortMergeJoinTaskDispatcher.java Thu Jan  2 02:42:15 2014
@@ -26,6 +26,7 @@ import java.util.Map;
 import java.util.Set;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.ObjectPair;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.Context;
@@ -118,10 +119,10 @@ public class SortMergeJoinTaskDispatcher
 
       PartitionDesc partitionInfo = currWork.getAliasToPartnInfo().get(alias);
       if (fetchWork.getTblDir() != null) {
-        currWork.mergeAliasedInput(alias, fetchWork.getTblDir(), partitionInfo);
+        currWork.mergeAliasedInput(alias, fetchWork.getTblDir().toUri().toString(), partitionInfo);
       } else {
-        for (String pathDir : fetchWork.getPartDir()) {
-          currWork.mergeAliasedInput(alias, pathDir, partitionInfo);
+        for (Path pathDir : fetchWork.getPartDir()) {
+          currWork.mergeAliasedInput(alias, pathDir.toUri().toString(), partitionInfo);
         }
       }
     }

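Consumers that still key their maps on strings convert at the call site with toUri().toString(), as mergeAliasedInput does above. A standalone sketch; the sample path is hypothetical:

    import org.apache.hadoop.fs.Path;

    public class PathKeySketch {
      public static void main(String[] args) {
        Path dir = new Path("hdfs://nn:8020/warehouse/t/ds=2014-01-02");
        // toUri().toString() yields the full URI form, usable as a stable string key.
        String key = dir.toUri().toString();
        System.out.println(key);
      }
    }
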
Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/AlterTablePartMergeFilesDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/AlterTablePartMergeFilesDesc.java?rev=1554722&r1=1554721&r2=1554722&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/AlterTablePartMergeFilesDesc.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/AlterTablePartMergeFilesDesc.java Thu Jan  2 02:42:15 2014
@@ -33,7 +33,7 @@ public class AlterTablePartMergeFilesDes
   private HashMap<String, String> partSpec;
   private ListBucketingCtx lbCtx; // context for list bucketing.
 
-  private List<String> inputDir = new ArrayList<String>();
+  private List<Path> inputDir = new ArrayList<Path>();
   private Path outputDir = null;
 
   public AlterTablePartMergeFilesDesc(String tableName,
@@ -68,11 +68,11 @@ public class AlterTablePartMergeFilesDes
     this.outputDir = outputDir;
   }
 
-  public List<String> getInputDir() {
+  public List<Path> getInputDir() {
     return inputDir;
   }
 
-  public void setInputDir(List<String> inputDir) {
+  public void setInputDir(List<Path> inputDir) {
     this.inputDir = inputDir;
   }
 

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java?rev=1554722&r1=1554721&r2=1554722&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java Thu Jan  2 02:42:15 2014
@@ -1000,7 +1000,7 @@ public class DDLSemanticAnalyzer extends
 
         truncateTblDesc.setColumnIndexes(new ArrayList<Integer>(columnIndexes));
 
-        truncateTblDesc.setInputDir(oldTblPartLoc.toString());
+        truncateTblDesc.setInputDir(oldTblPartLoc);
         addInputsOutputsAlterTable(tableName, partSpec);
 
         truncateTblDesc.setLbCtx(lbCtx);
@@ -1011,8 +1011,8 @@ public class DDLSemanticAnalyzer extends
         // Write the output to temporary directory and move it to the final location at the end
         // so the operation is atomic.
         String queryTmpdir = ctx.getExternalTmpFileURI(newTblPartLoc.toUri());
-        truncateTblDesc.setOutputDir(queryTmpdir);
-        LoadTableDesc ltd = new LoadTableDesc(new Path(queryTmpdir), queryTmpdir, tblDesc,
+        truncateTblDesc.setOutputDir(new Path(queryTmpdir));
+        LoadTableDesc ltd = new LoadTableDesc(new Path(queryTmpdir), tblDesc,
             partSpec == null ? new HashMap<String, String>() : partSpec);
         ltd.setLbCtx(lbCtx);
         Task<MoveWork> moveTsk = TaskFactory.get(new MoveWork(null, null, ltd, null, false),
@@ -1534,7 +1534,7 @@ public class DDLSemanticAnalyzer extends
     AlterTablePartMergeFilesDesc mergeDesc = new AlterTablePartMergeFilesDesc(
         tableName, partSpec);
 
-    List<String> inputDir = new ArrayList<String>();
+    List<Path> inputDir = new ArrayList<Path>();
     Path oldTblPartLoc = null;
     Path newTblPartLoc = null;
     Table tblObj = null;
@@ -1614,7 +1614,7 @@ public class DDLSemanticAnalyzer extends
             "Merge can not perform on archived partitions.");
       }
 
-      inputDir.add(oldTblPartLoc.toString());
+      inputDir.add(oldTblPartLoc);
 
       mergeDesc.setInputDir(inputDir);
 
@@ -1627,7 +1627,7 @@ public class DDLSemanticAnalyzer extends
       TableDesc tblDesc = Utilities.getTableDesc(tblObj);
       String queryTmpdir = ctx.getExternalTmpFileURI(newTblPartLoc.toUri());
       mergeDesc.setOutputDir(new Path(queryTmpdir));
-      LoadTableDesc ltd = new LoadTableDesc(new Path(queryTmpdir), queryTmpdir, tblDesc,
+      LoadTableDesc ltd = new LoadTableDesc(new Path(queryTmpdir), tblDesc,
           partSpec == null ? new HashMap<String, String>() : partSpec);
       ltd.setLbCtx(lbCtx);
       Task<MoveWork> moveTsk = TaskFactory.get(new MoveWork(null, null, ltd, null, false),
@@ -1948,7 +1948,7 @@ public class DDLSemanticAnalyzer extends
     prop.setProperty("columns", colTypes[0]);
     prop.setProperty("columns.types", colTypes[1]);
     prop.setProperty(serdeConstants.SERIALIZATION_LIB, LazySimpleSerDe.class.getName());
-    FetchWork fetch = new FetchWork(ctx.getResFile().toString(), new TableDesc(
+    FetchWork fetch = new FetchWork(ctx.getResFile(), new TableDesc(
         TextInputFormat.class,IgnoreKeyTextOutputFormat.class, prop), -1);
     fetch.setSerializationNullFormat(" ");
     return (FetchTask) TaskFactory.get(fetch, conf);

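Both the truncate and merge call sites above also move to the LoadTableDesc constructors that no longer thread a separate tmp-directory string (see the LoadTableDesc hunk further down). A sketch of the post-patch call shape; the wrapper method is illustrative and its arguments are assumed to come from the surrounding analyzer:

    import java.util.HashMap;
    import java.util.Map;

    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hive.ql.plan.LoadTableDesc;
    import org.apache.hadoop.hive.ql.plan.TableDesc;

    public class TruncateMoveSketch {
      // The tmp-directory string argument is gone; only the source Path is passed.
      static LoadTableDesc makeLoad(Path queryTmpdir, TableDesc tblDesc,
          Map<String, String> partSpec) {
        return new LoadTableDesc(queryTmpdir, tblDesc,
            partSpec == null ? new HashMap<String, String>() : partSpec);
      }
    }
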
Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java?rev=1554722&r1=1554721&r2=1554722&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java Thu Jan  2 02:42:15 2014
@@ -77,7 +77,7 @@ public class ExplainSemanticAnalyzer ext
       pCtx = ((SemanticAnalyzer)sem).getParseContext();
     }
 
-    ExplainWork work = new ExplainWork(ctx.getResFile().toString(),
+    ExplainWork work = new ExplainWork(ctx.getResFile(),
         pCtx,
         tasks,
         fetchTask,

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java?rev=1554722&r1=1554721&r2=1554722&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java Thu Jan  2 02:42:15 2014
@@ -285,7 +285,6 @@ public class ImportSemanticAnalyzer exte
     Task<?> copyTask = TaskFactory.get(new CopyWork(dataPath,
        tmpPath, false), conf);
     LoadTableDesc loadTableWork = new LoadTableDesc(tmpPath,
-        ctx.getExternalTmpFileURI(fromURI),
         Utilities.getTableDesc(table), new TreeMap<String, String>(),
         false);
     Task<?> loadTableTask = TaskFactory.get(new MoveWork(getInputs(),
@@ -332,7 +331,6 @@ public class ImportSemanticAnalyzer exte
       Task<?> addPartTask = TaskFactory.get(new DDLWork(getInputs(),
           getOutputs(), addPartitionDesc), conf);
       LoadTableDesc loadTableWork = new LoadTableDesc(tmpPath,
-          ctx.getExternalTmpFileURI(fromURI),
           Utilities.getTableDesc(table),
           addPartitionDesc.getPartSpec(), true);
       loadTableWork.setInheritTableSpecs(false);

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java?rev=1554722&r1=1554721&r2=1554722&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java Thu Jan  2 02:42:15 2014
@@ -259,7 +259,7 @@ public class LoadSemanticAnalyzer extend
 
     LoadTableDesc loadTableWork;
     loadTableWork = new LoadTableDesc(new Path(fromURI),
-          loadTmpPath, Utilities.getTableDesc(ts.tableHandle), partSpec, isOverWrite);
+      Utilities.getTableDesc(ts.tableHandle), partSpec, isOverWrite);
 
     Task<? extends Serializable> childTask = TaskFactory.get(new MoveWork(getInputs(),
         getOutputs(), loadTableWork, null, true), conf);

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=1554722&r1=1554721&r2=1554722&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Thu Jan  2 02:42:15 2014
@@ -5355,8 +5355,7 @@ public class SemanticAnalyzer extends Ba
       // Create the work for moving the table
       // NOTE: specify Dynamic partitions in dest_tab for WriteEntity
       if (!isNonNativeTable) {
-        ltd = new LoadTableDesc(new Path(queryTmpdir), ctx.getExternalTmpFileURI(dest_path.toUri()),
-            table_desc, dpCtx);
+        ltd = new LoadTableDesc(new Path(queryTmpdir), table_desc, dpCtx);
         ltd.setReplace(!qb.getParseInfo().isInsertIntoTable(dest_tab.getDbName(),
             dest_tab.getTableName()));
         ltd.setLbCtx(lbCtx);
@@ -5439,8 +5438,7 @@ public class SemanticAnalyzer extends Ba
       lbCtx = constructListBucketingCtx(dest_part.getSkewedColNames(),
           dest_part.getSkewedColValues(), dest_part.getSkewedColValueLocationMaps(),
           dest_part.isStoredAsSubDirectories(), conf);
-      ltd = new LoadTableDesc(new Path(queryTmpdir), ctx.getExternalTmpFileURI(dest_path.toUri()),
-          table_desc, dest_part.getSpec());
+      ltd = new LoadTableDesc(new Path(queryTmpdir), table_desc, dest_part.getSpec());
       ltd.setReplace(!qb.getParseInfo().isInsertIntoTable(dest_tab.getDbName(),
           dest_tab.getTableName()));
       ltd.setLbCtx(lbCtx);
@@ -5658,7 +5656,7 @@ public class SemanticAnalyzer extends Ba
 
     if (ltd != null && SessionState.get() != null) {
       SessionState.get().getLineageState()
-          .mapDirToFop(ltd.getSourceDir(), (FileSinkOperator) output);
+          .mapDirToFop(ltd.getSourcePath(), (FileSinkOperator) output);
     }
 
     if (LOG.isDebugEnabled()) {

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java?rev=1554722&r1=1554721&r2=1554722&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java Thu Jan  2 02:42:15 2014
@@ -64,7 +64,7 @@ import org.apache.hadoop.hive.ql.session
  */
 public abstract class TaskCompiler {
 
-  protected final Log LOG = LogFactory.getLog(TezCompiler.class);
+  protected final Log LOG = LogFactory.getLog(TaskCompiler.class);
 
   protected Hive db;
   protected LogHelper console;
@@ -117,7 +117,7 @@ public abstract class TaskCompiler {
         resultTab = PlanUtils.getDefaultQueryOutputTableDesc(cols, colTypes, resFileFormat);
       }
 
-      FetchWork fetch = new FetchWork(new Path(loadFileDesc.getSourceDir()).toString(),
+      FetchWork fetch = new FetchWork(loadFileDesc.getSourcePath(),
                                       resultTab, qb.getParseInfo().getOuterQueryLimit());
       fetch.setSource(pCtx.getFetchSource());
       fetch.setSink(pCtx.getFetchSink());
@@ -297,7 +297,7 @@ public abstract class TaskCompiler {
     String resFileFormat = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEQUERYRESULTFILEFORMAT);
     TableDesc resultTab = PlanUtils.getDefaultQueryOutputTableDesc(cols, colTypes, resFileFormat);
 
-    fetch = new FetchWork(new Path(loadFileWork.get(0).getSourceDir()).toString(),
+    fetch = new FetchWork(loadFileWork.get(0).getSourcePath(),
         resultTab, qb.getParseInfo().getOuterQueryLimit());
 
     ColumnStatsDesc cStatsDesc = new ColumnStatsDesc(tableName, partName,

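The first TaskCompiler hunk is a straight bug fix: the abstract base class created its logger under TezCompiler's name, so messages from every TaskCompiler subclass were attributed to the wrong category. The usual convention, sketched below with a hypothetical class, is to pass the declaring class itself:

    import org.apache.commons.logging.Log;
    import org.apache.commons.logging.LogFactory;

    public abstract class CompilerBaseSketch {
      // Log under the declaring class so log output matches the source file.
      protected final Log LOG = LogFactory.getLog(CompilerBaseSketch.class);
    }
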
Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/ArchiveWork.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/ArchiveWork.java?rev=1554722&r1=1554721&r2=1554722&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/ArchiveWork.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/ArchiveWork.java Thu Jan  2 02:42:15 2014
@@ -19,7 +19,6 @@
 package org.apache.hadoop.hive.ql.plan;
 
 import java.io.Serializable;
-import java.util.LinkedHashMap;
 
 /**
  * ArchiveWork.
@@ -28,9 +27,6 @@ import java.util.LinkedHashMap;
 @Explain(displayName = "Map Reduce")
 public class ArchiveWork implements Serializable {
   private static final long serialVersionUID = 1L;
-  private String tableName;
-  private String dbName;
-  private LinkedHashMap<String, String> partSpec;
   private ArchiveActionType type;
 
   public static enum ArchiveActionType {

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverMergeFiles.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverMergeFiles.java?rev=1554722&r1=1554721&r2=1554722&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverMergeFiles.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverMergeFiles.java Thu Jan  2 02:42:15 2014
@@ -254,7 +254,7 @@ public class ConditionalResolverMergeFil
     long totalSz = 0;
     boolean doMerge = false;
     // list of paths that don't need to merge but need to move to the dest location
-    List<String> toMove = new ArrayList<String>();
+    List<Path> toMove = new ArrayList<Path>();
     for (int i = 0; i < status.length; ++i) {
       long len = getMergeSize(inpFs, status[i].getPath(), avgConditionSize);
       if (len >= 0) {
@@ -265,7 +265,7 @@ public class ConditionalResolverMergeFil
         work.resolveDynamicPartitionStoredAsSubDirsMerge(conf, status[i].getPath(), tblDesc,
             aliases, pDesc);
       } else {
-        toMove.add(status[i].getPath().toString());
+        toMove.add(status[i].getPath());
       }
     }
     if (doMerge) {
@@ -289,11 +289,7 @@ public class ConditionalResolverMergeFil
         List<Path> targetDirs = new ArrayList<Path>(toMove.size());
 
         for (int i = 0; i < toMove.size(); i++) {
-          String toMoveStr = toMove.get(i);
-          if (toMoveStr.endsWith(Path.SEPARATOR)) {
-            toMoveStr = toMoveStr.substring(0, toMoveStr.length() - 1);
-          }
-          String[] moveStrSplits = toMoveStr.split(Path.SEPARATOR);
+          String[] moveStrSplits = toMove.get(i).toUri().toString().split(Path.SEPARATOR);
           int dpIndex = moveStrSplits.length - dpLbLevel;
           Path target = targetDir;
           while (dpIndex < moveStrSplits.length) {

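The manual trailing-separator trim disappears here, presumably because Path normalizes its string form on construction, so the URI form does not end in a separator. A standalone sketch with a hypothetical path:

    import org.apache.hadoop.fs.Path;

    public class SplitPathSketch {
      public static void main(String[] args) {
        Path p = new Path("hdfs://nn:8020/warehouse/t/hr=01/");
        // Path normalizes on construction; the trailing separator is already gone.
        String[] parts = p.toUri().toString().split(Path.SEPARATOR);
        System.out.println("last component = " + parts[parts.length - 1]);
      }
    }
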
Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/CopyWork.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/CopyWork.java?rev=1554722&r1=1554721&r2=1554722&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/CopyWork.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/CopyWork.java Thu Jan  2 02:42:15 2014
@@ -45,21 +45,13 @@ public class CopyWork implements Seriali
     this.toPath = toPath;
     this.setErrorOnSrcEmpty(errorOnSrcEmpty);
   }
-
-  @Explain(displayName = "source")
-  public String getFromPathAsString() {
-    return fromPath.toUri().toString();
-  }
   
-  @Explain(displayName = "destination")
-  public String getToPathAsString() {
-    return toPath.toUri().toString();
-  }
-
+  @Explain(displayName = "source")
   public Path getFromPath() {
     return fromPath;
   }
 
+  @Explain(displayName = "destination")
   public Path getToPath() {
     return toPath;
   }

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java?rev=1554722&r1=1554721&r2=1554722&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java Thu Jan  2 02:42:15 2014
@@ -23,6 +23,7 @@ import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.List;
 
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
 import org.apache.hadoop.hive.ql.parse.ParseContext;
@@ -34,7 +35,7 @@ import org.apache.hadoop.hive.ql.parse.P
 public class ExplainWork implements Serializable {
   private static final long serialVersionUID = 1L;
 
-  private String resFile;
+  private Path resFile;
   private ArrayList<Task<? extends Serializable>> rootTasks;
   private Task<? extends Serializable> fetchTask;
   private String astStringTree;
@@ -52,7 +53,7 @@ public class ExplainWork implements Seri
   public ExplainWork() {
   }
 
-  public ExplainWork(String resFile,
+  public ExplainWork(Path resFile,
       ParseContext pCtx,
       List<Task<? extends Serializable>> rootTasks,
       Task<? extends Serializable> fetchTask,
@@ -74,11 +75,11 @@ public class ExplainWork implements Seri
     this.pCtx = pCtx;
   }
 
-  public String getResFile() {
+  public Path getResFile() {
     return resFile;
   }
 
-  public void setResFile(String resFile) {
+  public void setResFile(Path resFile) {
     this.resFile = resFile;
   }
 

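Carrying resFile as a Path pays off at the point of use, since a Path resolves directly to its FileSystem without a re-parse. A minimal sketch of writing a result file this way; the helper is hypothetical, not part of the patch:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class ResFileSketch {
      static void write(Path resFile, Configuration conf, byte[] payload) throws Exception {
        FileSystem fs = resFile.getFileSystem(conf); // no new Path(String) round trip
        FSDataOutputStream out = fs.create(resFile);
        try {
          out.write(payload);
        } finally {
          out.close();
        }
      }
    }
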
Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/FetchWork.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/FetchWork.java?rev=1554722&r1=1554721&r2=1554722&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/FetchWork.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/FetchWork.java Thu Jan  2 02:42:15 2014
@@ -37,10 +37,10 @@ import org.apache.hadoop.hive.serde2.obj
 public class FetchWork implements Serializable {
   private static final long serialVersionUID = 1L;
 
-  private String tblDir;
+  private Path tblDir;
   private TableDesc tblDesc;
 
-  private ArrayList<String> partDir;
+  private ArrayList<Path> partDir;
   private ArrayList<PartitionDesc> partDesc;
 
   private Operator<?> source;
@@ -75,24 +75,24 @@ public class FetchWork implements Serial
     return rowsComputedFromStats;
   }
 
-  public FetchWork(String tblDir, TableDesc tblDesc) {
+  public FetchWork(Path tblDir, TableDesc tblDesc) {
     this(tblDir, tblDesc, -1);
   }
 
-  public FetchWork(String tblDir, TableDesc tblDesc, int limit) {
+  public FetchWork(Path tblDir, TableDesc tblDesc, int limit) {
     this.tblDir = tblDir;
     this.tblDesc = tblDesc;
     this.limit = limit;
   }
 
-  public FetchWork(List<String> partDir, List<PartitionDesc> partDesc, TableDesc tblDesc) {
+  public FetchWork(List<Path> partDir, List<PartitionDesc> partDesc, TableDesc tblDesc) {
     this(partDir, partDesc, tblDesc, -1);
   }
 
-  public FetchWork(List<String> partDir, List<PartitionDesc> partDesc,
+  public FetchWork(List<Path> partDir, List<PartitionDesc> partDesc,
       TableDesc tblDesc, int limit) {
     this.tblDesc = tblDesc;
-    this.partDir = new ArrayList<String>(partDir);
+    this.partDir = new ArrayList<Path>(partDir);
     this.partDesc = new ArrayList<PartitionDesc>(partDesc);
     this.limit = limit;
   }
@@ -124,22 +124,15 @@ public class FetchWork implements Serial
   /**
    * @return the tblDir
    */
-  public String getTblDir() {
+  public Path getTblDir() {
     return tblDir;
   }
 
   /**
-   * @return the tblDir
-   */
-  public Path getTblDirPath() {
-    return new Path(tblDir);
-  }
-
-  /**
    * @param tblDir
    *          the tblDir to set
    */
-  public void setTblDir(String tblDir) {
+  public void setTblDir(Path tblDir) {
     this.tblDir = tblDir;
   }
 
@@ -161,45 +154,15 @@ public class FetchWork implements Serial
   /**
    * @return the partDir
    */
-  public ArrayList<String> getPartDir() {
+  public ArrayList<Path> getPartDir() {
     return partDir;
   }
 
-  public List<Path> getPartDirPath() {
-    return FetchWork.convertStringToPathArray(partDir);
-  }
-
-  public static List<String> convertPathToStringArray(List<Path> paths) {
-    if (paths == null) {
-      return null;
-    }
-
-    List<String> pathsStr = new ArrayList<String>();
-    for (Path path : paths) {
-      pathsStr.add(path.toString());
-    }
-
-    return pathsStr;
-  }
-
-  public static List<Path> convertStringToPathArray(List<String> paths) {
-    if (paths == null) {
-      return null;
-    }
-
-    List<Path> pathsStr = new ArrayList<Path>();
-    for (String path : paths) {
-      pathsStr.add(new Path(path));
-    }
-
-    return pathsStr;
-  }
-
   /**
    * @param partDir
    *          the partDir to set
    */
-  public void setPartDir(ArrayList<String> partDir) {
+  public void setPartDir(ArrayList<Path> partDir) {
     this.partDir = partDir;
   }
 
@@ -228,7 +191,7 @@ public class FetchWork implements Serial
       // Construct a sorted Map of Partition Dir - Partition Descriptor; ordering is based on
       // partition dir (map key)
       // Assumption: there is a 1-1 mapping between partition dir and partition descriptor lists
-      TreeMap<String, PartitionDesc> partDirToPartSpecMap = new TreeMap<String, PartitionDesc>();
+      TreeMap<Path, PartitionDesc> partDirToPartSpecMap = new TreeMap<Path, PartitionDesc>();
       for (int i = 0; i < partDir.size(); i++) {
         partDirToPartSpecMap.put(partDir.get(i), partDesc.get(i));
       }
@@ -246,7 +209,7 @@ public class FetchWork implements Serial
   public List<PartitionDesc> getPartDescs(List<Path> paths) {
     List<PartitionDesc> parts = new ArrayList<PartitionDesc>(paths.size());
     for (Path path : paths) {
-      parts.add(partDesc.get(partDir.indexOf(path.getParent().toString())));
+      parts.add(partDesc.get(partDir.indexOf(path.getParent())));
     }
     return parts;
   }
@@ -319,8 +282,8 @@ public class FetchWork implements Serial
     }
 
     String ret = "partition = ";
-    for (String part : partDir) {
-      ret = ret.concat(part);
+    for (Path part : partDir) {
+      ret = ret.concat(part.toUri().toString());
     }
 
     return ret;

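Taken together, the FetchWork hunks leave two Path-based constructor shapes and retire the convertPathToStringArray/convertStringToPathArray helpers outright; the sorted-map hunk works because Path is Comparable, so ordering by partition directory is preserved. A sketch of the two shapes, with the descriptors assumed available from the caller:

    import java.util.List;

    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hive.ql.plan.FetchWork;
    import org.apache.hadoop.hive.ql.plan.PartitionDesc;
    import org.apache.hadoop.hive.ql.plan.TableDesc;

    public class FetchWorkShapesSketch {
      // Non-partitioned: a single table directory.
      static FetchWork forTable(Path tblDir, TableDesc tblDesc) {
        return new FetchWork(tblDir, tblDesc);
      }

      // Partitioned: parallel lists of directories and descriptors.
      static FetchWork forPartitions(List<Path> partDirs, List<PartitionDesc> partDescs,
          TableDesc tblDesc) {
        return new FetchWork(partDirs, partDescs, tblDesc);
      }
    }
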
Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadDesc.java?rev=1554722&r1=1554721&r2=1554722&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadDesc.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadDesc.java Thu Jan  2 02:42:15 2014
@@ -37,11 +37,7 @@ public class LoadDesc implements Seriali
     this.sourcePath = sourcePath;
   }
 
-  @Explain(displayName = "source", normalExplain = false)
-  public String getSourceDir() {
-    return sourcePath.toString();
-  }
-  
+  @Explain(displayName = "source", normalExplain = false)  
   public Path getSourcePath() {
     return sourcePath;
   }

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadMultiFilesDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadMultiFilesDesc.java?rev=1554722&r1=1554721&r2=1554722&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadMultiFilesDesc.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadMultiFilesDesc.java Thu Jan  2 02:42:15 2014
@@ -35,7 +35,7 @@ public class LoadMultiFilesDesc implemen
   // list of columns, comma separated
   private String columns;
   private String columnTypes;
-  private List<String> srcDirs;
+  private transient List<Path> srcDirs;
 
   static {
 	  PTFUtils.makeTransient(LoadMultiFilesDesc.class, "targetDirs");
@@ -43,7 +43,7 @@ public class LoadMultiFilesDesc implemen
   public LoadMultiFilesDesc() {
   }
 
-  public LoadMultiFilesDesc(final List<String> sourceDirs, final List<Path> targetDir,
+  public LoadMultiFilesDesc(final List<Path> sourceDirs, final List<Path> targetDir,
       final boolean isDfsDir, final String columns, final String columnTypes) {
 
     this.srcDirs = sourceDirs;
@@ -59,11 +59,11 @@ public class LoadMultiFilesDesc implemen
   }
 
   @Explain(displayName = "sources")
-  public List<String> getSourceDirs() {
+  public List<Path> getSourceDirs() {
     return srcDirs;
   }
 
-  public void setSourceDirs(List<String> srcs) {
+  public void setSourceDirs(List<Path> srcs) {
     this.srcDirs = srcs;
   }
 

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadTableDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadTableDesc.java?rev=1554722&r1=1554721&r2=1554722&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadTableDesc.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadTableDesc.java Thu Jan  2 02:42:15 2014
@@ -32,7 +32,6 @@ public class LoadTableDesc extends org.a
     implements Serializable {
   private static final long serialVersionUID = 1L;
   private boolean replace;
-  private String tmpDir;
   private DynamicPartitionCtx dpCtx;
   private ListBucketingCtx lbCtx;
   private boolean holdDDLTime;
@@ -47,35 +46,34 @@ public class LoadTableDesc extends org.a
     this.holdDDLTime = false;
   }
 
-  public LoadTableDesc(final Path sourcePath, final String tmpDir,
+  public LoadTableDesc(final Path sourcePath,
       final org.apache.hadoop.hive.ql.plan.TableDesc table,
       final Map<String, String> partitionSpec, final boolean replace) {
     super(sourcePath);
-    init(tmpDir, table, partitionSpec, replace);
+    init(table, partitionSpec, replace);
   }
 
-  public LoadTableDesc(final Path sourcePath, final String tmpDir,
+  public LoadTableDesc(final Path sourcePath,
       final org.apache.hadoop.hive.ql.plan.TableDesc table,
       final Map<String, String> partitionSpec) {
-    this(sourcePath, tmpDir, table, partitionSpec, true);
+    this(sourcePath, table, partitionSpec, true);
   }
 
-  public LoadTableDesc(final Path sourcePath, final String tmpDir,
+  public LoadTableDesc(final Path sourcePath,
       final org.apache.hadoop.hive.ql.plan.TableDesc table,
       final DynamicPartitionCtx dpCtx) {
     super(sourcePath);
     this.dpCtx = dpCtx;
     if (dpCtx != null && dpCtx.getPartSpec() != null && partitionSpec == null) {
-      init(tmpDir, table, dpCtx.getPartSpec(), true);
+      init(table, dpCtx.getPartSpec(), true);
     } else {
-      init(tmpDir, table, new LinkedHashMap<String, String>(), true);
+      init(table, new LinkedHashMap<String, String>(), true);
     }
   }
 
-  private void init(final String tmpDir,
+  private void init(
       final org.apache.hadoop.hive.ql.plan.TableDesc table,
       final Map<String, String> partitionSpec, final boolean replace) {
-    this.tmpDir = tmpDir;
     this.table = table;
     this.partitionSpec = partitionSpec;
     this.replace = replace;
@@ -90,15 +88,6 @@ public class LoadTableDesc extends org.a
     return holdDDLTime;
   }
 
-  @Explain(displayName = "tmp directory", normalExplain = false)
-  public String getTmpDir() {
-    return tmpDir;
-  }
-
-  public void setTmpDir(final String tmp) {
-    tmpDir = tmp;
-  }
-
   @Explain(displayName = "table")
   public TableDesc getTable() {
     return table;

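After this hunk, LoadTableDesc carries only the source Path inherited from LoadDesc; the parallel tmpDir string and its getter/setter are gone, so the two can no longer drift apart. A sketch of the slimmed constructor surface; values are illustrative:

    import java.util.LinkedHashMap;

    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hive.ql.plan.LoadTableDesc;
    import org.apache.hadoop.hive.ql.plan.TableDesc;

    public class LoadTableDescSketch {
      static LoadTableDesc replaceLoad(Path source, TableDesc table) {
        // The partition-spec form defaults replace to true.
        return new LoadTableDesc(source, table, new LinkedHashMap<String, String>());
      }
    }
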
Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/MoveWork.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/MoveWork.java?rev=1554722&r1=1554721&r2=1554722&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/MoveWork.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/MoveWork.java Thu Jan  2 02:42:15 2014
@@ -19,7 +19,6 @@
 package org.apache.hadoop.hive.ql.plan;
 
 import java.io.Serializable;
-import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.List;
 
@@ -39,7 +38,6 @@ public class MoveWork implements Seriali
   private LoadMultiFilesDesc loadMultiFilesWork;
 
   private boolean checkFileFormat;
-  ArrayList<String> dpSpecPaths; // dynamic partition specified paths -- the root of DP columns
 
   /**
    * ReadEntitites that are passed to the hooks.
@@ -72,14 +70,6 @@ public class MoveWork implements Seriali
     this.checkFileFormat = checkFileFormat;
   }
 
-  public void setDPSpecPaths(ArrayList<String> dpsp) {
-    dpSpecPaths = dpsp;
-  }
-
-  public ArrayList<String> getDPSpecPaths() {
-    return dpSpecPaths;
-  }
-
   @Explain(displayName = "tables")
   public LoadTableDesc getLoadTableWork() {
     return loadTableWork;


