hive-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From li...@apache.org
Subject hive git commit: HIVE-10816: NPE in ExecDriver::handleSampling when submitted via child JVM (Rui reviewed by Xuefu)
Date Tue, 09 Jun 2015 07:35:08 GMT
Repository: hive
Updated Branches:
  refs/heads/master 8f9d96400 -> 735ba0d87


HIVE-10816: NPE in ExecDriver::handleSampling when submitted via child JVM (Rui reviewed by Xuefu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/735ba0d8
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/735ba0d8
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/735ba0d8

Branch: refs/heads/master
Commit: 735ba0d872ddfbe0470497576904d721350548a4
Parents: 8f9d964
Author: Rui Li <rui.li@intel.com>
Authored: Tue Jun 9 15:30:14 2015 +0800
Committer: Rui Li <rui.li@intel.com>
Committed: Tue Jun 9 15:34:42 2015 +0800

----------------------------------------------------------------------
 .../apache/hadoop/hive/ql/exec/PartitionKeySampler.java |  9 +++++----
 .../org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java   | 12 ++++++------
 2 files changed, 11 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/735ba0d8/ql/src/java/org/apache/hadoop/hive/ql/exec/PartitionKeySampler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/PartitionKeySampler.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/PartitionKeySampler.java
index 96f4530..dc1b601 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/PartitionKeySampler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/PartitionKeySampler.java
@@ -112,7 +112,7 @@ public class PartitionKeySampler implements OutputCollector<HiveKey, Object> {
     return partitionKeys;
   }
 
-  public void writePartitionKeys(Path path, HiveConf conf, JobConf job) throws IOException {
+  public void writePartitionKeys(Path path, JobConf job) throws IOException {
     byte[][] partitionKeys = getPartitionKeys(job.getNumReduceTasks());
     int numPartition = partitionKeys.length + 1;
     if (numPartition != job.getNumReduceTasks()) {
@@ -133,10 +133,11 @@ public class PartitionKeySampler implements OutputCollector<HiveKey, Object> {
   }
 
   // random sampling
-  public static FetchOperator createSampler(FetchWork work, HiveConf conf, JobConf job,
+  public static FetchOperator createSampler(FetchWork work, JobConf job,
       Operator<?> operator) throws HiveException {
-    int sampleNum = conf.getIntVar(HiveConf.ConfVars.HIVESAMPLINGNUMBERFORORDERBY);
-    float samplePercent = conf.getFloatVar(HiveConf.ConfVars.HIVESAMPLINGPERCENTFORORDERBY);
+    int sampleNum = HiveConf.getIntVar(job, HiveConf.ConfVars.HIVESAMPLINGNUMBERFORORDERBY);
+    float samplePercent =
+        HiveConf.getFloatVar(job, HiveConf.ConfVars.HIVESAMPLINGPERCENTFORORDERBY);
     if (samplePercent < 0.0 || samplePercent > 1.0) {
       throw new IllegalArgumentException("Percentile value must be within the range of 0 to 1.");
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/735ba0d8/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
index e4f9543..a2cf712 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
@@ -376,7 +376,7 @@ public class ExecDriver extends Task<MapredWork> implements Serializable, Hadoop
 
       if (mWork.getSamplingType() > 0 && rWork != null && job.getNumReduceTasks() > 1) {
         try {
-          handleSampling(driverContext, mWork, job, conf);
+          handleSampling(ctx, mWork, job);
           job.setPartitionerClass(HiveTotalOrderPartitioner.class);
         } catch (IllegalStateException e) {
           console.printInfo("Not enough sampling data.. Rolling back to single reducer task");
@@ -496,7 +496,7 @@ public class ExecDriver extends Task<MapredWork> implements Serializable, Hadoop
     return (returnVal);
   }
 
-  private void handleSampling(DriverContext context, MapWork mWork, JobConf job, HiveConf conf)
+  private void handleSampling(Context context, MapWork mWork, JobConf job)
       throws Exception {
     assert mWork.getAliasToWork().keySet().size() == 1;
 
@@ -512,7 +512,7 @@ public class ExecDriver extends Task<MapredWork> implements Serializable, Hadoop
       inputPaths.add(new Path(path));
     }
 
-    Path tmpPath = context.getCtx().getExternalTmpPath(inputPaths.get(0));
+    Path tmpPath = context.getExternalTmpPath(inputPaths.get(0));
     Path partitionFile = new Path(tmpPath, ".partitions");
     ShimLoader.getHadoopShims().setTotalOrderPartitionFile(job, partitionFile);
     PartitionKeySampler sampler = new PartitionKeySampler();
@@ -541,9 +541,9 @@ public class ExecDriver extends Task<MapredWork> implements Serializable, Hadoop
       fetchWork.setSource(ts);
 
       // random sampling
-      FetchOperator fetcher = PartitionKeySampler.createSampler(fetchWork, conf, job, ts);
+      FetchOperator fetcher = PartitionKeySampler.createSampler(fetchWork, job, ts);
       try {
-        ts.initialize(conf, new ObjectInspector[]{fetcher.getOutputObjectInspector()});
+        ts.initialize(job, new ObjectInspector[]{fetcher.getOutputObjectInspector()});
         OperatorUtils.setChildrenCollector(ts.getChildOperators(), sampler);
         while (fetcher.pushRow()) { }
       } finally {
@@ -552,7 +552,7 @@ public class ExecDriver extends Task<MapredWork> implements Serializable, Hadoop
     } else {
       throw new IllegalArgumentException("Invalid sampling type " + mWork.getSamplingType());
     }
-    sampler.writePartitionKeys(partitionFile, conf, job);
+    sampler.writePartitionKeys(partitionFile, job);
   }
 
   /**


Mime
View raw message