hive-commits mailing list archives

From w...@apache.org
Subject hive git commit: HIVE-14167 : Use work directories provided by Tez instead of directly using YARN local dirs (Wei Zheng, reviewed by Siddharth Seth)
Date Thu, 21 Jul 2016 20:53:01 GMT
Repository: hive
Updated Branches:
  refs/heads/branch-2.1 21466bb3f -> e96994b10


HIVE-14167 : Use work directories provided by Tez instead of directly using YARN local dirs
(Wei Zheng, reviewed by Siddharth Seth)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/e96994b1
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/e96994b1
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/e96994b1

Branch: refs/heads/branch-2.1
Commit: e96994b1078ee8b79d59beee481d96b922147650
Parents: 21466bb
Author: Wei Zheng <weiz@apache.org>
Authored: Thu Jul 21 13:52:48 2016 -0700
Committer: Wei Zheng <weiz@apache.org>
Committed: Thu Jul 21 13:54:12 2016 -0700

----------------------------------------------------------------------
 .../org/apache/hadoop/hive/llap/LlapUtil.java   |  6 -----
 .../hive/llap/daemon/impl/LlapDaemon.java       |  3 ++-
 .../persistence/HybridHashTableContainer.java   |  3 ++-
 .../hive/ql/exec/persistence/RowContainer.java  | 10 ++-------
 .../hadoop/hive/ql/metadata/HiveUtils.java      | 23 ++++++++++++++++++++
 5 files changed, 29 insertions(+), 16 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/e96994b1/llap-common/src/java/org/apache/hadoop/hive/llap/LlapUtil.java
----------------------------------------------------------------------
diff --git a/llap-common/src/java/org/apache/hadoop/hive/llap/LlapUtil.java b/llap-common/src/java/org/apache/hadoop/hive/llap/LlapUtil.java
index 505ddb1..06323f5 100644
--- a/llap-common/src/java/org/apache/hadoop/hive/llap/LlapUtil.java
+++ b/llap-common/src/java/org/apache/hadoop/hive/llap/LlapUtil.java
@@ -27,12 +27,6 @@ import org.slf4j.LoggerFactory;
 public class LlapUtil {
   private static final Logger LOG = LoggerFactory.getLogger(LlapUtil.class);
 
-  public static String getDaemonLocalDirList(Configuration conf) {
-    String localDirList = HiveConf.getVar(conf, ConfVars.LLAP_DAEMON_WORK_DIRS);
-    if (localDirList != null && !localDirList.isEmpty()) return localDirList;
-    return conf.get("yarn.nodemanager.local-dirs");
-  }
-
   public static UserGroupInformation loginWithKerberos(
       String principal, String keytabFile) throws IOException {
     if (!UserGroupInformation.isSecurityEnabled()) return null;

http://git-wip-us.apache.org/repos/asf/hive/blob/e96994b1/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/LlapDaemon.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/LlapDaemon.java b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/LlapDaemon.java
index 2faedcd..d6cf01d 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/LlapDaemon.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/LlapDaemon.java
@@ -57,6 +57,7 @@ import org.apache.hadoop.hive.llap.registry.impl.LlapRegistryService;
 import org.apache.hadoop.hive.llap.shufflehandler.ShuffleHandler;
 import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
 import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.metadata.HiveUtils;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge.UdfWhitelistChecker;
@@ -420,7 +421,7 @@ public class LlapDaemon extends CompositeService implements ContainerRunner, Lla
 
       int numExecutors = HiveConf.getIntVar(daemonConf, ConfVars.LLAP_DAEMON_NUM_EXECUTORS);
 
-      String localDirList = LlapUtil.getDaemonLocalDirList(daemonConf);
+      String localDirList = HiveUtils.getLocalDirList(daemonConf);
       String[] localDirs = (localDirList == null || localDirList.isEmpty()) ?
           new String[0] : StringUtils.getTrimmedStrings(localDirList);
       int rpcPort = HiveConf.getIntVar(daemonConf, ConfVars.LLAP_DAEMON_RPC_PORT);

http://git-wip-us.apache.org/repos/asf/hive/blob/e96994b1/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HybridHashTableContainer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HybridHashTableContainer.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HybridHashTableContainer.java
index 233f66b..e4a2b35 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HybridHashTableContainer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HybridHashTableContainer.java
@@ -45,6 +45,7 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriter;
 import org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinRowBytesContainer;
 import org.apache.hadoop.hive.ql.io.HiveKey;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.HiveUtils;
 import org.apache.hadoop.hive.serde2.ByteStream.Output;
 import org.apache.hadoop.hive.serde2.SerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
@@ -276,7 +277,7 @@ public class HybridHashTableContainer
         HiveConf.getIntVar(hconf, HiveConf.ConfVars.HIVEHYBRIDGRACEHASHJOINMINNUMPARTITIONS),
         HiveConf.getFloatVar(hconf, HiveConf.ConfVars.HIVEMAPJOINOPTIMIZEDTABLEPROBEPERCENT),
         estimatedTableSize, keyCount, memoryAvailable, nwayConf,
-        RowContainer.getLocalDirsForSpillFiles(hconf));
+        HiveUtils.getLocalDirList(hconf));
   }
 
   private HybridHashTableContainer(float keyCountAdj, int threshold, float loadFactor,

http://git-wip-us.apache.org/repos/asf/hive/blob/e96994b1/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/RowContainer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/RowContainer.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/RowContainer.java
index 893d265..e928719 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/RowContainer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/RowContainer.java
@@ -32,13 +32,12 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.LocalFileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.FileUtils;
-import org.apache.hadoop.hive.llap.LlapUtil;
-import org.apache.hadoop.hive.llap.io.api.LlapProxy;
 import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
 import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.HiveUtils;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.serde2.SerDe;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -135,7 +134,7 @@ public class RowContainer<ROW extends List<Object>>
     this.size = 0;
     this.itrCursor = 0;
     this.addCursor = 0;
-    this.spillFileDirs = getLocalDirsForSpillFiles(jc);
+    this.spillFileDirs = HiveUtils.getLocalDirList(jc);
     this.numFlushedBlocks = 0;
     this.tmpFile = null;
     this.currentWriteBlock = (ROW[]) new ArrayList[blockSize];
@@ -151,11 +150,6 @@ public class RowContainer<ROW extends List<Object>>
     }
   }
 
-  public static String getLocalDirsForSpillFiles(Configuration conf) {
-    return LlapProxy.isDaemon()
-        ? LlapUtil.getDaemonLocalDirList(conf) : conf.get("yarn.nodemanager.local-dirs");
-  }
-
   private JobConf getLocalFSJobConfClone(Configuration jc) {
     if (this.jobCloneUsingLocalFs == null) {
       this.jobCloneUsingLocalFs = new JobConf(jc);

http://git-wip-us.apache.org/repos/asf/hive/blob/e96994b1/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java
index feb471a..47a93fb 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java
@@ -26,8 +26,10 @@ import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.llap.io.api.LlapProxy;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.exec.tez.TezContext;
 import org.apache.hadoop.hive.ql.index.HiveIndexHandler;
 import org.apache.hadoop.hive.ql.security.HadoopDefaultAuthenticator;
 import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
@@ -38,6 +40,7 @@ import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizerFac
 import org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * General collection of helper functions.
@@ -451,4 +454,24 @@ public final class HiveUtils {
     }
     return sb.toString();
   }
+
+  public static String getLocalDirList(Configuration conf) {
+    String localDirList;
+
+    if (LlapProxy.isDaemon()) {
+      localDirList = HiveConf.getVar(conf, HiveConf.ConfVars.LLAP_DAEMON_WORK_DIRS);
+      if (localDirList != null && !localDirList.isEmpty()) {
+        return localDirList;
+      } // otherwise, fall back to use tez work dirs
+    }
+
+    if (HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE).equals("tez")) {
+      TezContext tezContext = (TezContext) TezContext.get();
+      if (tezContext != null && tezContext.getTezProcessorContext() != null) {
+        return StringUtils.arrayToString(tezContext.getTezProcessorContext().getWorkDirs());
+      } // otherwise fall back to return null, i.e. to use local tmp dir only
+    }
+
+    return null;
+  }
 }
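
For reference, a minimal usage sketch (not part of the patch) of the new HiveUtils.getLocalDirList helper, mirroring how the LlapDaemon hunk above consumes the returned comma-separated directory list. The class name WorkDirListExample and the bare Configuration are illustrative assumptions only:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.ql.metadata.HiveUtils;
import org.apache.hadoop.util.StringUtils;

public class WorkDirListExample {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Resolution order introduced by this patch: LLAP daemon work dirs (when
    // running inside an LLAP daemon), then Tez-provided work dirs, otherwise
    // null, in which case callers fall back to the local tmp dir.
    String localDirList = HiveUtils.getLocalDirList(conf);
    String[] localDirs = (localDirList == null || localDirList.isEmpty())
        ? new String[0] : StringUtils.getTrimmedStrings(localDirList);
    for (String dir : localDirs) {
      System.out.println("work dir: " + dir);
    }
  }
}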

