hive-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From sze...@apache.org
Subject [09/44] hive git commit: HIVE-9671 - Support Impersonation [Spark Branch] (Brock via Xuefu)
Date Thu, 23 Apr 2015 02:34:18 GMT
HIVE-9671 - Support Impersonation [Spark Branch] (Brock via Xuefu)

git-svn-id: https://svn.apache.org/repos/asf/hive/branches/spark@1661599 13f79535-47bb-0310-9956-ffa450edef68


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/cc3ed373
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/cc3ed373
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/cc3ed373

Branch: refs/heads/master
Commit: cc3ed37371d9ee9275a783d10bf8c6594e0e038f
Parents: 0b63875
Author: Brock Noland <brock@apache.org>
Authored: Mon Feb 23 02:44:47 2015 +0000
Committer: Szehon Ho <szehon@cloudera.com>
Committed: Wed Apr 22 19:33:49 2015 -0700

----------------------------------------------------------------------
 .../java/org/apache/hadoop/hive/ql/QTestUtil.java    |  8 +++++---
 .../org/apache/hadoop/hive/shims/Hadoop23Shims.java  | 15 ++++++++++++++-
 .../org/apache/hive/spark/client/RemoteDriver.java   |  8 ++++++--
 .../apache/hive/spark/client/SparkClientImpl.java    | 11 +++++++++++
 4 files changed, 36 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/cc3ed373/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
----------------------------------------------------------------------
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
index f52350d..f6081f4 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -913,13 +913,15 @@ public class QTestUtil {
             long endTime = System.currentTimeMillis() + 240000;
             while (sparkSession.getMemoryAndCores().getSecond() <= 1) {
               if (System.currentTimeMillis() >= endTime) {
-                LOG.error("Timed out waiting for Spark cluster to init");
-                break;
+                String msg = "Timed out waiting for Spark cluster to init";
+                throw new IllegalStateException(msg);
               }
               Thread.sleep(100);
             }
           } catch (Exception e) {
-            LOG.error(e);
+            String msg = "Error trying to obtain executor info: " + e;
+            LOG.error(msg, e);
+            throw new IllegalStateException(msg, e);
           }
         }
       }

http://git-wip-us.apache.org/repos/asf/hive/blob/cc3ed373/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
----------------------------------------------------------------------
diff --git a/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java b/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
index 997e49d..2997286 100644
--- a/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
+++ b/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
@@ -428,6 +428,18 @@ public class Hadoop23Shims extends HadoopShimsSecure {
     }
   }
 
+  private void configureImpersonation(Configuration conf) {
+    String user;
+    try {
+      user = Utils.getUGI().getShortUserName();
+    } catch (Exception e) {
+      String msg = "Cannot obtain username: " + e;
+      throw new IllegalStateException(msg, e);
+    }
+    conf.set("hadoop.proxyuser." + user + ".groups", "*");
+    conf.set("hadoop.proxyuser." + user + ".hosts", "*");
+  }
+
   /**
    * Returns a shim to wrap MiniSparkOnYARNCluster
    */
@@ -447,10 +459,10 @@ public class Hadoop23Shims extends HadoopShimsSecure {
 
     public MiniSparkShim(Configuration conf, int numberOfTaskTrackers,
       String nameNode, int numDir) throws IOException {
-
       mr = new MiniSparkOnYARNCluster("sparkOnYarn");
       conf.set("fs.defaultFS", nameNode);
       conf.set("yarn.resourcemanager.scheduler.class", "org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.FairScheduler");
+      configureImpersonation(conf);
       mr.init(conf);
       mr.start();
       this.conf = mr.getConfig();
@@ -505,6 +517,7 @@ public class Hadoop23Shims extends HadoopShimsSecure {
       int numDataNodes,
       boolean format,
       String[] racks) throws IOException {
+    configureImpersonation(conf);
     MiniDFSCluster miniDFSCluster = new MiniDFSCluster(conf, numDataNodes, format, racks);
 
     // Need to set the client's KeyProvider to the NN's for JKS,

http://git-wip-us.apache.org/repos/asf/hive/blob/cc3ed373/spark-client/src/main/java/org/apache/hive/spark/client/RemoteDriver.java
----------------------------------------------------------------------
diff --git a/spark-client/src/main/java/org/apache/hive/spark/client/RemoteDriver.java b/spark-client/src/main/java/org/apache/hive/spark/client/RemoteDriver.java
index c2ac0c2..4e15902 100644
--- a/spark-client/src/main/java/org/apache/hive/spark/client/RemoteDriver.java
+++ b/spark-client/src/main/java/org/apache/hive/spark/client/RemoteDriver.java
@@ -166,7 +166,7 @@ public class RemoteDriver {
         jcLock.notifyAll();
       }
     } catch (Exception e) {
-      LOG.error("Failed to start SparkContext.", e);
+      LOG.error("Failed to start SparkContext: " + e, e);
       shutdown(e);
       synchronized (jcLock) {
         jcLock.notifyAll();
@@ -203,7 +203,11 @@ public class RemoteDriver {
 
   private synchronized void shutdown(Throwable error) {
     if (running) {
-      LOG.info("Shutting down remote driver.");
+      if (error == null) {
+        LOG.info("Shutting down remote driver.");
+      } else {
+        LOG.error("Shutting down remote driver due to error: " + error, error);
+      }
       running = false;
       for (JobWrapper<?> job : activeJobs.values()) {
         cancelJob(job);

http://git-wip-us.apache.org/repos/asf/hive/blob/cc3ed373/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java
----------------------------------------------------------------------
diff --git a/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java b/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java
index 9f9a1c1..ba08106 100644
--- a/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java
+++ b/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java
@@ -47,6 +47,7 @@ import java.util.concurrent.Future;
 import java.util.concurrent.atomic.AtomicInteger;
 
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.shims.Utils;
 import org.apache.hive.spark.client.rpc.Rpc;
 import org.apache.hive.spark.client.rpc.RpcConfiguration;
 import org.apache.hive.spark.client.rpc.RpcServer;
@@ -350,6 +351,16 @@ class SparkClientImpl implements SparkClient {
         }
       }
 
+      if (hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS)) {
+        argv.add("--proxy-user");
+        try {
+          argv.add(Utils.getUGI().getShortUserName());
+        } catch (Exception e) {
+          String msg = "Cannot obtain username: " + e;
+          throw new IllegalStateException(msg, e);
+        }
+      }
+
       argv.add("--properties-file");
       argv.add(properties.getAbsolutePath());
       argv.add("--class");


Mime
View raw message