hive-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From xu...@apache.org
Subject [1/5] hive git commit: HIVE-10999: Upgrade Spark dependency to 1.4 [Spark Branch] (Rui reviewed by Chengxiang & Xuefu)
Date Sun, 02 Aug 2015 03:35:13 GMT
Repository: hive
Updated Branches:
  refs/heads/branch-1 9e13be3e3 -> a56d9f739


HIVE-10999: Upgrade Spark dependency to 1.4 [Spark Branch] (Rui reviewed by Chengxiang & Xuefu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/488fa113
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/488fa113
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/488fa113

Branch: refs/heads/branch-1
Commit: 488fa11303ad29ae4d19d67739abe292ef193b71
Parents: 9e13be3
Author: Rui Li <rui.li@intel.com>
Authored: Wed Jun 24 15:58:55 2015 +0800
Committer: xzhang <xzhang@xzdt>
Committed: Sat Aug 1 20:24:50 2015 -0700

----------------------------------------------------------------------
 pom.xml                                                |  2 +-
 ql/pom.xml                                             |  5 +++++
 .../java/org/apache/hadoop/hive/ql/exec/Utilities.java |  1 +
 .../hadoop/hive/ql/exec/spark/KryoSerializer.java      |  4 ++++
 .../hive/ql/exec/spark/RemoteHiveSparkClient.java      |  1 +
 spark-client/pom.xml                                   |  5 +++++
 .../apache/hive/spark/client/SparkClientUtilities.java | 13 ++++++++-----
 .../org/apache/hive/spark/client/TestSparkClient.java  |  4 ++--
 8 files changed, 27 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/488fa113/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 82ac4e8..dd68fd5 100644
--- a/pom.xml
+++ b/pom.xml
@@ -160,7 +160,7 @@
     <ST4.version>4.0.4</ST4.version>
     <tez.version>0.5.2</tez.version>
     <super-csv.version>2.2.0</super-csv.version>
-    <spark.version>1.3.1</spark.version>
+    <spark.version>1.4.0</spark.version>
     <scala.binary.version>2.10</scala.binary.version>
     <scala.version>2.10.4</scala.version>
     <tempus-fugit.version>1.1</tempus-fugit.version>

http://git-wip-us.apache.org/repos/asf/hive/blob/488fa113/ql/pom.xml
----------------------------------------------------------------------
diff --git a/ql/pom.xml b/ql/pom.xml
index 8c8a49d..97a0a71 100644
--- a/ql/pom.xml
+++ b/ql/pom.xml
@@ -474,6 +474,11 @@
       <version>${spark.version}</version>
       <optional>true</optional>
     </dependency>
+    <dependency>
+      <groupId>com.sun.jersey</groupId>
+      <artifactId>jersey-servlet</artifactId>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
 
   <profiles>

http://git-wip-us.apache.org/repos/asf/hive/blob/488fa113/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
index 7280674..04df06b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
@@ -386,6 +386,7 @@ public final class Utilities {
           ClassLoader loader = Thread.currentThread().getContextClassLoader();
           ClassLoader newLoader = addToClassPath(loader, addedJars.split(";"));
           Thread.currentThread().setContextClassLoader(newLoader);
+          runtimeSerializationKryo.get().setClassLoader(newLoader);
         }
       }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/488fa113/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/KryoSerializer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/KryoSerializer.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/KryoSerializer.java
index ff9fb85..f1d7368 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/KryoSerializer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/KryoSerializer.java
@@ -80,4 +80,8 @@ public class KryoSerializer {
     return conf;
   }
 
+  public static void setClassLoader(ClassLoader classLoader) {
+    Utilities.sparkSerializationKryo.get().setClassLoader(classLoader);
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/488fa113/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java
index 8b15099..4073d2b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java
@@ -232,6 +232,7 @@ public class RemoteHiveSparkClient implements HiveSparkClient {
       Set<String> addedJars = jc.getAddedJars();
       if (addedJars != null && !addedJars.isEmpty()) {
         SparkClientUtilities.addToClassPath(addedJars, localJobConf, jc.getLocalTmpDir());
+        KryoSerializer.setClassLoader(Thread.currentThread().getContextClassLoader());
         localJobConf.set(Utilities.HIVE_ADDED_JARS, StringUtils.join(addedJars, ";"));
       }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/488fa113/spark-client/pom.xml
----------------------------------------------------------------------
diff --git a/spark-client/pom.xml b/spark-client/pom.xml
index f0a8458..4fccf80 100644
--- a/spark-client/pom.xml
+++ b/spark-client/pom.xml
@@ -70,6 +70,11 @@
       <artifactId>mockito-all</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>com.sun.jersey</groupId>
+      <artifactId>jersey-servlet</artifactId>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
 
   <build>

http://git-wip-us.apache.org/repos/asf/hive/blob/488fa113/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientUtilities.java
----------------------------------------------------------------------
diff --git a/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientUtilities.java b/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientUtilities.java
index b079ee2..589436d 100644
--- a/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientUtilities.java
+++ b/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientUtilities.java
@@ -43,21 +43,24 @@ public class SparkClientUtilities {
    */
   public static void addToClassPath(Set<String> newPaths, Configuration conf, File localTmpDir)
       throws Exception {
-    ClassLoader cloader = Thread.currentThread().getContextClassLoader();
-    URLClassLoader loader = (URLClassLoader) cloader;
+    URLClassLoader loader = (URLClassLoader) Thread.currentThread().getContextClassLoader();
     List<URL> curPath = Lists.newArrayList(loader.getURLs());
 
+    boolean newPathAdded = false;
     for (String newPath : newPaths) {
       URL newUrl = urlFromPathString(newPath, conf, localTmpDir);
       if (newUrl != null && !curPath.contains(newUrl)) {
         curPath.add(newUrl);
         LOG.info("Added jar[" + newUrl + "] to classpath.");
+        newPathAdded = true;
       }
     }
 
-    URLClassLoader newLoader =
-        new URLClassLoader(curPath.toArray(new URL[curPath.size()]), loader);
-    Thread.currentThread().setContextClassLoader(newLoader);
+    if (newPathAdded) {
+      URLClassLoader newLoader =
+          new URLClassLoader(curPath.toArray(new URL[curPath.size()]), loader);
+      Thread.currentThread().setContextClassLoader(newLoader);
+    }
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hive/blob/488fa113/spark-client/src/test/java/org/apache/hive/spark/client/TestSparkClient.java
----------------------------------------------------------------------
diff --git a/spark-client/src/test/java/org/apache/hive/spark/client/TestSparkClient.java b/spark-client/src/test/java/org/apache/hive/spark/client/TestSparkClient.java
index d33ad7e..ea83125 100644
--- a/spark-client/src/test/java/org/apache/hive/spark/client/TestSparkClient.java
+++ b/spark-client/src/test/java/org/apache/hive/spark/client/TestSparkClient.java
@@ -168,7 +168,7 @@ public class TestSparkClient {
         future.get(TIMEOUT, TimeUnit.SECONDS);
         MetricsCollection metrics = future.getMetrics();
         assertEquals(1, metrics.getJobIds().size());
-        assertTrue(metrics.getAllMetrics().executorRunTime > 0L);
+        assertTrue(metrics.getAllMetrics().executorRunTime >= 0L);
         verify(listener).onSparkJobStarted(same(future),
           eq(metrics.getJobIds().iterator().next()));
 
@@ -179,7 +179,7 @@ public class TestSparkClient {
         MetricsCollection metrics2 = future2.getMetrics();
         assertEquals(1, metrics2.getJobIds().size());
         assertFalse(Objects.equal(metrics.getJobIds(), metrics2.getJobIds()));
-        assertTrue(metrics2.getAllMetrics().executorRunTime > 0L);
+        assertTrue(metrics2.getAllMetrics().executorRunTime >= 0L);
         verify(listener2).onSparkJobStarted(same(future2),
           eq(metrics2.getJobIds().iterator().next()));
       }


Mime
View raw message