hive-commits mailing list archives

From: ser...@apache.org
Subject: hive git commit: HIVE-13930 : upgrade Hive to Hadoop 2.7.2 (Sergey Shelukhin, reviewed by Sergio Peña)
Date: Mon, 29 Aug 2016 18:33:51 GMT
Repository: hive
Updated Branches:
  refs/heads/master c97450cf4 -> cf9538b9b


HIVE-13930 : upgrade Hive to Hadoop 2.7.2 (Sergey Shelukhin, reviewed by Sergio Peña)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/cf9538b9
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/cf9538b9
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/cf9538b9

Branch: refs/heads/master
Commit: cf9538b9b2860247babed885afcaa60528a6340a
Parents: c97450c
Author: Sergey Shelukhin <sershe@apache.org>
Authored: Mon Aug 29 11:28:36 2016 -0700
Committer: Sergey Shelukhin <sershe@apache.org>
Committed: Mon Aug 29 11:28:36 2016 -0700

----------------------------------------------------------------------
 itests/hive-unit-hadoop2/pom.xml                |  6 +++
 itests/hive-unit/pom.xml                        | 18 +++++++
 itests/qtest/pom.xml                            | 12 +++++
 pom.xml                                         |  6 +--
 .../clientpositive/encryption_move_tbl.q        | 15 +++++-
 .../encrypted/encryption_move_tbl.q.out         | 57 +++++++++++++++++---
 shims/0.23/pom.xml                              | 16 ++++++
 .../apache/hadoop/hive/shims/ShimLoader.java    | 10 +++-
 .../hive/spark/client/TestSparkClient.java      | 47 +++++++++++++++-
 9 files changed, 173 insertions(+), 14 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/cf9538b9/itests/hive-unit-hadoop2/pom.xml
----------------------------------------------------------------------
diff --git a/itests/hive-unit-hadoop2/pom.xml b/itests/hive-unit-hadoop2/pom.xml
index fbf1c73..44135d6 100644
--- a/itests/hive-unit-hadoop2/pom.xml
+++ b/itests/hive-unit-hadoop2/pom.xml
@@ -181,6 +181,12 @@
       <scope>test</scope>
     </dependency>
     <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-common</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-server</artifactId>
       <version>${hbase.version}</version>

http://git-wip-us.apache.org/repos/asf/hive/blob/cf9538b9/itests/hive-unit/pom.xml
----------------------------------------------------------------------
diff --git a/itests/hive-unit/pom.xml b/itests/hive-unit/pom.xml
index b241daa..cd209b4 100644
--- a/itests/hive-unit/pom.xml
+++ b/itests/hive-unit/pom.xml
@@ -212,6 +212,12 @@
       <scope>test</scope>
     </dependency>
     <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-common</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-server</artifactId>
       <version>${hbase.version}</version>
@@ -354,6 +360,18 @@
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-common</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-api</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-yarn-registry</artifactId>
       <version>${hadoop.version}</version>
       <optional>true</optional>

http://git-wip-us.apache.org/repos/asf/hive/blob/cf9538b9/itests/qtest/pom.xml
----------------------------------------------------------------------
diff --git a/itests/qtest/pom.xml b/itests/qtest/pom.xml
index ed44bb8..7fc72b9 100644
--- a/itests/qtest/pom.xml
+++ b/itests/qtest/pom.xml
@@ -248,6 +248,18 @@
       <scope>test</scope>
     </dependency>
     <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-common</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-api</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-common</artifactId>
       <version>${hbase.version}</version>

http://git-wip-us.apache.org/repos/asf/hive/blob/cf9538b9/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 9ed1c19..4c41200 100644
--- a/pom.xml
+++ b/pom.xml
@@ -132,7 +132,7 @@
     <dropwizard-metrics-hadoop-metrics2-reporter.version>0.1.2</dropwizard-metrics-hadoop-metrics2-reporter.version>
     <guava.version>14.0.1</guava.version>
     <groovy.version>2.4.4</groovy.version>
-    <hadoop.version>2.6.1</hadoop.version>
+    <hadoop.version>2.7.2</hadoop.version>
     <hadoop.bin.path>${basedir}/${hive.path.to.root}/testutils/hadoop</hadoop.bin.path>
     <hbase.version>1.1.1</hbase.version>
     <!-- required for logging test to avoid including hbase which pulls disruptor transitively
-->
@@ -141,7 +141,7 @@
     <httpcomponents.client.version>4.4</httpcomponents.client.version>
     <httpcomponents.core.version>4.4</httpcomponents.core.version>
     <ivy.version>2.4.0</ivy.version>
-    <jackson.version>1.9.2</jackson.version>
+    <jackson.version>1.9.13</jackson.version>
     <!-- jackson 1 and 2 lines can coexist without issue, as they have different artifactIds -->
     <jackson.new.version>2.4.2</jackson.new.version>
     <jasper.version>5.5.23</jasper.version>
@@ -187,7 +187,7 @@
     <zookeeper.version>3.4.6</zookeeper.version>
     <jpam.version>1.1</jpam.version>
     <felix.version>2.4.0</felix.version>
-    <curator.version>2.6.0</curator.version>
+    <curator.version>2.7.1</curator.version>
     <jsr305.version>3.0.0</jsr305.version>
     <tephra.version>0.6.0</tephra.version>
     <gson.version>2.2.4</gson.version>
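
The property bumps above only take effect if nothing else pins an older Hadoop on the test classpath. As a quick sanity check, a minimal sketch (the class name and main wrapper are illustrative, not part of this commit) reading the resolved Hadoop version at runtime via VersionInfo, the same class ShimLoader consults when picking a shim:

import org.apache.hadoop.util.VersionInfo;

// Illustrative check, not part of this commit: print the Hadoop version
// actually resolved on the classpath after the pom bump to 2.7.2.
public class HadoopVersionCheck {
  public static void main(String[] args) {
    // ShimLoader keys off the major version ("2" maps to the 0.23 shim).
    System.out.println("Hadoop version: " + VersionInfo.getVersion());
    System.out.println("Built by: " + VersionInfo.getUser()
        + " from branch " + VersionInfo.getBranch());
  }
}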

http://git-wip-us.apache.org/repos/asf/hive/blob/cf9538b9/ql/src/test/queries/clientpositive/encryption_move_tbl.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/encryption_move_tbl.q b/ql/src/test/queries/clientpositive/encryption_move_tbl.q
index 5a8c036..a25d955 100644
--- a/ql/src/test/queries/clientpositive/encryption_move_tbl.q
+++ b/ql/src/test/queries/clientpositive/encryption_move_tbl.q
@@ -5,16 +5,27 @@
 set hive.cli.errors.ignore=true;
 
 DROP TABLE IF EXISTS encrypted_table PURGE;
+DROP DATABASE IF EXISTS encrypted_db;
 CREATE TABLE encrypted_table (key INT, value STRING) LOCATION '${hiveconf:hive.metastore.warehouse.dir}/default/encrypted_table';
 CRYPTO CREATE_KEY --keyName key_128 --bitLength 128;
 CRYPTO CREATE_ZONE --keyName key_128 --path ${hiveconf:hive.metastore.warehouse.dir}/default/encrypted_table;
 
+CREATE DATABASE encrypted_db LOCATION '${hiveconf:hive.metastore.warehouse.dir}/encrypted_db';
+CRYPTO CREATE_KEY --keyName key_128_2 --bitLength 128;
+CRYPTO CREATE_ZONE --keyName key_128_2 --path ${hiveconf:hive.metastore.warehouse.dir}/encrypted_db;
+
 INSERT OVERWRITE TABLE encrypted_table SELECT * FROM src;
 SHOW TABLES;
+-- should fail
+ALTER TABLE default.encrypted_table RENAME TO encrypted_db.encrypted_table_2;
+SHOW TABLES;
+-- should succeed in Hadoop 2.7 but fail in 2.6  (HDFS-7530)
 ALTER TABLE default.encrypted_table RENAME TO default.plain_table;
 SHOW TABLES;
 
-DROP TABLE encrypted_table PURGE;
 
+DROP TABLE encrypted_table PURGE;
+DROP TABLE default.plain_table PURGE;
+DROP DATABASE encrypted_db;
 CRYPTO DELETE_KEY --keyName key_128;
-
+CRYPTO DELETE_KEY --keyName key_128_2;
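
For context, the rename restriction exercised above is enforced by HDFS, not Hive: before HDFS-7530 (fixed in Hadoop 2.7) renaming an encryption-zone root failed, while moving data between two different zones is rejected on both versions. A hedged sketch of that underlying behavior using the HDFS client API (paths, key names, and the class wrapper are illustrative; it assumes a running HDFS with a KMS that already holds both keys):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.client.HdfsAdmin;

// Hypothetical sketch, not part of this commit: mirrors the warehouse
// layout used by encryption_move_tbl.q.
public class EncryptionZoneRenameSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    HdfsAdmin admin = new HdfsAdmin(FileSystem.getDefaultUri(conf), conf);

    // Two distinct encryption zones, like default/encrypted_table and encrypted_db.
    fs.mkdirs(new Path("/warehouse/default/encrypted_table"));
    fs.mkdirs(new Path("/warehouse/encrypted_db"));
    admin.createEncryptionZone(new Path("/warehouse/default/encrypted_table"), "key_128");
    admin.createEncryptionZone(new Path("/warehouse/encrypted_db"), "key_128_2");

    // Cross-zone rename: the NameNode rejects this on 2.6 and 2.7 alike
    // (rename returns false or throws, depending on the code path).
    boolean moved = fs.rename(new Path("/warehouse/default/encrypted_table"),
        new Path("/warehouse/encrypted_db/encrypted_table_2"));
    System.out.println("cross-zone rename succeeded? " + moved);

    // Renaming the zone root within the same parent: fails on 2.6 but is
    // allowed from 2.7 on (HDFS-7530), which is what the test relies on.
    fs.rename(new Path("/warehouse/default/encrypted_table"),
        new Path("/warehouse/default/plain_table"));
  }
}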

http://git-wip-us.apache.org/repos/asf/hive/blob/cf9538b9/ql/src/test/results/clientpositive/encrypted/encryption_move_tbl.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/encrypted/encryption_move_tbl.q.out b/ql/src/test/results/clientpositive/encrypted/encryption_move_tbl.q.out
index 1106880..910ce25 100644
--- a/ql/src/test/results/clientpositive/encrypted/encryption_move_tbl.q.out
+++ b/ql/src/test/results/clientpositive/encrypted/encryption_move_tbl.q.out
@@ -2,6 +2,10 @@ PREHOOK: query: DROP TABLE IF EXISTS encrypted_table PURGE
 PREHOOK: type: DROPTABLE
 POSTHOOK: query: DROP TABLE IF EXISTS encrypted_table PURGE
 POSTHOOK: type: DROPTABLE
+PREHOOK: query: DROP DATABASE IF EXISTS encrypted_db
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: DROP DATABASE IF EXISTS encrypted_db
+POSTHOOK: type: DROPDATABASE
 #### A masked pattern was here ####
 PREHOOK: type: CREATETABLE
 #### A masked pattern was here ####
@@ -14,6 +18,15 @@ POSTHOOK: Output: database:default
 POSTHOOK: Output: default@encrypted_table
 Encryption key created: 'key_128'
 Encryption zone created: '/build/ql/test/data/warehouse/default/encrypted_table' using key: 'key_128'
+#### A masked pattern was here ####
+PREHOOK: type: CREATEDATABASE
+PREHOOK: Output: database:encrypted_db
+#### A masked pattern was here ####
+POSTHOOK: type: CREATEDATABASE
+POSTHOOK: Output: database:encrypted_db
+#### A masked pattern was here ####
+Encryption key created: 'key_128_2'
+Encryption zone created: '/build/ql/test/data/warehouse/encrypted_db' using key: 'key_128_2'
 PREHOOK: query: INSERT OVERWRITE TABLE encrypted_table SELECT * FROM src
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
@@ -32,11 +45,12 @@ POSTHOOK: type: SHOWTABLES
 POSTHOOK: Input: database:default
 encrypted_table
 src
-PREHOOK: query: ALTER TABLE default.encrypted_table RENAME TO default.plain_table
+PREHOOK: query: -- should fail
+ALTER TABLE default.encrypted_table RENAME TO encrypted_db.encrypted_table_2
 PREHOOK: type: ALTERTABLE_RENAME
 PREHOOK: Input: default@encrypted_table
 PREHOOK: Output: default@encrypted_table
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Unable to alter table. Alter Table operation for default.encrypted_table failed to move data due to: '/build/ql/test/data/warehouse/default/encrypted_table can't be moved from an encryption zone.' See hive log file for details.
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Unable to alter table. Alter Table operation for default.encrypted_table failed to move data due to: '/build/ql/test/data/warehouse/default/encrypted_table can't be moved from encryption zone /build/ql/test/data/warehouse/default/encrypted_table to encryption zone /build/ql/test/data/warehouse/encrypted_db.' See hive log file for details.
 PREHOOK: query: SHOW TABLES
 PREHOOK: type: SHOWTABLES
 PREHOOK: Input: database:default
@@ -45,11 +59,42 @@ POSTHOOK: type: SHOWTABLES
 POSTHOOK: Input: database:default
 encrypted_table
 src
-PREHOOK: query: DROP TABLE encrypted_table PURGE
-PREHOOK: type: DROPTABLE
+PREHOOK: query: -- should succeed in Hadoop 2.7 but fail in 2.6  (HDFS-7530)
+ALTER TABLE default.encrypted_table RENAME TO default.plain_table
+PREHOOK: type: ALTERTABLE_RENAME
 PREHOOK: Input: default@encrypted_table
 PREHOOK: Output: default@encrypted_table
-POSTHOOK: query: DROP TABLE encrypted_table PURGE
-POSTHOOK: type: DROPTABLE
+POSTHOOK: query: -- should succeed in Hadoop 2.7 but fail in 2.6  (HDFS-7530)
+ALTER TABLE default.encrypted_table RENAME TO default.plain_table
+POSTHOOK: type: ALTERTABLE_RENAME
 POSTHOOK: Input: default@encrypted_table
 POSTHOOK: Output: default@encrypted_table
+POSTHOOK: Output: default@plain_table
+PREHOOK: query: SHOW TABLES
+PREHOOK: type: SHOWTABLES
+PREHOOK: Input: database:default
+POSTHOOK: query: SHOW TABLES
+POSTHOOK: type: SHOWTABLES
+POSTHOOK: Input: database:default
+plain_table
+src
+PREHOOK: query: DROP TABLE encrypted_table PURGE
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE encrypted_table PURGE
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: DROP TABLE default.plain_table PURGE
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@plain_table
+PREHOOK: Output: default@plain_table
+POSTHOOK: query: DROP TABLE default.plain_table PURGE
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@plain_table
+POSTHOOK: Output: default@plain_table
+PREHOOK: query: DROP DATABASE encrypted_db
+PREHOOK: type: DROPDATABASE
+PREHOOK: Input: database:encrypted_db
+PREHOOK: Output: database:encrypted_db
+POSTHOOK: query: DROP DATABASE encrypted_db
+POSTHOOK: type: DROPDATABASE
+POSTHOOK: Input: database:encrypted_db
+POSTHOOK: Output: database:encrypted_db

http://git-wip-us.apache.org/repos/asf/hive/blob/cf9538b9/shims/0.23/pom.xml
----------------------------------------------------------------------
diff --git a/shims/0.23/pom.xml b/shims/0.23/pom.xml
index cb1999e..d0d1d5f 100644
--- a/shims/0.23/pom.xml
+++ b/shims/0.23/pom.xml
@@ -91,6 +91,22 @@
           </exclusion>
         </exclusions>
    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-common</artifactId>
+      <version>${hadoop.version}</version>
+      <optional>true</optional>
+      <exclusions>
+        <exclusion>
+          <groupId>org.slf4j</groupId>
+          <artifactId>slf4j-log4j12</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>commons-logging</groupId>
+          <artifactId>commons-logging</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-mapreduce-client-jobclient</artifactId>

http://git-wip-us.apache.org/repos/asf/hive/blob/cf9538b9/shims/common/src/main/java/org/apache/hadoop/hive/shims/ShimLoader.java
----------------------------------------------------------------------
diff --git a/shims/common/src/main/java/org/apache/hadoop/hive/shims/ShimLoader.java b/shims/common/src/main/java/org/apache/hadoop/hive/shims/ShimLoader.java
index 28d3e48..f712c3c 100644
--- a/shims/common/src/main/java/org/apache/hadoop/hive/shims/ShimLoader.java
+++ b/shims/common/src/main/java/org/apache/hadoop/hive/shims/ShimLoader.java
@@ -20,6 +20,8 @@ package org.apache.hadoop.hive.shims;
 import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge;
 import org.apache.hadoop.util.VersionInfo;
 import org.apache.log4j.AppenderSkeleton;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.HashMap;
 import java.util.Map;
@@ -29,6 +31,7 @@ import java.util.Map;
  *
  */
 public abstract class ShimLoader {
+  private static final Logger LOG = LoggerFactory.getLogger(ShimLoader.class);
   public static String HADOOP23VERSIONNAME = "0.23";
 
   private static volatile HadoopShims hadoopShims;
@@ -92,7 +95,12 @@ public abstract class ShimLoader {
     if (hadoopShims == null) {
       synchronized (ShimLoader.class) {
         if (hadoopShims == null) {
-          hadoopShims = loadShims(HADOOP_SHIM_CLASSES, HadoopShims.class);
+          try {
+            hadoopShims = loadShims(HADOOP_SHIM_CLASSES, HadoopShims.class);
+          } catch (Throwable t) {
+            LOG.error("Error loading shims", t);
+            throw new RuntimeException(t);
+          }
         }
       }
     }
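
The change above wraps the lazy shim initialization so that a load failure is logged to the Hive log instead of surfacing only as an opaque error at the call site. A condensed, hedged sketch of the pattern (the real HADOOP_SHIM_CLASSES map and loadShims live in ShimLoader; names here mirror them but the bodies are simplified):

import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.util.VersionInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Simplified sketch of ShimLoader's double-checked locking around shim creation.
public abstract class ShimLoaderSketch {
  private static final Logger LOG = LoggerFactory.getLogger(ShimLoaderSketch.class);
  private static final Map<String, String> HADOOP_SHIM_CLASSES = new HashMap<String, String>();
  static {
    // Hadoop 2.x resolves to the "0.23" shim in the real ShimLoader.
    HADOOP_SHIM_CLASSES.put("0.23", "org.apache.hadoop.hive.shims.Hadoop23Shims");
  }
  private static volatile Object hadoopShims; // HadoopShims in the real class

  public static Object getHadoopShims() {
    if (hadoopShims == null) {
      synchronized (ShimLoaderSketch.class) {
        if (hadoopShims == null) {
          try {
            String key = VersionInfo.getVersion().startsWith("2") ? "0.23" : "unknown";
            hadoopShims = Class.forName(HADOOP_SHIM_CLASSES.get(key)).newInstance();
          } catch (Throwable t) {
            LOG.error("Error loading shims", t); // logged before rethrow, as in the diff
            throw new RuntimeException(t);
          }
        }
      }
    }
    return hadoopShims;
  }
}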

http://git-wip-us.apache.org/repos/asf/hive/blob/cf9538b9/spark-client/src/test/java/org/apache/hive/spark/client/TestSparkClient.java
----------------------------------------------------------------------
diff --git a/spark-client/src/test/java/org/apache/hive/spark/client/TestSparkClient.java b/spark-client/src/test/java/org/apache/hive/spark/client/TestSparkClient.java
index ea83125..b95cd7a 100644
--- a/spark-client/src/test/java/org/apache/hive/spark/client/TestSparkClient.java
+++ b/spark-client/src/test/java/org/apache/hive/spark/client/TestSparkClient.java
@@ -17,6 +17,18 @@
 
 package org.apache.hive.spark.client;
 
+import org.apache.hive.spark.client.JobHandle.Listener;
+
+import org.slf4j.Logger;
+
+import org.slf4j.LoggerFactory;
+
+import org.mockito.invocation.InvocationOnMock;
+
+import org.mockito.stubbing.Answer;
+
+import org.mockito.Mockito;
+
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.FileOutputStream;
@@ -258,13 +272,42 @@ public class TestSparkClient {
     });
   }
 
+  private static final Logger LOG = LoggerFactory.getLogger(TestSparkClient.class);
+
   private <T extends Serializable> JobHandle.Listener<T> newListener() {
     @SuppressWarnings("unchecked")
-    JobHandle.Listener<T> listener =
-      (JobHandle.Listener<T>) mock(JobHandle.Listener.class);
+    JobHandle.Listener<T> listener = mock(JobHandle.Listener.class);
+    answerWhen(listener, "cancelled").onJobCancelled(Mockito.<JobHandle<T>>any());
+    answerWhen(listener, "queued").onJobQueued(Mockito.<JobHandle<T>>any());
+    answerWhen(listener, "started").onJobStarted(Mockito.<JobHandle<T>>any());
+    answerWhen(listener, "succeeded").onJobSucceeded(
+        Mockito.<JobHandle<T>>any(), Mockito.<T>any());
+    answerWhen(listener, "job started").onSparkJobStarted(
+        Mockito.<JobHandle<T>>any(), Mockito.anyInt());
+    Mockito.doAnswer(new Answer<Void>() {
+      public Void answer(InvocationOnMock invocation) throws Throwable {
+        @SuppressWarnings("rawtypes")
+        JobHandleImpl arg = ((JobHandleImpl)invocation.getArguments()[0]);
+        LOG.info("Job failed " + arg.getClientJobId(),
+            (Throwable)invocation.getArguments()[1]);
+        return null;
+      };
+    }).when(listener).onJobFailed(Mockito.<JobHandle<T>>any(), Mockito.<Throwable>any());
     return listener;
   }
 
+  protected <T extends Serializable> Listener<T> answerWhen(
+      Listener<T> listener, final String logStr) {
+    return Mockito.doAnswer(new Answer<Void>() {
+      public Void answer(InvocationOnMock invocation) throws Throwable {
+        @SuppressWarnings("rawtypes")
+        JobHandleImpl arg = ((JobHandleImpl)invocation.getArguments()[0]);
+        LOG.info("Job " + logStr + " " + arg.getClientJobId());
+        return null;
+      };
+    }).when(listener);
+  }
+
   private void runTest(boolean local, TestFunction test) throws Exception {
     Map<String, String> conf = createConf(local);
     SparkClientFactory.initialize(conf);
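
The listener stubbing above replaces plain mocks with mocks whose callbacks log, which makes flaky Spark-client test runs easier to diagnose. A minimal, hedged sketch of the same Mockito doAnswer idiom in isolation (the listener interface and names are illustrative stand-ins for JobHandle.Listener):

import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;

// Illustrative stand-in for JobHandle.Listener, not part of this commit.
interface JobListener {
  void onJobStarted(String jobId);
}

public class ListenerStubSketch {
  public static void main(String[] args) {
    JobListener listener = Mockito.mock(JobListener.class);
    // doAnswer(...).when(mock).method(matcher): the mock still records the
    // call for verification, but also logs it when invoked.
    Mockito.doAnswer(new Answer<Void>() {
      @Override
      public Void answer(InvocationOnMock invocation) throws Throwable {
        System.out.println("Job started: " + invocation.getArguments()[0]);
        return null;
      }
    }).when(listener).onJobStarted(Mockito.anyString());

    listener.onJobStarted("job-1");                 // prints "Job started: job-1"
    Mockito.verify(listener).onJobStarted("job-1"); // still verifiable as a mock
  }
}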

