hive-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From ser...@apache.org
Subject [08/46] hive git commit: HIVE-16131 : Hive building with Hadoop 3 - additional stuff broken recently (Sergey Shelukhin, reviewed by Rui Li, Wei Zheng)
Date Mon, 13 Mar 2017 19:42:09 GMT
HIVE-16131 : Hive building with Hadoop 3 - additional stuff broken recently (Sergey Shelukhin,
reviewed by Rui Li, Wei Zheng)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/b478a22e
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/b478a22e
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/b478a22e

Branch: refs/heads/hive-14535
Commit: b478a22ec0e984a789f13b04ba04225730217f54
Parents: 48e4e04
Author: Sergey Shelukhin <sershe@apache.org>
Authored: Wed Mar 8 12:01:29 2017 -0800
Committer: Sergey Shelukhin <sershe@apache.org>
Committed: Wed Mar 8 12:01:29 2017 -0800

----------------------------------------------------------------------
 .../contrib/util/typedbytes/TypedBytesWritableOutput.java     | 6 +++++-
 .../main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java | 7 ++++---
 2 files changed, 9 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/b478a22e/contrib/src/java/org/apache/hadoop/hive/contrib/util/typedbytes/TypedBytesWritableOutput.java
----------------------------------------------------------------------
diff --git a/contrib/src/java/org/apache/hadoop/hive/contrib/util/typedbytes/TypedBytesWritableOutput.java
b/contrib/src/java/org/apache/hadoop/hive/contrib/util/typedbytes/TypedBytesWritableOutput.java
index 70b0d64..0da5533 100644
--- a/contrib/src/java/org/apache/hadoop/hive/contrib/util/typedbytes/TypedBytesWritableOutput.java
+++ b/contrib/src/java/org/apache/hadoop/hive/contrib/util/typedbytes/TypedBytesWritableOutput.java
@@ -24,6 +24,7 @@ import java.io.DataOutputStream;
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.Map;
+import java.util.Set;
 
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
@@ -212,7 +213,10 @@ public class TypedBytesWritableOutput {
 
   public void writeSortedMap(SortedMapWritable smw) throws IOException {
     out.writeMapHeader(smw.size());
-    for (Map.Entry<WritableComparable, Writable> entry : smw.entrySet()) {
+    // Make sure it compiles with both Hadoop 2 and Hadoop 3.
+    Set<Map.Entry<? extends WritableComparable, Writable>> entrySet =
+      (Set<Map.Entry<? extends WritableComparable, Writable>>)((Object)smw.entrySet());
+    for (Map.Entry<? extends WritableComparable, Writable> entry : entrySet) {
       write(entry.getKey());
       write(entry.getValue());
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/b478a22e/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
----------------------------------------------------------------------
diff --git a/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java b/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
index e6af00d..21a18f8 100644
--- a/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
+++ b/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
@@ -1158,9 +1158,10 @@ public class Hadoop23Shims extends HadoopShimsSecure {
       try {
         DFSClient.class.getMethod("isHDFSEncryptionEnabled");
       } catch (NoSuchMethodException e) {
-        // the method is available since Hadoop-2.7.1
-        // if we run with an older Hadoop, check this ourselves
-        return !conf.getTrimmed(DFSConfigKeys.DFS_ENCRYPTION_KEY_PROVIDER_URI, "").isEmpty();
+        // The method is available since Hadoop-2.7.1; if we run with an older Hadoop, check this
+        // ourselves. Note that this setting is in turn deprecated in newer versions of Hadoop, but
+        // we only care for it in the older versions; so we will hardcode the old name here.
+        return !conf.getTrimmed("dfs.encryption.key.provider.uri", "").isEmpty();
       }
       return client.isHDFSEncryptionEnabled();
     }


Mime
View raw message