Return-Path:
X-Original-To: apmail-hive-commits-archive@www.apache.org
Delivered-To: apmail-hive-commits-archive@www.apache.org
Received: from mail.apache.org (hermes.apache.org [140.211.11.3])
    by minotaur.apache.org (Postfix) with SMTP id 61A93109B5
    for ; Thu, 18 Dec 2014 22:52:48 +0000 (UTC)
Received: (qmail 49463 invoked by uid 500); 18 Dec 2014 22:52:48 -0000
Delivered-To: apmail-hive-commits-archive@hive.apache.org
Received: (qmail 49420 invoked by uid 500); 18 Dec 2014 22:52:48 -0000
Mailing-List: contact commits-help@hive.apache.org; run by ezmlm
Precedence: bulk
List-Help:
List-Unsubscribe:
List-Post:
List-Id:
Reply-To: hive-dev@hive.apache.org
Delivered-To: mailing list commits@hive.apache.org
Received: (qmail 49397 invoked by uid 99); 18 Dec 2014 22:52:48 -0000
Received: from eris.apache.org (HELO hades.apache.org) (140.211.11.105)
    by apache.org (qpsmtpd/0.29) with ESMTP; Thu, 18 Dec 2014 22:52:48 +0000
Received: from hades.apache.org (localhost [127.0.0.1])
    by hades.apache.org (ASF Mail Server at hades.apache.org) with ESMTP id 3CA79AC08CC;
    Thu, 18 Dec 2014 22:52:46 +0000 (UTC)
Content-Type: text/plain; charset="utf-8"
MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: svn commit: r1646564 - /hive/branches/HIVE-8065/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
Date: Thu, 18 Dec 2014 22:52:45 -0000
To: commits@hive.apache.org
From: brock@apache.org
X-Mailer: svnmailer-1.0.9
Message-Id: <20141218225247.3CA79AC08CC@hades.apache.org>

Author: brock
Date: Thu Dec 18 22:52:45 2014
New Revision: 1646564

URL: http://svn.apache.org/r1646564
Log:
HIVE-9165 - Revert HIVE-8604 that uses the non-implemented KeyProviderFactory.get() method. (Sergio Pena via Brock)

Modified:
    hive/branches/HIVE-8065/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java

Modified: hive/branches/HIVE-8065/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-8065/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java?rev=1646564&r1=1646563&r2=1646564&view=diff
==============================================================================
--- hive/branches/HIVE-8065/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java (original)
+++ hive/branches/HIVE-8065/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java Thu Dec 18 22:52:45 2014
@@ -24,7 +24,6 @@ import java.lang.reflect.Method;
 import java.net.InetSocketAddress;
 import java.net.MalformedURLException;
 import java.net.URI;
-import java.net.URISyntaxException;
 import java.security.AccessControlException;
 import java.security.NoSuchAlgorithmException;
 import java.util.ArrayList;
@@ -747,7 +746,6 @@ public class Hadoop23Shims extends Hadoo
     ret.put("HADOOPSPECULATIVEEXECREDUCERS", "mapreduce.reduce.speculative");
     ret.put("MAPREDSETUPCLEANUPNEEDED", "mapreduce.job.committer.setup.cleanup.needed");
     ret.put("MAPREDTASKCLEANUPNEEDED", "mapreduce.job.committer.task.cleanup.needed");
-    ret.put("HADOOPSECURITYKEYPROVIDER", "dfs.encryption.key.provider.uri");
     return ret;
   }
 
@@ -951,18 +949,13 @@ public class Hadoop23Shims extends Hadoo
     public HdfsEncryptionShim(URI uri, Configuration conf) throws IOException {
       hdfsAdmin = new HdfsAdmin(uri, conf);
 
-      // We get the key provider via the MiniDFSCluster in the test and in the product
-      // environment we get the key provider via the key provider factory.
       if (keyProvider == null) {
         try {
-          String keyProviderPath = conf
-              .get(ShimLoader.getHadoopShims().getHadoopConfNames().get("HADOOPSECURITYKEYPROVIDER"),
-                  null);
-          if (keyProviderPath != null) {
-            keyProvider = KeyProviderFactory.get(new URI(keyProviderPath), conf);
+          // We use the first key provider found in the list of key providers. We don't know
+          // what to do with the rest, so let's skip them.
+          if (keyProvider == null) {
+            keyProvider = KeyProviderFactory.getProviders(conf).get(0);
           }
-        } catch (URISyntaxException e) {
-          throw new IOException("Invalid HDFS security key provider path", e);
         } catch (Exception e) {
           throw new IOException("Cannot create HDFS security object: ", e);
         }
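
For reference, a minimal standalone sketch of the key-provider lookup pattern this revert restores, assuming Hadoop's org.apache.hadoop.crypto.key.KeyProviderFactory API as it appears in the diff. The KeyProviderLookup class name and the empty-list guard are illustrative additions and are not part of the commit:

import java.io.IOException;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.crypto.key.KeyProvider;
import org.apache.hadoop.crypto.key.KeyProviderFactory;

// Hypothetical helper class; not part of Hadoop23Shims.
public class KeyProviderLookup {

  // Returns the first configured KeyProvider, or null if none is configured.
  // The committed code calls KeyProviderFactory.getProviders(conf).get(0) directly
  // and ignores any additional providers.
  public static KeyProvider firstKeyProvider(Configuration conf) throws IOException {
    try {
      List<KeyProvider> providers = KeyProviderFactory.getProviders(conf);
      // Empty-list guard added for illustration; the committed code assumes at least one provider.
      return providers.isEmpty() ? null : providers.get(0);
    } catch (Exception e) {
      throw new IOException("Cannot create HDFS security object: ", e);
    }
  }
}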