From: inigoiri@apache.org
To: common-commits@hadoop.apache.org
Date: Fri, 28 Jul 2017 16:50:33 -0000
Message-Id: <526c9395ac49404b8ffa40539cf821ca@git.apache.org>
In-Reply-To: <2575286c5d92433ba213e53a20118dba@git.apache.org>
References: <2575286c5d92433ba213e53a20118dba@git.apache.org>
Subject: [24/50] [abbrv] hadoop git commit: HADOOP-14578. Bind IPC connections to kerberos UPN host for proxy users. Contributed by Daryn Sharp.

HADOOP-14578. Bind IPC connections to kerberos UPN host for proxy users.
Contributed by Daryn Sharp.
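For readers skimming the diff below, here is a minimal, dependency-free sketch
of the rule the patch applies when setting up an IPC connection: take the host
component of the client's Kerberos principal (the UPN), and if that host
resolves to a local interface, bind the client socket to it before connecting.
The helper names hostFromPrincipal and localAddressOf are hypothetical
stand-ins for Hadoop's SecurityUtil.getHostFromPrincipal and
NetUtils.getLocalInetAddress, not the real APIs.

import java.io.IOException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.NetworkInterface;
import java.net.Socket;

public class UpnHostBindSketch {

  // Host component of a principal like "client/host1.example.com@REALM",
  // or null if the principal has no host part.
  // (Hypothetical stand-in for SecurityUtil.getHostFromPrincipal.)
  static String hostFromPrincipal(String principal) {
    int slash = principal.indexOf('/');
    int at = principal.indexOf('@');
    return (slash >= 0 && at > slash) ? principal.substring(slash + 1, at) : null;
  }

  // The address if it belongs to a local interface, otherwise null.
  // (Hypothetical stand-in for NetUtils.getLocalInetAddress.)
  static InetAddress localAddressOf(String host) {
    if (host == null) {
      return null;
    }
    try {
      InetAddress addr = InetAddress.getByName(host);
      return NetworkInterface.getByInetAddress(addr) != null ? addr : null;
    } catch (IOException e) {
      return null;
    }
  }

  // Core idea of the patch: bind the client socket to the UPN host (when it
  // is local) before connecting, so the server sees the expected source
  // address for the principal.
  static Socket connect(String clientPrincipal, InetSocketAddress server,
      int timeoutMs) throws IOException {
    Socket socket = new Socket();
    InetAddress bindAddr = localAddressOf(hostFromPrincipal(clientPrincipal));
    if (bindAddr != null) {
      socket.setReuseAddress(true);
      socket.bind(new InetSocketAddress(bindAddr, 0)); // ephemeral port on that host
    }
    socket.connect(server, timeoutMs);
    return socket;
  }

  public static void main(String[] args) throws Exception {
    String principal =
        "client/" + InetAddress.getLocalHost().getHostName() + "@EXAMPLE.COM";
    System.out.println("host from principal: " + hostFromPrincipal(principal));
    System.out.println("local bind address : "
        + localAddressOf(hostFromPrincipal(principal)));
  }
}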
Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/27a1a5fd
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/27a1a5fd
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/27a1a5fd

Branch: refs/heads/HDFS-10467
Commit: 27a1a5fde94d4d7ea0ed172635c146d594413781
Parents: a92bf39
Author: Kihwal Lee
Authored: Wed Jul 26 13:12:39 2017 -0500
Committer: Kihwal Lee
Committed: Wed Jul 26 13:12:39 2017 -0500

----------------------------------------------------------------------
 .../main/java/org/apache/hadoop/ipc/Client.java | 34 +++++----
 .../java/org/apache/hadoop/ipc/TestIPC.java     | 76 ++++++++++++++++++++
 2 files changed, 96 insertions(+), 14 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/27a1a5fd/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
index 1daf803..c9ac615 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
@@ -633,7 +633,8 @@ public class Client implements AutoCloseable {
       return false;
     }
 
-    private synchronized void setupConnection() throws IOException {
+    private synchronized void setupConnection(
+        UserGroupInformation ticket) throws IOException {
       short ioFailures = 0;
       short timeoutFailures = 0;
       while (true) {
@@ -661,24 +662,26 @@ public class Client implements AutoCloseable {
            * client, to ensure Server matching address of the client connection
            * to host name in principal passed.
           */
-          UserGroupInformation ticket = remoteId.getTicket();
+          InetSocketAddress bindAddr = null;
           if (ticket != null && ticket.hasKerberosCredentials()) {
             KerberosInfo krbInfo =
               remoteId.getProtocol().getAnnotation(KerberosInfo.class);
-            if (krbInfo != null && krbInfo.clientPrincipal() != null) {
-              String host =
-                SecurityUtil.getHostFromPrincipal(remoteId.getTicket().getUserName());
-
+            if (krbInfo != null) {
+              String principal = ticket.getUserName();
+              String host = SecurityUtil.getHostFromPrincipal(principal);
               // If host name is a valid local address then bind socket to it
               InetAddress localAddr = NetUtils.getLocalInetAddress(host);
               if (localAddr != null) {
                 this.socket.setReuseAddress(true);
-                this.socket.bind(new InetSocketAddress(localAddr, 0));
+                if (LOG.isDebugEnabled()) {
+                  LOG.debug("Binding " + principal + " to " + localAddr);
+                }
+                bindAddr = new InetSocketAddress(localAddr, 0);
               }
             }
           }
 
-          NetUtils.connect(this.socket, server, connectionTimeout);
+          NetUtils.connect(this.socket, server, bindAddr, connectionTimeout);
           this.socket.setSoTimeout(soTimeout);
           return;
         } catch (ConnectTimeoutException toe) {
@@ -762,7 +765,14 @@ public class Client implements AutoCloseable {
         AtomicBoolean fallbackToSimpleAuth) {
       if (socket != null || shouldCloseConnection.get()) {
         return;
-      }
+      }
+      UserGroupInformation ticket = remoteId.getTicket();
+      if (ticket != null) {
+        final UserGroupInformation realUser = ticket.getRealUser();
+        if (realUser != null) {
+          ticket = realUser;
+        }
+      }
       try {
         if (LOG.isDebugEnabled()) {
           LOG.debug("Connecting to "+server);
@@ -774,14 +784,10 @@ public class Client implements AutoCloseable {
         short numRetries = 0;
         Random rand = null;
         while (true) {
-          setupConnection();
+          setupConnection(ticket);
           ipcStreams = new IpcStreams(socket, maxResponseLength);
           writeConnectionHeader(ipcStreams);
           if (authProtocol == AuthProtocol.SASL) {
-            UserGroupInformation ticket = remoteId.getTicket();
-            if (ticket.getRealUser() != null) {
-              ticket = ticket.getRealUser();
-            }
             try {
               authMethod = ticket
                   .doAs(new PrivilegedExceptionAction<AuthMethod>() {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/27a1a5fd/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java
index 3416746..a4577f2 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java
@@ -39,9 +39,12 @@ import java.io.InputStream;
 import java.io.OutputStream;
 import java.lang.reflect.Method;
 import java.lang.reflect.Proxy;
+import java.net.InetAddress;
 import java.net.InetSocketAddress;
 import java.net.ServerSocket;
 import java.net.Socket;
+import java.net.SocketAddress;
+import java.net.SocketException;
 import java.net.SocketTimeoutException;
 import java.util.ArrayList;
 import java.util.Collections;
@@ -76,6 +79,7 @@ import org.apache.hadoop.ipc.Server.Connection;
 import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto;
 import org.apache.hadoop.net.ConnectTimeoutException;
 import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.security.KerberosInfo;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
@@ -1484,6 +1488,78 @@ public class TestIPC {
     Assert.fail("didn't get limit exceeded");
   }
 
+  @Test
+  public void testUserBinding() throws Exception {
+    checkUserBinding(false);
+  }
+
+  @Test
+  public void testProxyUserBinding() throws Exception {
+    checkUserBinding(true);
+  }
+
+  private void checkUserBinding(boolean asProxy) throws Exception {
+    Socket s;
+    // don't attempt bind with no service host.
+    s = checkConnect(null, asProxy);
+    Mockito.verify(s, Mockito.never()).bind(Mockito.any(SocketAddress.class));
+
+    // don't attempt bind with service host not belonging to this host.
+    s = checkConnect("1.2.3.4", asProxy);
+    Mockito.verify(s, Mockito.never()).bind(Mockito.any(SocketAddress.class));
+
+    // do attempt bind when service host is this host.
+    InetAddress addr = InetAddress.getLocalHost();
+    s = checkConnect(addr.getHostAddress(), asProxy);
+    Mockito.verify(s).bind(new InetSocketAddress(addr, 0));
+  }
+
+  // dummy protocol that claims to support kerberos.
+  @KerberosInfo(serverPrincipal = "server@REALM")
+  private static class TestBindingProtocol {
+  }
+
+  private Socket checkConnect(String addr, boolean asProxy) throws Exception {
+    // create a fake ugi that claims to have kerberos credentials.
+    StringBuilder principal = new StringBuilder();
+    principal.append("client");
+    if (addr != null) {
+      principal.append("/").append(addr);
+    }
+    principal.append("@REALM");
+    UserGroupInformation ugi =
+        spy(UserGroupInformation.createRemoteUser(principal.toString()));
+    Mockito.doReturn(true).when(ugi).hasKerberosCredentials();
+    if (asProxy) {
+      ugi = UserGroupInformation.createProxyUser("proxy", ugi);
+    }
+
+    // create a mock socket that throws on connect.
+    SocketException expectedConnectEx =
+        new SocketException("Expected connect failure");
+    Socket s = Mockito.mock(Socket.class);
+    SocketFactory mockFactory = Mockito.mock(SocketFactory.class);
+    Mockito.doReturn(s).when(mockFactory).createSocket();
+    doThrow(expectedConnectEx).when(s).connect(
+        Mockito.any(SocketAddress.class), Mockito.anyInt());
+
+    // do a dummy call and expect it to throw an exception on connect.
+    // tests should verify if/how a bind occurred.
+    try (Client client = new Client(LongWritable.class, conf, mockFactory)) {
+      final InetSocketAddress sockAddr = new InetSocketAddress(0);
+      final LongWritable param = new LongWritable(RANDOM.nextLong());
+      final ConnectionId remoteId = new ConnectionId(
+          sockAddr, TestBindingProtocol.class, ugi, 0,
+          RetryPolicies.TRY_ONCE_THEN_FAIL, conf);
+      client.call(RPC.RpcKind.RPC_BUILTIN, param, remoteId, null);
+      fail("call didn't throw connect exception");
+    } catch (SocketException se) {
+      // ipc layer re-wraps exceptions, so check the cause.
+      Assert.assertSame(expectedConnectEx, se.getCause());
+    }
+    return s;
+  }
+
   private void doIpcVersionTest(
       byte[] requestData,
       byte[] expectedResponse) throws IOException {
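The proxy-user half of the change is that setupIOstreams now resolves the
ticket to its real user before calling setupConnection, so a doAs/proxy
connection binds using the real (kerberized) user's UPN host instead of
skipping the bind. A short sketch of that resolution, assuming hadoop-common
is on the classpath; the principal strings here are made up for illustration.

import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;

public class ProxyUserBindSketch {
  public static void main(String[] args) throws Exception {
    // The real user carries the host in its Kerberos principal (UPN)...
    UserGroupInformation realUser =
        UserGroupInformation.createRemoteUser("client/host1.example.com@EXAMPLE.COM");
    // ...while RPCs are issued through a proxy user acting on its behalf.
    UserGroupInformation ugi = UserGroupInformation.createProxyUser("proxy", realUser);

    // Same resolution the patched Client performs before setupConnection():
    // a proxy user's bind decision is driven by the real user's principal.
    UserGroupInformation ticket = ugi;
    if (ticket.getRealUser() != null) {
      ticket = ticket.getRealUser();
    }
    String host = SecurityUtil.getHostFromPrincipal(ticket.getUserName());
    System.out.println("host used for the bind check: " + host); // host1.example.com
  }
}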