Author: suresh
Date: Tue Nov 6 16:12:43 2012
New Revision: 1406202
URL: http://svn.apache.org/viewvc?rev=1406202&view=rev
Log:
HADOOP-9004. Allow security unit tests to use external KDC. Contributed by Stephen Chu.
Added:
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestStartSecureDataNode.java
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSecureNameNodeWithExternalKdc.java
Modified:
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/SecureDataNodeStarter.java
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSCluster.java
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/SecureDataNodeStarter.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/SecureDataNodeStarter.java?rev=1406202&r1=1406201&r2=1406202&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/SecureDataNodeStarter.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/SecureDataNodeStarter.java Tue Nov 6 16:12:43 2012
@@ -38,6 +38,8 @@ import org.mortbay.jetty.security.SslSoc
import javax.net.ssl.SSLServerSocketFactory;
+import com.google.common.annotations.VisibleForTesting;
+
/**
* Utility class to start a datanode in a secure cluster, first obtaining
* privileged resources before main startup and handing them to the datanode.
@@ -73,6 +75,25 @@ public class SecureDataNodeStarter imple
// Stash command-line arguments for regular datanode
args = context.getArguments();
+ sslFactory = new SSLFactory(SSLFactory.Mode.SERVER, conf);
+ resources = getSecureResources(sslFactory, conf);
+ }
+
+ @Override
+ public void start() throws Exception {
+ System.err.println("Starting regular datanode initialization");
+ DataNode.secureMain(args, resources);
+ }
+
+ @Override public void destroy() {
+ sslFactory.destroy();
+ }
+
+ @Override public void stop() throws Exception { /* Nothing to do */ }
+
+ @VisibleForTesting
+ public static SecureResources getSecureResources(final SSLFactory sslFactory,
+ Configuration conf) throws Exception {
// Obtain secure port for data streaming to datanode
InetSocketAddress streamingAddr = DataNode.getStreamingAddr(conf);
int socketWriteTimeout = conf.getInt(DFSConfigKeys.DFS_DATANODE_SOCKET_WRITE_TIMEOUT_KEY,
@@ -85,13 +106,12 @@ public class SecureDataNodeStarter imple
// Check that we got the port we need
if (ss.getLocalPort() != streamingAddr.getPort()) {
throw new RuntimeException("Unable to bind on specified streaming port in secure " +
-          "context. Needed " + streamingAddr.getPort() + ", got " + ss.getLocalPort());
+        "context. Needed " + streamingAddr.getPort() + ", got " + ss.getLocalPort());
}
// Obtain secure listener for web server
Connector listener;
if (HttpConfig.isSecure()) {
- sslFactory = new SSLFactory(SSLFactory.Mode.SERVER, conf);
try {
sslFactory.init();
} catch (GeneralSecurityException ex) {
@@ -126,18 +146,7 @@ public class SecureDataNodeStarter imple
}
System.err.println("Opened streaming server at " + streamingAddr);
System.err.println("Opened info server at " + infoSocAddr);
- resources = new SecureResources(ss, listener);
+ return new SecureResources(ss, listener);
}
- @Override
- public void start() throws Exception {
- System.err.println("Starting regular datanode initialization");
- DataNode.secureMain(args, resources);
- }
-
- @Override public void destroy() {
- sslFactory.destroy();
- }
-
- @Override public void stop() throws Exception { /* Nothing to do */ }
}
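
The hunk above moves privileged-resource acquisition out of the Commons Daemon init() path into the static, @VisibleForTesting getSecureResources() method, so a sufficiently privileged test process can bind the secure ports without going through jsvc. A minimal sketch of the resulting call pattern, mirroring what MiniDFSCluster does below (class name is illustrative; assumes the process can actually bind the configured privileged ports):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.server.datanode.SecureDataNodeStarter;
    import org.apache.hadoop.hdfs.server.datanode.SecureDataNodeStarter.SecureResources;
    import org.apache.hadoop.security.ssl.SSLFactory;

    public class SecureResourcesSketch {
      // Bind the DataNode's privileged streaming and info ports directly.
      // Requires root (or equivalent) when the configured ports are < 1024.
      public static SecureResources acquire(Configuration conf) throws Exception {
        SSLFactory sslFactory = new SSLFactory(SSLFactory.Mode.SERVER, conf);
        return SecureDataNodeStarter.getSecureResources(sslFactory, conf);
      }
    }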
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSCluster.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSCluster.java?rev=1406202&r1=1406201&r2=1406202&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSCluster.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSCluster.java Tue Nov 6 16:12:43 2012
@@ -81,6 +81,8 @@ import org.apache.hadoop.hdfs.server.com
import org.apache.hadoop.hdfs.server.datanode.DataNode;
import org.apache.hadoop.hdfs.server.datanode.DataNodeTestUtils;
import org.apache.hadoop.hdfs.server.datanode.DataStorage;
+import org.apache.hadoop.hdfs.server.datanode.SecureDataNodeStarter;
+import org.apache.hadoop.hdfs.server.datanode.SecureDataNodeStarter.SecureResources;
import org.apache.hadoop.hdfs.server.datanode.SimulatedFSDataset;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsDatasetSpi;
import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
@@ -95,6 +97,7 @@ import org.apache.hadoop.net.StaticMappi
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.ProxyUsers;
+import org.apache.hadoop.security.ssl.SSLFactory;
import org.apache.hadoop.util.ExitUtil;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.ToolRunner;
@@ -145,6 +148,7 @@ public class MiniDFSCluster {
private boolean setupHostsFile = false;
private MiniDFSNNTopology nnTopology = null;
private boolean checkExitOnShutdown = true;
+ private boolean checkDataNodeAddrConfig = false;
private boolean checkDataNodeHostConfig = false;
public Builder(Configuration conf) {
@@ -266,6 +270,14 @@ public class MiniDFSCluster {
/**
* Default: false
*/
+ public Builder checkDataNodeAddrConfig(boolean val) {
+ this.checkDataNodeAddrConfig = val;
+ return this;
+ }
+
+ /**
+ * Default: false
+ */
public Builder checkDataNodeHostConfig(boolean val) {
this.checkDataNodeHostConfig = val;
return this;
@@ -336,6 +348,7 @@ public class MiniDFSCluster {
builder.setupHostsFile,
builder.nnTopology,
builder.checkExitOnShutdown,
+ builder.checkDataNodeAddrConfig,
builder.checkDataNodeHostConfig);
}
@@ -343,11 +356,14 @@ public class MiniDFSCluster {
DataNode datanode;
Configuration conf;
String[] dnArgs;
+ SecureResources secureResources;
- DataNodeProperties(DataNode node, Configuration conf, String[] args) {
+ DataNodeProperties(DataNode node, Configuration conf, String[] args,
+ SecureResources secureResources) {
this.datanode = node;
this.conf = conf;
this.dnArgs = args;
+ this.secureResources = secureResources;
}
}
@@ -573,7 +589,7 @@ public class MiniDFSCluster {
manageNameDfsDirs, true, manageDataDfsDirs, manageDataDfsDirs,
operation, racks, hosts,
simulatedCapacities, null, true, false,
- MiniDFSNNTopology.simpleSingleNN(nameNodePort, 0), true, false);
+ MiniDFSNNTopology.simpleSingleNN(nameNodePort, 0), true, false, false);
}
private void initMiniDFSCluster(
@@ -584,6 +600,7 @@ public class MiniDFSCluster {
String[] hosts, long[] simulatedCapacities, String clusterId,
boolean waitSafeMode, boolean setupHostsFile,
MiniDFSNNTopology nnTopology, boolean checkExitOnShutdown,
+ boolean checkDataNodeAddrConfig,
boolean checkDataNodeHostConfig)
throws IOException {
ExitUtil.disableSystemExit();
@@ -647,7 +664,7 @@ public class MiniDFSCluster {
// Start the DataNodes
startDataNodes(conf, numDataNodes, manageDataDfsDirs, operation, racks,
- hosts, simulatedCapacities, setupHostsFile, false, checkDataNodeHostConfig);
+ hosts, simulatedCapacities, setupHostsFile, checkDataNodeAddrConfig, checkDataNodeHostConfig);
waitClusterUp();
//make sure ProxyUsers uses the latest conf
ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
@@ -1161,7 +1178,18 @@ public class MiniDFSCluster {
if (hosts != null) {
NetUtils.addStaticResolution(hosts[i - curDatanodesNum], "localhost");
}
- DataNode dn = DataNode.instantiateDataNode(dnArgs, dnConf);
+
+ SecureResources secureResources = null;
+ if (UserGroupInformation.isSecurityEnabled()) {
+ SSLFactory sslFactory = new SSLFactory(SSLFactory.Mode.SERVER, dnConf);
+ try {
+ secureResources = SecureDataNodeStarter.getSecureResources(sslFactory, dnConf);
+ } catch (Exception ex) {
+ throw new IOException("Failed to acquire secure resources for DataNode", ex);
+ }
+ }
+ DataNode dn = DataNode.instantiateDataNode(dnArgs, dnConf,
+ secureResources);
if(dn == null)
throw new IOException("Cannot start DataNode in "
+ dnConf.get(DFS_DATANODE_DATA_DIR_KEY));
@@ -1176,7 +1204,7 @@ public class MiniDFSCluster {
racks[i-curDatanodesNum]);
}
dn.runDatanodeDaemon();
- dataNodes.add(new DataNodeProperties(dn, newconf, dnArgs));
+ dataNodes.add(new DataNodeProperties(dn, newconf, dnArgs, secureResources));
}
curDatanodesNum += numDataNodes;
this.numDataNodes += numDataNodes;
@@ -1607,14 +1635,16 @@ public class MiniDFSCluster {
boolean keepPort) throws IOException {
Configuration conf = dnprop.conf;
String[] args = dnprop.dnArgs;
+ SecureResources secureResources = dnprop.secureResources;
Configuration newconf = new HdfsConfiguration(conf); // save cloned config
if (keepPort) {
InetSocketAddress addr = dnprop.datanode.getXferAddress();
conf.set(DFS_DATANODE_ADDRESS_KEY,
addr.getAddress().getHostAddress() + ":" + addr.getPort());
}
- dataNodes.add(new DataNodeProperties(DataNode.createDataNode(args, conf),
- newconf, args));
+ dataNodes.add(new DataNodeProperties(
+ DataNode.createDataNode(args, conf, secureResources),
+ newconf, args, secureResources));
numDataNodes++;
return true;
}
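
The new checkDataNodeAddrConfig builder option lets a test pin the DataNode to pre-configured (typically privileged) addresses instead of having MiniDFSCluster assign ephemeral ports, and DataNodeProperties now carries the SecureResources so a secure DataNode can be restarted with the same listeners. A condensed sketch of the builder usage, lifted from TestStartSecureDataNode below (class name is illustrative; the sub-1024 ports assume the test runs as root):

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.DFSConfigKeys;
    import org.apache.hadoop.hdfs.HdfsConfiguration;
    import org.apache.hadoop.hdfs.MiniDFSCluster;

    public class PinnedAddrClusterSketch {
      public static MiniDFSCluster start() throws IOException {
        Configuration conf = new HdfsConfiguration();
        // Pre-set privileged DataNode ports; honored only when
        // checkDataNodeAddrConfig(true) is passed to the builder.
        conf.set(DFSConfigKeys.DFS_DATANODE_ADDRESS_KEY, "127.0.0.1:1004");
        conf.set(DFSConfigKeys.DFS_DATANODE_HTTP_ADDRESS_KEY, "127.0.0.1:1006");
        return new MiniDFSCluster.Builder(conf)
            .numDataNodes(1)
            .checkDataNodeAddrConfig(true)  // keep the addresses set above
            .build();
      }
    }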
Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestStartSecureDataNode.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestStartSecureDataNode.java?rev=1406202&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestStartSecureDataNode.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestStartSecureDataNode.java Tue Nov 6 16:12:43 2012
@@ -0,0 +1,117 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.hdfs.server.datanode;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.IOException;
+import java.security.PrivilegedExceptionAction;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.hdfs.HdfsConfiguration;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
+import static org.apache.hadoop.security.SecurityUtilTestHelper.isExternalKdcRunning;
+import org.junit.Assume;
+import org.junit.Before;
+import org.junit.Test;
+
+/**
+ * This test starts a MiniDFSCluster with one NameNode and one DataNode,
+ * with Kerberos authentication enabled using a user-specified KDC,
+ * principals, and keytabs.
+ *
+ * A secure DataNode must be started by root, so this test must be run
+ * as root.
+ *
+ * To run, users must specify the following system properties:
+ * externalKdc=true
+ * java.security.krb5.conf
+ * dfs.namenode.kerberos.principal
+ * dfs.namenode.kerberos.internal.spnego.principal
+ * dfs.namenode.keytab.file
+ * dfs.datanode.kerberos.principal
+ * dfs.datanode.keytab.file
+ */
+public class TestStartSecureDataNode {
+ private static final int NUM_OF_DATANODES = 1;
+
+ @Before
+ public void testExternalKdcRunning() {
+ // Tests are skipped if external KDC is not running.
+ Assume.assumeTrue(isExternalKdcRunning());
+ }
+
+ @Test
+ public void testSecureNameNode() throws IOException, InterruptedException {
+ MiniDFSCluster cluster = null;
+ try {
+ String nnPrincipal =
+ System.getProperty("dfs.namenode.kerberos.principal");
+ String nnSpnegoPrincipal =
+ System.getProperty("dfs.namenode.kerberos.internal.spnego.principal");
+ String nnKeyTab = System.getProperty("dfs.namenode.keytab.file");
+ assertNotNull("NameNode principal was not specified", nnPrincipal);
+ assertNotNull("NameNode SPNEGO principal was not specified",
+ nnSpnegoPrincipal);
+ assertNotNull("NameNode keytab was not specified", nnKeyTab);
+
+ String dnPrincipal = System.getProperty("dfs.datanode.kerberos.principal");
+ String dnKeyTab = System.getProperty("dfs.datanode.keytab.file");
+ assertNotNull("DataNode principal was not specified", dnPrincipal);
+ assertNotNull("DataNode keytab was not specified", dnKeyTab);
+
+ Configuration conf = new HdfsConfiguration();
+ conf.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION,
+ "kerberos");
+ conf.set(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY, nnPrincipal);
+ conf.set(DFSConfigKeys.DFS_NAMENODE_INTERNAL_SPNEGO_USER_NAME_KEY,
+ nnSpnegoPrincipal);
+ conf.set(DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY, nnKeyTab);
+ conf.set(DFSConfigKeys.DFS_DATANODE_USER_NAME_KEY, dnPrincipal);
+ conf.set(DFSConfigKeys.DFS_DATANODE_KEYTAB_FILE_KEY, dnKeyTab);
+ // Secure DataNode requires using ports lower than 1024.
+ conf.set(DFSConfigKeys.DFS_DATANODE_ADDRESS_KEY, "127.0.0.1:1004");
+ conf.set(DFSConfigKeys.DFS_DATANODE_HTTP_ADDRESS_KEY, "127.0.0.1:1006");
+ conf.set(DFSConfigKeys.DFS_DATANODE_DATA_DIR_PERMISSION_KEY, "700");
+
+ cluster = new MiniDFSCluster.Builder(conf)
+ .numDataNodes(NUM_OF_DATANODES)
+ .checkDataNodeAddrConfig(true)
+ .build();
+ cluster.waitActive();
+ assertTrue(cluster.isDataNodeUp());
+
+ } catch (Exception ex) {
+ ex.printStackTrace();
+ fail("Exception while starting secure cluster: " + ex);
+ } finally {
+ if (cluster != null) {
+ cluster.shutdown();
+ }
+ }
+ }
+}
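
The new tests (this one and TestSecureNameNodeWithExternalKdc below) gate themselves on SecurityUtilTestHelper.isExternalKdcRunning() via JUnit's Assume, so they are skipped rather than failed when no external KDC is provisioned. That helper lives in hadoop-common and is not part of this diff; a plausible sketch of its gating logic, assuming the documented externalKdc=true system property is the only switch:

    package org.apache.hadoop.security;

    public class SecurityUtilTestHelper {
      /**
       * True when the user has declared (via -DexternalKdc=true) that an
       * external KDC is available. Sketch only; the real hadoop-common
       * helper may perform additional checks.
       */
      public static boolean isExternalKdcRunning() {
        return Boolean.parseBoolean(System.getProperty("externalKdc", "false"));
      }
    }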
Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSecureNameNodeWithExternalKdc.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSecureNameNodeWithExternalKdc.java?rev=1406202&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSecureNameNodeWithExternalKdc.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSecureNameNodeWithExternalKdc.java Tue Nov 6 16:12:43 2012
@@ -0,0 +1,129 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.hdfs.server.namenode;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.IOException;
+import java.security.PrivilegedExceptionAction;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.hdfs.HdfsConfiguration;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
+import static org.apache.hadoop.security.SecurityUtilTestHelper.isExternalKdcRunning;
+import org.junit.Assume;
+import org.junit.Before;
+import org.junit.Test;
+
+/**
+ * This test brings up a MiniDFSCluster with one NameNode and no
+ * DataNodes, with Kerberos authentication enabled using a user-specified
+ * KDC, principals, and keytabs.
+ *
+ * To run, users must specify the following system properties:
+ * externalKdc=true
+ * java.security.krb5.conf
+ * dfs.namenode.kerberos.principal
+ * dfs.namenode.kerberos.internal.spnego.principal
+ * dfs.namenode.keytab.file
+ * user.principal (do not specify superuser!)
+ * user.keytab
+ */
+public class TestSecureNameNodeWithExternalKdc {
+ private static final int NUM_OF_DATANODES = 0;
+
+ @Before
+ public void testExternalKdcRunning() {
+ // Tests are skipped if external KDC is not running.
+ Assume.assumeTrue(isExternalKdcRunning());
+ }
+
+ @Test
+ public void testSecureNameNode() throws IOException, InterruptedException {
+ MiniDFSCluster cluster = null;
+ try {
+ String nnPrincipal =
+ System.getProperty("dfs.namenode.kerberos.principal");
+ String nnSpnegoPrincipal =
+ System.getProperty("dfs.namenode.kerberos.internal.spnego.principal");
+ String nnKeyTab = System.getProperty("dfs.namenode.keytab.file");
+ assertNotNull("NameNode principal was not specified", nnPrincipal);
+ assertNotNull("NameNode SPNEGO principal was not specified",
+ nnSpnegoPrincipal);
+ assertNotNull("NameNode keytab was not specified", nnKeyTab);
+
+ Configuration conf = new HdfsConfiguration();
+ conf.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION,
+ "kerberos");
+ conf.set(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY, nnPrincipal);
+ conf.set(DFSConfigKeys.DFS_NAMENODE_INTERNAL_SPNEGO_USER_NAME_KEY,
+ nnSpnegoPrincipal);
+ conf.set(DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY, nnKeyTab);
+
+ cluster = new MiniDFSCluster.Builder(conf).numDataNodes(NUM_OF_DATANODES)
+ .build();
+ final MiniDFSCluster clusterRef = cluster;
+ cluster.waitActive();
+ FileSystem fsForCurrentUser = cluster.getFileSystem();
+ fsForCurrentUser.mkdirs(new Path("/tmp"));
+ fsForCurrentUser.setPermission(new Path("/tmp"), new FsPermission(
+ (short) 511));
+
+ // The user specified should not be a superuser
+ String userPrincipal = System.getProperty("user.principal");
+ String userKeyTab = System.getProperty("user.keytab");
+ assertNotNull("User principal was not specified", userPrincipal);
+ assertNotNull("User keytab was not specified", userKeyTab);
+
+ UserGroupInformation ugi = UserGroupInformation
+ .loginUserFromKeytabAndReturnUGI(userPrincipal, userKeyTab);
+ FileSystem fs = ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
+ @Override
+ public FileSystem run() throws Exception {
+ return clusterRef.getFileSystem();
+ }
+ });
+ try {
+ Path p = new Path("/users");
+ fs.mkdirs(p);
+ fail("User must not be allowed to write in /");
+ } catch (IOException expected) {
+ }
+
+ Path p = new Path("/tmp/alpha");
+ fs.mkdirs(p);
+ assertNotNull(fs.listStatus(p));
+ assertEquals(AuthenticationMethod.KERBEROS,
+ ugi.getAuthenticationMethod());
+ } finally {
+ if (cluster != null) {
+ cluster.shutdown();
+ }
+ }
+ }
+}
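
The non-superuser section of the test above is the canonical pattern for exercising HDFS under a specific Kerberos identity against an external KDC: log in from a keytab, then issue FileSystem calls inside doAs. Condensed into a standalone sketch (class name is illustrative; assumes user.principal and user.keytab identify a non-superuser principal):

    import java.security.PrivilegedExceptionAction;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.security.UserGroupInformation;

    public class KerberosDoAsSketch {
      public static FileSystem fileSystemAsUser(final Configuration conf) throws Exception {
        UserGroupInformation ugi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(
            System.getProperty("user.principal"),
            System.getProperty("user.keytab"));
        // Every RPC issued inside run() carries the keytab login above.
        return ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
          @Override
          public FileSystem run() throws Exception {
            return FileSystem.get(conf);
          }
        });
      }
    }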