accumulo-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From els...@apache.org
Subject [1/4] accumulo git commit: ACCUMULO-2815 Support for Kerberos client authentication.
Date Thu, 15 Jan 2015 17:51:37 GMT
Repository: accumulo
Updated Branches:
  refs/heads/master 8dc68b97a -> 4f19aa1f8


http://git-wip-us.apache.org/repos/asf/accumulo/blob/4f19aa1f/test/src/test/java/org/apache/accumulo/harness/MiniClusterHarness.java
----------------------------------------------------------------------
diff --git a/test/src/test/java/org/apache/accumulo/harness/MiniClusterHarness.java b/test/src/test/java/org/apache/accumulo/harness/MiniClusterHarness.java
index abdb627..06b4303 100644
--- a/test/src/test/java/org/apache/accumulo/harness/MiniClusterHarness.java
+++ b/test/src/test/java/org/apache/accumulo/harness/MiniClusterHarness.java
@@ -21,16 +21,24 @@ import java.io.File;
 import java.io.FileOutputStream;
 import java.io.OutputStream;
 import java.util.Map;
+import java.util.UUID;
 import java.util.concurrent.atomic.AtomicLong;
 
 import org.apache.accumulo.core.client.security.tokens.AuthenticationToken;
+import org.apache.accumulo.core.client.security.tokens.KerberosToken;
 import org.apache.accumulo.core.client.security.tokens.PasswordToken;
 import org.apache.accumulo.core.conf.Property;
 import org.apache.accumulo.minicluster.impl.MiniAccumuloClusterImpl;
 import org.apache.accumulo.minicluster.impl.MiniAccumuloConfigImpl;
+import org.apache.accumulo.server.security.handler.KerberosAuthenticator;
+import org.apache.accumulo.server.security.handler.KerberosAuthorizor;
+import org.apache.accumulo.server.security.handler.KerberosPermissionHandler;
 import org.apache.accumulo.test.functional.NativeMapIT;
 import org.apache.accumulo.test.util.CertUtils;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.base.Charsets;
 import com.google.common.base.Preconditions;
@@ -39,11 +47,16 @@ import com.google.common.base.Preconditions;
  * Harness that sets up a MiniAccumuloCluster in a manner expected for Accumulo integration tests.
  */
 public class MiniClusterHarness {
+  private static final Logger log = LoggerFactory.getLogger(MiniClusterHarness.class);
 
   private static final AtomicLong COUNTER = new AtomicLong(0);
 
   public static final String USE_SSL_FOR_IT_OPTION = "org.apache.accumulo.test.functional.useSslForIT",
-      USE_CRED_PROVIDER_FOR_IT_OPTION = "org.apache.accumulo.test.functional.useCredProviderForIT", TRUE = Boolean.toString(true);
+      USE_CRED_PROVIDER_FOR_IT_OPTION = "org.apache.accumulo.test.functional.useCredProviderForIT",
+      USE_KERBEROS_FOR_IT_OPTION = "org.apache.accumulo.test.functional.useKrbForIT", TRUE = Boolean.toString(true);
+
+  // TODO These are defined in MiniKdc >= 2.6.0
+  public static final String JAVA_SECURITY_KRB5_CONF = "java.security.krb5.conf", SUN_SECURITY_KRB5_DEBUG = "sun.security.krb5.debug";
 
   /**
    * Create a MiniAccumuloCluster using the given Token as the credentials for the root user.
@@ -56,35 +69,54 @@ public class MiniClusterHarness {
     return create(testBase.getClass().getName(), testBase.testName.getMethodName(), token);
   }
 
-  public MiniAccumuloClusterImpl create(AccumuloClusterIT testBase, AuthenticationToken token) throws Exception {
-    return create(testBase.getClass().getName(), testBase.testName.getMethodName(), token, testBase);
+  public MiniAccumuloClusterImpl create(AccumuloIT testBase, AuthenticationToken token, TestingKdc kdc) throws Exception {
+    return create(testBase.getClass().getName(), testBase.testName.getMethodName(), token, kdc);
+  }
+
+  public MiniAccumuloClusterImpl create(AccumuloClusterIT testBase, AuthenticationToken token, TestingKdc kdc) throws Exception {
+    return create(testBase.getClass().getName(), testBase.testName.getMethodName(), token, testBase, kdc);
   }
 
   public MiniAccumuloClusterImpl create(AccumuloClusterIT testBase, AuthenticationToken token, MiniClusterConfigurationCallback callback) throws Exception {
-    return create(testBase.getClass().getName(), testBase.testName.getMethodName(), token, testBase);
+    return create(testBase.getClass().getName(), testBase.testName.getMethodName(), token, callback);
   }
 
   public MiniAccumuloClusterImpl create(String testClassName, String testMethodName, AuthenticationToken token) throws Exception {
     return create(testClassName, testMethodName, token, MiniClusterConfigurationCallback.NO_CALLBACK);
   }
 
+  public MiniAccumuloClusterImpl create(String testClassName, String testMethodName, AuthenticationToken token, TestingKdc kdc) throws Exception {
+    return create(testClassName, testMethodName, token, MiniClusterConfigurationCallback.NO_CALLBACK, kdc);
+  }
+
   public MiniAccumuloClusterImpl create(String testClassName, String testMethodName, AuthenticationToken token, MiniClusterConfigurationCallback configCallback)
       throws Exception {
+    return create(testClassName, testMethodName, token, configCallback, null);
+  }
+
+  public MiniAccumuloClusterImpl create(String testClassName, String testMethodName, AuthenticationToken token,
+      MiniClusterConfigurationCallback configCallback, TestingKdc kdc) throws Exception {
     Preconditions.checkNotNull(token);
-    Preconditions.checkArgument(PasswordToken.class.isAssignableFrom(token.getClass()));
+    Preconditions.checkArgument(token instanceof PasswordToken || token instanceof KerberosToken, "A PasswordToken or KerberosToken is required");
 
-    String passwd = new String(((PasswordToken) token).getPassword(), Charsets.UTF_8);
-    MiniAccumuloConfigImpl cfg = new MiniAccumuloConfigImpl(AccumuloClusterIT.createTestDir(testClassName + "_" + testMethodName), passwd);
+    String rootPasswd;
+    if (token instanceof PasswordToken) {
+      rootPasswd = new String(((PasswordToken) token).getPassword(), Charsets.UTF_8);
+    } else {
+      rootPasswd = UUID.randomUUID().toString();
+    }
+
+    MiniAccumuloConfigImpl cfg = new MiniAccumuloConfigImpl(AccumuloClusterIT.createTestDir(testClassName + "_" + testMethodName), rootPasswd);
 
     // Enable native maps by default
     cfg.setNativeLibPaths(NativeMapIT.nativeMapLocation().getAbsolutePath());
     cfg.setProperty(Property.TSERV_NATIVEMAP_ENABLED, Boolean.TRUE.toString());
 
-    // Setup SSL and credential providers if the properties request such
-    configureForEnvironment(cfg, getClass(), AccumuloClusterIT.createSharedTestDir(this.getClass().getName() + "-ssl"));
-
     Configuration coreSite = new Configuration(false);
 
+    // Setup SSL and credential providers if the properties request such
+    configureForEnvironment(cfg, getClass(), AccumuloClusterIT.createSharedTestDir(this.getClass().getName() + "-ssl"), coreSite, kdc);
+
     // Invoke the callback for tests to configure MAC before it starts
     configCallback.configureMiniCluster(cfg, coreSite);
 
@@ -104,13 +136,25 @@ public class MiniClusterHarness {
     return miniCluster;
   }
 
-  protected void configureForEnvironment(MiniAccumuloConfigImpl cfg, Class<?> testClass, File folder) {
+  protected void configureForEnvironment(MiniAccumuloConfigImpl cfg, Class<?> testClass, File folder, Configuration coreSite, TestingKdc kdc) {
     if (TRUE.equals(System.getProperty(USE_SSL_FOR_IT_OPTION))) {
       configureForSsl(cfg, folder);
     }
     if (TRUE.equals(System.getProperty(USE_CRED_PROVIDER_FOR_IT_OPTION))) {
       cfg.setUseCredentialProvider(true);
     }
+
+    if (TRUE.equals(System.getProperty(USE_KERBEROS_FOR_IT_OPTION))) {
+      if (TRUE.equals(System.getProperty(USE_SSL_FOR_IT_OPTION))) {
+        throw new RuntimeException("Cannot use both SSL and Kerberos");
+      }
+
+      try {
+        configureForKerberos(cfg, folder, coreSite, kdc);
+      } catch (Exception e) {
+        throw new RuntimeException("Failed to initialize KDC", e);
+      }
+    }
   }
 
   protected void configureForSsl(MiniAccumuloConfigImpl cfg, File folder) {
@@ -141,4 +185,44 @@ public class MiniClusterHarness {
     siteConfig.put(Property.RPC_SSL_TRUSTSTORE_PASSWORD.getKey(), truststorePassword);
     cfg.setSiteConfig(siteConfig);
   }
+
+  protected void configureForKerberos(MiniAccumuloConfigImpl cfg, File folder, Configuration coreSite, TestingKdc kdc) throws Exception {
+    Map<String,String> siteConfig = cfg.getSiteConfig();
+    if (TRUE.equals(siteConfig.get(Property.INSTANCE_RPC_SSL_ENABLED.getKey()))) {
+      throw new RuntimeException("Cannot use both SSL and SASL/Kerberos");
+    }
+
+    if (TRUE.equals(siteConfig.get(Property.INSTANCE_RPC_SASL_ENABLED.getKey()))) {
+      // already enabled
+      return;
+    }
+
+    if (null == kdc) {
+      throw new IllegalStateException("TestingKdc was null");
+    }
+
+    log.info("Enabling Kerberos/SASL for minicluster");
+
+    // Turn on SASL and set the keytab/principal information
+    cfg.setProperty(Property.INSTANCE_RPC_SASL_ENABLED, "true");
+    cfg.setProperty(Property.GENERAL_KERBEROS_KEYTAB, kdc.getAccumuloKeytab().getAbsolutePath());
+    cfg.setProperty(Property.GENERAL_KERBEROS_PRINCIPAL, kdc.getAccumuloPrincipal());
+    cfg.setProperty(Property.INSTANCE_SECURITY_AUTHENTICATOR, KerberosAuthenticator.class.getName());
+    cfg.setProperty(Property.INSTANCE_SECURITY_AUTHORIZOR, KerberosAuthorizor.class.getName());
+    cfg.setProperty(Property.INSTANCE_SECURITY_PERMISSION_HANDLER, KerberosPermissionHandler.class.getName());
+    // Piggy-back on the "system user" credential, but use it as a normal KerberosToken, not the SystemToken.
+    cfg.setProperty(Property.TRACE_USER, kdc.getAccumuloPrincipal());
+    cfg.setProperty(Property.TRACE_TOKEN_TYPE, KerberosToken.CLASS_NAME);
+
+    // Pass down some KRB5 debug properties
+    Map<String,String> systemProperties = cfg.getSystemProperties();
+    systemProperties.put(JAVA_SECURITY_KRB5_CONF, System.getProperty(JAVA_SECURITY_KRB5_CONF, ""));
+    systemProperties.put(SUN_SECURITY_KRB5_DEBUG, System.getProperty(SUN_SECURITY_KRB5_DEBUG, "false"));
+    cfg.setSystemProperties(systemProperties);
+
+    // Make sure UserGroupInformation will do the correct login
+    coreSite.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
+
+    cfg.setRootUserName(kdc.getClientPrincipal());
+  }
 }

http://git-wip-us.apache.org/repos/asf/accumulo/blob/4f19aa1f/test/src/test/java/org/apache/accumulo/harness/SharedMiniClusterIT.java
----------------------------------------------------------------------
diff --git a/test/src/test/java/org/apache/accumulo/harness/SharedMiniClusterIT.java b/test/src/test/java/org/apache/accumulo/harness/SharedMiniClusterIT.java
index 2380f66..c844388 100644
--- a/test/src/test/java/org/apache/accumulo/harness/SharedMiniClusterIT.java
+++ b/test/src/test/java/org/apache/accumulo/harness/SharedMiniClusterIT.java
@@ -21,10 +21,16 @@ import java.util.Random;
 
 import org.apache.accumulo.core.client.Connector;
 import org.apache.accumulo.core.client.security.tokens.AuthenticationToken;
+import org.apache.accumulo.core.client.security.tokens.KerberosToken;
 import org.apache.accumulo.core.client.security.tokens.PasswordToken;
 import org.apache.accumulo.minicluster.impl.MiniAccumuloClusterImpl;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Convenience class which starts a single MAC instance for a test to leverage.
@@ -34,10 +40,14 @@ import org.junit.BeforeClass;
  * can't expose any information to tell the base class that it is to perform the one-MAC-per-class semantics.
  */
 public abstract class SharedMiniClusterIT extends AccumuloIT {
+  private static final Logger log = LoggerFactory.getLogger(SharedMiniClusterIT.class);
+  private static final String TRUE = Boolean.toString(true);
 
+  private static String principal = "root";
   private static String rootPassword;
   private static AuthenticationToken token;
   private static MiniAccumuloClusterImpl cluster;
+  private static TestingKdc krb;
 
   @BeforeClass
   public static void startMiniCluster() throws Exception {
@@ -47,17 +57,42 @@ public abstract class SharedMiniClusterIT extends AccumuloIT {
     // Make a shared MAC instance instead of spinning up one per test method
     MiniClusterHarness harness = new MiniClusterHarness();
 
-    rootPassword = "rootPasswordShared1";
-    token = new PasswordToken(rootPassword);
+    if (TRUE.equals(System.getProperty(MiniClusterHarness.USE_KERBEROS_FOR_IT_OPTION))) {
+      krb = new TestingKdc();
+      krb.start();
+      // Enabled krb auth
+      Configuration conf = new Configuration(false);
+      conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
+      UserGroupInformation.setConfiguration(conf);
+      // Login as the client
+      UserGroupInformation.loginUserFromKeytab(krb.getClientPrincipal(), krb.getClientKeytab().getAbsolutePath());
+      // Get the krb token
+      principal = krb.getClientPrincipal();
+      token = new KerberosToken(principal);
+    } else {
+      rootPassword = "rootPasswordShared1";
+      token = new PasswordToken(rootPassword);
+    }
 
-    cluster = harness.create(SharedMiniClusterIT.class.getName(), System.currentTimeMillis() + "_" + new Random().nextInt(Short.MAX_VALUE), token);
+    cluster = harness.create(SharedMiniClusterIT.class.getName(), System.currentTimeMillis() + "_" + new Random().nextInt(Short.MAX_VALUE), token, krb);
     cluster.start();
   }
 
   @AfterClass
   public static void stopMiniCluster() throws Exception {
     if (null != cluster) {
-      cluster.stop();
+      try {
+        cluster.stop();
+      } catch (Exception e) {
+        log.error("Failed to stop minicluster", e);
+      }
+    }
+    if (null != krb) {
+      try {
+        krb.stop();
+      } catch (Exception e) {
+        log.error("Failed to stop KDC", e);
+      }
     }
   }
 
@@ -79,7 +114,7 @@ public abstract class SharedMiniClusterIT extends AccumuloIT {
 
   public static Connector getConnector() {
     try {
-      return getCluster().getConnector("root", getToken());
+      return getCluster().getConnector(principal, getToken());
     } catch (Exception e) {
       throw new RuntimeException(e);
     }

http://git-wip-us.apache.org/repos/asf/accumulo/blob/4f19aa1f/test/src/test/java/org/apache/accumulo/harness/TestingKdc.java
----------------------------------------------------------------------
diff --git a/test/src/test/java/org/apache/accumulo/harness/TestingKdc.java b/test/src/test/java/org/apache/accumulo/harness/TestingKdc.java
new file mode 100644
index 0000000..2abdc62
--- /dev/null
+++ b/test/src/test/java/org/apache/accumulo/harness/TestingKdc.java
@@ -0,0 +1,165 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.accumulo.harness;
+
+import java.io.File;
+import java.net.InetAddress;
+import java.util.Properties;
+
+import org.apache.hadoop.minikdc.MiniKdc;
+import org.junit.Assert;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.base.Preconditions;
+
+/**
+ * Creates a {@link MiniKdc} for tests to use to exercise secure Accumulo
+ */
+public class TestingKdc {
+  private static final Logger log = LoggerFactory.getLogger(TestingKdc.class);
+
+  protected MiniKdc kdc = null;
+  protected File accumuloKeytab = null, clientKeytab = null;
+  protected String accumuloPrincipal = null, clientPrincipal = null;
+
+  public final String ORG_NAME = "EXAMPLE", ORG_DOMAIN = "COM";
+
+  private String hostname;
+  private File keytabDir;
+  private boolean started = false;
+
+  public TestingKdc() throws Exception {
+    File targetDir = new File(System.getProperty("user.dir"), "target");
+    Assert.assertTrue("Could not find Maven target directory: " + targetDir, targetDir.exists() && targetDir.isDirectory());
+
+    // Create the directories: target/kerberos/{keytabs,minikdc}
+    File krbDir = new File(targetDir, "kerberos"), kdcDir = new File(krbDir, "minikdc");
+    keytabDir = new File(krbDir, "keytabs");
+
+    keytabDir.mkdirs();
+    kdcDir.mkdirs();
+
+    hostname = InetAddress.getLocalHost().getCanonicalHostName();
+
+    Properties kdcConf = MiniKdc.createConf();
+    kdcConf.setProperty(MiniKdc.ORG_NAME, ORG_NAME);
+    kdcConf.setProperty(MiniKdc.ORG_DOMAIN, ORG_DOMAIN);
+    kdc = new MiniKdc(kdcConf, kdcDir);
+  }
+
+  /**
+   * Starts the KDC and creates the principals and their keytabs
+   */
+  public synchronized void start() throws Exception {
+    Preconditions.checkArgument(!started, "KDC was already started");
+    kdc.start();
+
+    accumuloKeytab = new File(keytabDir, "accumulo.keytab");
+    clientKeytab = new File(keytabDir, "client.keytab");
+
+    accumuloPrincipal = String.format("accumulo/%s", hostname);
+    clientPrincipal = "client";
+
+    log.info("Creating Kerberos principal {} with keytab {}", accumuloPrincipal, accumuloKeytab);
+    kdc.createPrincipal(accumuloKeytab, accumuloPrincipal);
+    log.info("Creating Kerberos principal {} with keytab {}", clientPrincipal, clientKeytab);
+    kdc.createPrincipal(clientKeytab, clientPrincipal);
+
+    accumuloPrincipal = qualifyUser(accumuloPrincipal);
+    clientPrincipal = qualifyUser(clientPrincipal);
+
+    started = true;
+  }
+
+  public synchronized void stop() throws Exception {
+    Preconditions.checkArgument(started, "KDC is not started");
+    kdc.stop();
+    started = false;
+  }
+
+  /**
+   * A directory where the automatically-created keytab files are written
+   */
+  public File getKeytabDir() {
+    return keytabDir;
+  }
+
+  /**
+   * A Kerberos keytab for the Accumulo server processes
+   */
+  public File getAccumuloKeytab() {
+    Preconditions.checkArgument(started, "Accumulo keytab is not initialized, is the KDC started?");
+    return accumuloKeytab;
+  }
+
+  /**
+   * The corresponding principal for the Accumulo service keytab
+   */
+  public String getAccumuloPrincipal() {
+    Preconditions.checkArgument(started, "Accumulo principal is not initialized, is the KDC started?");
+    return accumuloPrincipal;
+  }
+
+  /**
+   * A Kerberos keytab for client use
+   */
+  public File getClientKeytab() {
+    Preconditions.checkArgument(started, "Client keytab is not initialized, is the KDC started?");
+    return clientKeytab;
+  }
+
+  /**
+   * The corresponding principal for the client keytab
+   */
+  public String getClientPrincipal() {
+    Preconditions.checkArgument(started, "Client principal is not initialized, is the KDC started?");
+    return clientPrincipal;
+  }
+
+  /**
+   * @see MiniKdc#createPrincipal(File, String...)
+   */
+  public void createPrincipal(File keytabFile, String... principals) throws Exception {
+    Preconditions.checkArgument(started, "KDC is not started");
+    kdc.createPrincipal(keytabFile, principals);
+  }
+
+  /**
+   * @return the name for the realm
+   */
+  public String getOrgName() {
+    return ORG_NAME;
+  }
+
+    /**
+   * @return the domain for the realm
+   */
+  public String getOrgDomain() {
+    return ORG_DOMAIN;
+  }
+
+  /**
+   * Qualify a username (only the primary from the kerberos principal) with the proper realm
+   *
+   * @param primary
+   *          The primary or primary and instance
+   */
+  public String qualifyUser(String primary) {
+    return String.format("%s@%s.%s", primary, getOrgName(), getOrgDomain());
+  }
+}

http://git-wip-us.apache.org/repos/asf/accumulo/blob/4f19aa1f/test/src/test/java/org/apache/accumulo/harness/conf/AccumuloMiniClusterConfiguration.java
----------------------------------------------------------------------
diff --git a/test/src/test/java/org/apache/accumulo/harness/conf/AccumuloMiniClusterConfiguration.java b/test/src/test/java/org/apache/accumulo/harness/conf/AccumuloMiniClusterConfiguration.java
index 11b7530..0efba9e 100644
--- a/test/src/test/java/org/apache/accumulo/harness/conf/AccumuloMiniClusterConfiguration.java
+++ b/test/src/test/java/org/apache/accumulo/harness/conf/AccumuloMiniClusterConfiguration.java
@@ -16,23 +16,37 @@
  */
 package org.apache.accumulo.harness.conf;
 
+import java.io.File;
+import java.io.IOException;
 import java.util.Map;
 
 import org.apache.accumulo.core.client.security.tokens.AuthenticationToken;
+import org.apache.accumulo.core.client.security.tokens.KerberosToken;
 import org.apache.accumulo.core.client.security.tokens.PasswordToken;
+import org.apache.accumulo.harness.AccumuloClusterIT;
 import org.apache.accumulo.harness.AccumuloClusterIT.ClusterType;
+import org.apache.accumulo.harness.MiniClusterHarness;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authentication.util.KerberosName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Extract configuration properties for a MiniAccumuloCluster from Java properties
  */
 public class AccumuloMiniClusterConfiguration extends AccumuloClusterPropertyConfiguration {
+  private static final Logger log = LoggerFactory.getLogger(AccumuloMiniClusterConfiguration.class);
+  private static final String TRUE = Boolean.toString(true);
 
   public static final String ACCUMULO_MINI_PRINCIPAL_KEY = ACCUMULO_MINI_PREFIX + "principal";
   public static final String ACCUMULO_MINI_PRINCIPAL_DEFAULT = "root";
   public static final String ACCUMULO_MINI_PASSWORD_KEY = ACCUMULO_MINI_PREFIX + "password";
   public static final String ACCUMULO_MINI_PASSWORD_DEFAULT = "rootPassword1";
 
-  private Map<String,String> conf;
+  private final Map<String,String> conf;
+  private final boolean saslEnabled;
 
   public AccumuloMiniClusterConfiguration() {
     ClusterType type = getClusterType();
@@ -41,26 +55,52 @@ public class AccumuloMiniClusterConfiguration extends AccumuloClusterPropertyCon
     }
 
     this.conf = getConfiguration(type);
+    this.saslEnabled = TRUE.equals(System.getProperty(MiniClusterHarness.USE_KERBEROS_FOR_IT_OPTION));
+    log.debug("SASL is {}enabled", (saslEnabled ? "" : "not "));
   }
 
   @Override
   public String getPrincipal() {
-    String principal = conf.get(ACCUMULO_MINI_PRINCIPAL_KEY);
-    if (null == principal) {
-      principal = ACCUMULO_MINI_PRINCIPAL_DEFAULT;
-    }
+    if (saslEnabled) {
+      try {
+        return new KerberosName(AccumuloClusterIT.getClientPrincipal()).getShortName();
+      } catch (IOException e) {
+        throw new RuntimeException("Could not parse client principal", e);
+      }
+    } else {
+      String principal = conf.get(ACCUMULO_MINI_PRINCIPAL_KEY);
+      if (null == principal) {
+        principal = ACCUMULO_MINI_PRINCIPAL_DEFAULT;
+      }
 
-    return principal;
+      return principal;
+    }
   }
 
   @Override
   public AuthenticationToken getToken() {
-    String password = conf.get(ACCUMULO_MINI_PASSWORD_KEY);
-    if (null == password) {
-      password = ACCUMULO_MINI_PASSWORD_DEFAULT;
-    }
+    if (saslEnabled) {
+      // Turn on Kerberos authentication so UGI acts properly
+      final Configuration conf = new Configuration(false);
+      conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
+      UserGroupInformation.setConfiguration(conf);
 
-    return new PasswordToken(password);
+      File clientKeytab = AccumuloClusterIT.getClientKeytab();
+      String clientPrincipal = AccumuloClusterIT.getClientPrincipal();
+      try {
+        UserGroupInformation.loginUserFromKeytab(clientPrincipal, clientKeytab.getAbsolutePath());
+        return new KerberosToken(clientPrincipal);
+      } catch (IOException e) {
+        throw new RuntimeException(e);
+      }
+    } else {
+      String password = conf.get(ACCUMULO_MINI_PASSWORD_KEY);
+      if (null == password) {
+        password = ACCUMULO_MINI_PASSWORD_DEFAULT;
+      }
+
+      return new PasswordToken(password);
+    }
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/accumulo/blob/4f19aa1f/test/src/test/java/org/apache/accumulo/server/security/SystemCredentialsIT.java
----------------------------------------------------------------------
diff --git a/test/src/test/java/org/apache/accumulo/server/security/SystemCredentialsIT.java b/test/src/test/java/org/apache/accumulo/server/security/SystemCredentialsIT.java
index abbe5e6..3889110 100644
--- a/test/src/test/java/org/apache/accumulo/server/security/SystemCredentialsIT.java
+++ b/test/src/test/java/org/apache/accumulo/server/security/SystemCredentialsIT.java
@@ -30,6 +30,7 @@ import org.apache.accumulo.core.client.Scanner;
 import org.apache.accumulo.core.client.TableNotFoundException;
 import org.apache.accumulo.core.client.security.SecurityErrorCode;
 import org.apache.accumulo.core.client.security.tokens.AuthenticationToken;
+import org.apache.accumulo.core.client.security.tokens.PasswordToken;
 import org.apache.accumulo.core.data.Key;
 import org.apache.accumulo.core.data.Value;
 import org.apache.accumulo.core.metadata.RootTable;
@@ -41,7 +42,7 @@ import org.junit.Test;
 
 public class SystemCredentialsIT extends ConfigurableMacIT {
 
-  private static final int FAIL_CODE = 7;
+  private static final int FAIL_CODE = 7, BAD_PASSWD_FAIL_CODE = 8;
 
   @Override
   protected int defaultTimeoutSeconds() {
@@ -52,6 +53,7 @@ public class SystemCredentialsIT extends ConfigurableMacIT {
   public void testSystemCredentials() throws Exception {
     assertEquals(0, exec(SystemCredentialsIT.class, "good", getCluster().getZooKeepers()).waitFor());
     assertEquals(FAIL_CODE, exec(SystemCredentialsIT.class, "bad", getCluster().getZooKeepers()).waitFor());
+    assertEquals(BAD_PASSWD_FAIL_CODE, exec(SystemCredentialsIT.class, "bad_password", getCluster().getZooKeepers()).waitFor());
   }
 
   public static void main(final String[] args) throws AccumuloException, TableNotFoundException, AccumuloSecurityException {
@@ -59,7 +61,7 @@ public class SystemCredentialsIT extends ConfigurableMacIT {
     if (args.length < 2)
       throw new RuntimeException("Incorrect usage; expected to be run by test only");
     if (args[0].equals("bad")) {
-      creds = new SystemCredentials(new Instance() {
+      Instance inst = new Instance() {
 
         @Override
         public int getZooKeepersSessionTimeOut() {
@@ -114,12 +116,78 @@ public class SystemCredentialsIT extends ConfigurableMacIT {
           throw new UnsupportedOperationException();
         }
 
-      });
+      };
+      creds = SystemCredentials.get(inst);
     } else if (args[0].equals("good")) {
       creds = SystemCredentials.get(HdfsZooInstance.getInstance());
+    } else if (args[0].equals("bad_password")) {
+      Instance inst = new Instance() {
+
+        @Override
+        public int getZooKeepersSessionTimeOut() {
+          throw new UnsupportedOperationException();
+        }
+
+        @Override
+        public String getZooKeepers() {
+          throw new UnsupportedOperationException();
+        }
+
+        @Override
+        public String getRootTabletLocation() {
+          throw new UnsupportedOperationException();
+        }
+
+        @Override
+        public List<String> getMasterLocations() {
+          throw new UnsupportedOperationException();
+        }
+
+        @Override
+        public String getInstanceName() {
+          throw new UnsupportedOperationException();
+        }
+
+        @Override
+        public String getInstanceID() {
+          return SystemCredentials.class.getName();
+        }
+
+        @Override
+        public Connector getConnector(String principal, AuthenticationToken token) throws AccumuloException, AccumuloSecurityException {
+          throw new UnsupportedOperationException();
+        }
+
+        @Deprecated
+        @Override
+        public Connector getConnector(String user, CharSequence pass) throws AccumuloException, AccumuloSecurityException {
+          throw new UnsupportedOperationException();
+        }
+
+        @Deprecated
+        @Override
+        public Connector getConnector(String user, ByteBuffer pass) throws AccumuloException, AccumuloSecurityException {
+          throw new UnsupportedOperationException();
+        }
+
+        @Deprecated
+        @Override
+        public Connector getConnector(String user, byte[] pass) throws AccumuloException, AccumuloSecurityException {
+          throw new UnsupportedOperationException();
+        }
+
+      };
+      creds = new SystemCredentials(inst, "!SYSTEM", new PasswordToken("fake"));
     }
     Instance instance = HdfsZooInstance.getInstance();
-    Connector conn = instance.getConnector(creds.getPrincipal(), creds.getToken());
+    Connector conn;
+    try {
+      conn = instance.getConnector(creds.getPrincipal(), creds.getToken());
+    } catch (AccumuloSecurityException e) {
+      e.printStackTrace(System.err);
+      System.exit(BAD_PASSWD_FAIL_CODE);
+      return;
+    }
     try {
       Scanner scan = conn.createScanner(RootTable.NAME, Authorizations.EMPTY);
       for (Entry<Key,Value> e : scan) {

http://git-wip-us.apache.org/repos/asf/accumulo/blob/4f19aa1f/test/src/test/java/org/apache/accumulo/test/ArbitraryTablePropertiesIT.java
----------------------------------------------------------------------
diff --git a/test/src/test/java/org/apache/accumulo/test/ArbitraryTablePropertiesIT.java b/test/src/test/java/org/apache/accumulo/test/ArbitraryTablePropertiesIT.java
index aa5c164..4481934 100644
--- a/test/src/test/java/org/apache/accumulo/test/ArbitraryTablePropertiesIT.java
+++ b/test/src/test/java/org/apache/accumulo/test/ArbitraryTablePropertiesIT.java
@@ -21,12 +21,15 @@ import java.util.concurrent.atomic.AtomicInteger;
 
 import org.apache.accumulo.core.client.AccumuloSecurityException;
 import org.apache.accumulo.core.client.Connector;
+import org.apache.accumulo.core.client.security.tokens.KerberosToken;
 import org.apache.accumulo.core.client.security.tokens.PasswordToken;
 import org.apache.accumulo.core.conf.Property;
 import org.apache.accumulo.core.security.TablePermission;
 import org.apache.accumulo.harness.SharedMiniClusterIT;
 import org.apache.log4j.Logger;
 import org.junit.Assert;
+import org.junit.Assume;
+import org.junit.Before;
 import org.junit.Test;
 
 public class ArbitraryTablePropertiesIT extends SharedMiniClusterIT {
@@ -37,6 +40,11 @@ public class ArbitraryTablePropertiesIT extends SharedMiniClusterIT {
     return 30;
   };
 
+  @Before
+  public void checkNoKerberos() {
+    Assume.assumeFalse(getToken() instanceof KerberosToken);
+  }
+
   // Test set, get, and remove arbitrary table properties on the root account
   @Test
   public void setGetRemoveTablePropertyRoot() throws Exception {

http://git-wip-us.apache.org/repos/asf/accumulo/blob/4f19aa1f/test/src/test/java/org/apache/accumulo/test/CleanWalIT.java
----------------------------------------------------------------------
diff --git a/test/src/test/java/org/apache/accumulo/test/CleanWalIT.java b/test/src/test/java/org/apache/accumulo/test/CleanWalIT.java
index 1fcd5a4..bdfbd13 100644
--- a/test/src/test/java/org/apache/accumulo/test/CleanWalIT.java
+++ b/test/src/test/java/org/apache/accumulo/test/CleanWalIT.java
@@ -70,10 +70,12 @@ public class CleanWalIT extends AccumuloClusterIT {
 
   @After
   public void onlineTraceTable() throws Exception {
-    Connector conn = getConnector();
-    String traceTable = conn.instanceOperations().getSystemConfiguration().get(Property.TRACE_TABLE.getKey());
-    if (conn.tableOperations().exists(traceTable)) {
-      conn.tableOperations().online(traceTable, true);
+    if (null != cluster) {
+      Connector conn = getConnector();
+      String traceTable = conn.instanceOperations().getSystemConfiguration().get(Property.TRACE_TABLE.getKey());
+      if (conn.tableOperations().exists(traceTable)) {
+        conn.tableOperations().online(traceTable, true);
+      }
     }
   }
 

http://git-wip-us.apache.org/repos/asf/accumulo/blob/4f19aa1f/test/src/test/java/org/apache/accumulo/test/functional/BatchScanSplitIT.java
----------------------------------------------------------------------
diff --git a/test/src/test/java/org/apache/accumulo/test/functional/BatchScanSplitIT.java b/test/src/test/java/org/apache/accumulo/test/functional/BatchScanSplitIT.java
index 221889b..30d6958 100644
--- a/test/src/test/java/org/apache/accumulo/test/functional/BatchScanSplitIT.java
+++ b/test/src/test/java/org/apache/accumulo/test/functional/BatchScanSplitIT.java
@@ -20,7 +20,6 @@ import static java.nio.charset.StandardCharsets.UTF_8;
 
 import java.util.ArrayList;
 import java.util.Collection;
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map.Entry;
 import java.util.Random;
@@ -48,7 +47,7 @@ public class BatchScanSplitIT extends AccumuloClusterIT {
 
   @Override
   public void configureMiniCluster(MiniAccumuloConfigImpl cfg, Configuration hadoopCoreSite) {
-    cfg.setSiteConfig(Collections.singletonMap(Property.TSERV_MAJC_DELAY.getKey(), "0"));
+    cfg.setProperty(Property.TSERV_MAJC_DELAY, "0");
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/accumulo/blob/4f19aa1f/test/src/test/java/org/apache/accumulo/test/functional/KerberosIT.java
----------------------------------------------------------------------
diff --git a/test/src/test/java/org/apache/accumulo/test/functional/KerberosIT.java b/test/src/test/java/org/apache/accumulo/test/functional/KerberosIT.java
new file mode 100644
index 0000000..e3da6eb
--- /dev/null
+++ b/test/src/test/java/org/apache/accumulo/test/functional/KerberosIT.java
@@ -0,0 +1,316 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.accumulo.test.functional;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import java.io.File;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.Map.Entry;
+
+import org.apache.accumulo.core.client.AccumuloException;
+import org.apache.accumulo.core.client.AccumuloSecurityException;
+import org.apache.accumulo.core.client.BatchWriter;
+import org.apache.accumulo.core.client.BatchWriterConfig;
+import org.apache.accumulo.core.client.Connector;
+import org.apache.accumulo.core.client.Scanner;
+import org.apache.accumulo.core.client.TableExistsException;
+import org.apache.accumulo.core.client.TableNotFoundException;
+import org.apache.accumulo.core.client.admin.CompactionConfig;
+import org.apache.accumulo.core.client.security.tokens.KerberosToken;
+import org.apache.accumulo.core.client.security.tokens.PasswordToken;
+import org.apache.accumulo.core.conf.Property;
+import org.apache.accumulo.core.data.Key;
+import org.apache.accumulo.core.data.Mutation;
+import org.apache.accumulo.core.data.Value;
+import org.apache.accumulo.core.metadata.MetadataTable;
+import org.apache.accumulo.core.metadata.RootTable;
+import org.apache.accumulo.core.security.Authorizations;
+import org.apache.accumulo.core.security.ColumnVisibility;
+import org.apache.accumulo.core.security.SystemPermission;
+import org.apache.accumulo.core.security.TablePermission;
+import org.apache.accumulo.harness.AccumuloIT;
+import org.apache.accumulo.harness.MiniClusterHarness;
+import org.apache.accumulo.harness.TestingKdc;
+import org.apache.accumulo.minicluster.impl.MiniAccumuloClusterImpl;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.minikdc.MiniKdc;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.collect.Sets;
+
+/**
+ * MAC test which uses {@link MiniKdc} to simulate a secure environment. Can be used as a sanity check for Kerberos/SASL testing.
+ */
+public class KerberosIT extends AccumuloIT {
+  private static final Logger log = LoggerFactory.getLogger(KerberosIT.class);
+
+  private static TestingKdc kdc;
+  private static String krbEnabledForITs = null;
+
+  @BeforeClass
+  public static void startKdc() throws Exception {
+    kdc = new TestingKdc();
+    kdc.start();
+    krbEnabledForITs = System.getProperty(MiniClusterHarness.USE_KERBEROS_FOR_IT_OPTION);
+    if (null == krbEnabledForITs || !Boolean.parseBoolean(krbEnabledForITs)) {
+      System.setProperty(MiniClusterHarness.USE_KERBEROS_FOR_IT_OPTION, "true");
+    }
+  }
+
+  @AfterClass
+  public static void stopKdc() throws Exception {
+    if (null != kdc) {
+      kdc.stop();
+    }
+    if (null != krbEnabledForITs) {
+      System.setProperty(MiniClusterHarness.USE_KERBEROS_FOR_IT_OPTION, krbEnabledForITs);
+    }
+  }
+
+  private MiniAccumuloClusterImpl mac;
+
+  @Before
+  public void startMac() throws Exception {
+    MiniClusterHarness harness = new MiniClusterHarness();
+    mac = harness.create(this, new PasswordToken("unused"), kdc);
+    mac.getConfig().setNumTservers(1);
+    mac.start();
+    // Enable kerberos auth
+    Configuration conf = new Configuration(false);
+    conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
+    UserGroupInformation.setConfiguration(conf);
+  }
+
+  @After
+  public void stopMac() throws Exception {
+    if (null != mac) {
+      mac.stop();
+    }
+  }
+
+  @Test
+  public void testAdminUser() throws Exception {
+    // Login as the client (provided to `accumulo init` as the "root" user)
+    UserGroupInformation.loginUserFromKeytab(kdc.getClientPrincipal(), kdc.getClientKeytab().getAbsolutePath());
+
+    final Connector conn = mac.getConnector(kdc.getClientPrincipal(), new KerberosToken());
+
+    // The "root" user should have all system permissions
+    for (SystemPermission perm : SystemPermission.values()) {
+      assertTrue("Expected user to have permission: " + perm, conn.securityOperations().hasSystemPermission(conn.whoami(), perm));
+    }
+
+    // and the ability to modify the root and metadata tables
+    for (String table : Arrays.asList(RootTable.NAME, MetadataTable.NAME)){
+      assertTrue(conn.securityOperations().hasTablePermission(conn.whoami(), table, TablePermission.ALTER_TABLE));
+    }
+  }
+
+  @Test
+  public void testNewUser() throws Exception {
+    String newUser = testName.getMethodName();
+    final File newUserKeytab = new File(kdc.getKeytabDir(), newUser + ".keytab");
+    if (newUserKeytab.exists()) {
+      newUserKeytab.delete();
+    }
+
+    // Create a new user
+    kdc.createPrincipal(newUserKeytab, newUser);
+
+    newUser = kdc.qualifyUser(newUser);
+
+    // Login as the "root" user
+    UserGroupInformation.loginUserFromKeytab(kdc.getClientPrincipal(), kdc.getClientKeytab().getAbsolutePath());
+    log.info("Logged in as {}", kdc.getClientPrincipal());
+
+    Connector conn = mac.getConnector(kdc.getClientPrincipal(), new KerberosToken());
+    log.info("Created connector as {}", kdc.getClientPrincipal());
+    assertEquals(kdc.getClientPrincipal(), conn.whoami());
+
+    // Make sure the system user exists -- this will force some RPC to happen server-side
+    createTableWithDataAndCompact(conn);
+
+    HashSet<String> users = Sets.newHashSet(kdc.getClientPrincipal());
+    assertEquals(users, conn.securityOperations().listLocalUsers());
+
+    // Switch to a new user
+    UserGroupInformation.loginUserFromKeytab(newUser, newUserKeytab.getAbsolutePath());
+    log.info("Logged in as {}", newUser);
+
+    conn = mac.getConnector(newUser, new KerberosToken());
+    log.info("Created connector as {}", newUser);
+    assertEquals(newUser, conn.whoami());
+
+    // The new user should have no system permissions
+    for (SystemPermission perm : SystemPermission.values()) {
+      assertFalse(conn.securityOperations().hasSystemPermission(newUser, perm));
+    }
+
+    users.add(newUser);
+
+    // Same users as before, plus the new user we just created
+    assertEquals(users, conn.securityOperations().listLocalUsers());
+  }
+
+  @Test
+  public void testUserPrivilegesThroughGrant() throws Exception {
+    String user1 = testName.getMethodName();
+    final File user1Keytab = new File(kdc.getKeytabDir(), user1 + ".keytab");
+    if (user1Keytab.exists()) {
+      user1Keytab.delete();
+    }
+
+    // Create some new users
+    kdc.createPrincipal(user1Keytab, user1);
+
+    user1 = kdc.qualifyUser(user1);
+
+    // Log in as user1
+    UserGroupInformation.loginUserFromKeytab(user1, user1Keytab.getAbsolutePath());
+    log.info("Logged in as {}", user1);
+
+    // Indirectly creates this user when we use it
+    Connector conn = mac.getConnector(user1, new KerberosToken());
+    log.info("Created connector as {}", user1);
+
+    // The new user should have no system permissions
+    for (SystemPermission perm : SystemPermission.values()) {
+      assertFalse(conn.securityOperations().hasSystemPermission(user1, perm));
+    }
+
+    UserGroupInformation.loginUserFromKeytab(kdc.getClientPrincipal(), kdc.getClientKeytab().getAbsolutePath());
+    conn = mac.getConnector(kdc.getClientPrincipal(), new KerberosToken());
+
+    conn.securityOperations().grantSystemPermission(user1, SystemPermission.CREATE_TABLE);
+
+    // Switch back to the original user
+    UserGroupInformation.loginUserFromKeytab(user1, user1Keytab.getAbsolutePath());
+    conn = mac.getConnector(user1, new KerberosToken());
+
+    // Shouldn't throw an exception since we granted the create table permission
+    final String table = testName.getMethodName() + "_user_table";
+    conn.tableOperations().create(table);
+
+    // Make sure we can actually use the table we made
+    BatchWriter bw = conn.createBatchWriter(table, new BatchWriterConfig());
+    Mutation m = new Mutation("a");
+    m.put("b", "c", "d");
+    bw.addMutation(m);
+    bw.close();
+
+    conn.tableOperations().compact(table, new CompactionConfig().setWait(true).setFlush(true));
+  }
+
+  @Test
+  public void testUserPrivilegesForTable() throws Exception {
+    String user1 = testName.getMethodName();
+    final File user1Keytab = new File(kdc.getKeytabDir(), user1 + ".keytab");
+    if (user1Keytab.exists()) {
+      user1Keytab.delete();
+    }
+
+    // Create some new users -- cannot contain realm
+    kdc.createPrincipal(user1Keytab, user1);
+
+    user1 = kdc.qualifyUser(user1);
+
+    // Log in as user1
+    UserGroupInformation.loginUserFromKeytab(user1, user1Keytab.getAbsolutePath());
+    log.info("Logged in as {}", user1);
+
+    // Indirectly creates this user when we use it
+    Connector conn = mac.getConnector(user1, new KerberosToken());
+    log.info("Created connector as {}", user1);
+
+    // The new user should have no system permissions
+    for (SystemPermission perm : SystemPermission.values()) {
+      assertFalse(conn.securityOperations().hasSystemPermission(user1, perm));
+    }
+
+    UserGroupInformation.loginUserFromKeytab(kdc.getClientPrincipal(), kdc.getClientKeytab().getAbsolutePath());
+    conn = mac.getConnector(kdc.getClientPrincipal(), new KerberosToken());
+
+    final String table = testName.getMethodName() + "_user_table";
+    conn.tableOperations().create(table);
+
+    final String viz = "viz";
+
+    // Give our unprivileged user permission on the table we made for them
+    conn.securityOperations().grantTablePermission(user1, table, TablePermission.READ);
+    conn.securityOperations().grantTablePermission(user1, table, TablePermission.WRITE);
+    conn.securityOperations().grantTablePermission(user1, table, TablePermission.ALTER_TABLE);
+    conn.securityOperations().grantTablePermission(user1, table, TablePermission.DROP_TABLE);
+    conn.securityOperations().changeUserAuthorizations(user1, new Authorizations(viz));
+
+    // Switch back to the original user
+    UserGroupInformation.loginUserFromKeytab(user1, user1Keytab.getAbsolutePath());
+    conn = mac.getConnector(user1, new KerberosToken());
+
+    // Make sure we can actually use the table we made
+
+    // Write data
+    final long ts = 1000l;
+    BatchWriter bw = conn.createBatchWriter(table, new BatchWriterConfig());
+    Mutation m = new Mutation("a");
+    m.put("b", "c", new ColumnVisibility(viz.getBytes()), ts, "d");
+    bw.addMutation(m);
+    bw.close();
+
+    // Compact
+    conn.tableOperations().compact(table, new CompactionConfig().setWait(true).setFlush(true));
+
+    // Alter
+    conn.tableOperations().setProperty(table, Property.TABLE_BLOOM_ENABLED.getKey(), "true");
+
+    // Read (and proper authorizations)
+    Scanner s = conn.createScanner(table, new Authorizations(viz));
+    Iterator<Entry<Key,Value>> iter = s.iterator();
+    assertTrue("No results from iterator", iter.hasNext());
+    Entry<Key,Value> entry = iter.next();
+    assertEquals(new Key("a", "b", "c", viz, ts), entry.getKey());
+    assertEquals(new Value("d".getBytes()), entry.getValue());
+    assertFalse("Had more results from iterator", iter.hasNext());
+  }
+
+  /**
+   * Creates a table, adds a record to it, and then compacts the table. A simple way to make sure that the system user exists (since the master does an RPC to
+   * the tserver which will create the system user if it doesn't already exist).
+   */
+  private void createTableWithDataAndCompact(Connector conn) throws TableNotFoundException, AccumuloSecurityException, AccumuloException, TableExistsException {
+    final String table = testName.getMethodName() + "_table";
+    conn.tableOperations().create(table);
+    BatchWriter bw = conn.createBatchWriter(table, new BatchWriterConfig());
+    Mutation m = new Mutation("a");
+    m.put("b", "c", "d");
+    bw.addMutation(m);
+    bw.close();
+    conn.tableOperations().compact(table, new CompactionConfig().setFlush(true).setWait(true));
+  }
+}

http://git-wip-us.apache.org/repos/asf/accumulo/blob/4f19aa1f/test/src/test/java/org/apache/accumulo/test/functional/MetadataIT.java
----------------------------------------------------------------------
diff --git a/test/src/test/java/org/apache/accumulo/test/functional/MetadataIT.java b/test/src/test/java/org/apache/accumulo/test/functional/MetadataIT.java
index 4c9207a..ae33651 100644
--- a/test/src/test/java/org/apache/accumulo/test/functional/MetadataIT.java
+++ b/test/src/test/java/org/apache/accumulo/test/functional/MetadataIT.java
@@ -38,6 +38,8 @@ import org.apache.accumulo.core.metadata.schema.MetadataSchema;
 import org.apache.accumulo.core.security.Authorizations;
 import org.apache.accumulo.core.util.UtilWaitThread;
 import org.apache.accumulo.harness.AccumuloClusterIT;
+import org.apache.accumulo.minicluster.impl.MiniAccumuloConfigImpl;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
 import org.junit.Assert;
 import org.junit.Test;
@@ -45,6 +47,11 @@ import org.junit.Test;
 public class MetadataIT extends AccumuloClusterIT {
 
   @Override
+  public void configureMiniCluster(MiniAccumuloConfigImpl cfg, Configuration hadoopCoreSite) {
+    cfg.setNumTservers(1);
+  }
+
+  @Override
   public int defaultTimeoutSeconds() {
     return 2 * 60;
   }

http://git-wip-us.apache.org/repos/asf/accumulo/blob/4f19aa1f/test/src/test/java/org/apache/accumulo/test/security/KerberosTokenTest.java
----------------------------------------------------------------------
diff --git a/test/src/test/java/org/apache/accumulo/test/security/KerberosTokenTest.java b/test/src/test/java/org/apache/accumulo/test/security/KerberosTokenTest.java
new file mode 100644
index 0000000..5568e9c
--- /dev/null
+++ b/test/src/test/java/org/apache/accumulo/test/security/KerberosTokenTest.java
@@ -0,0 +1,108 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.accumulo.test.security;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+
+import java.io.File;
+
+import org.apache.accumulo.core.client.security.tokens.KerberosToken;
+import org.apache.accumulo.harness.TestingKdc;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
+public class KerberosTokenTest {
+
+  @Rule
+  public TestName testName = new TestName();
+
+  private static TestingKdc kdc;
+
+  @BeforeClass
+  public static void startKdc() throws Exception {
+    kdc = new TestingKdc();
+    kdc.start();
+  }
+
+  @AfterClass
+  public static void stopKdc() throws Exception {
+    if (null != kdc) {
+      kdc.stop();
+    }
+  }
+
+  @Before
+  public void resetUgiForKrb() {
+    Configuration conf = new Configuration(false);
+    conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
+    UserGroupInformation.setConfiguration(conf);
+  }
+
+  @Test
+  public void test() throws Exception {
+    String user = testName.getMethodName();
+    File userKeytab = new File(kdc.getKeytabDir(), user + ".keytab");
+    if (userKeytab.exists()) {
+      userKeytab.delete();
+    }
+
+    kdc.createPrincipal(userKeytab, user);
+
+    user = kdc.qualifyUser(user);
+
+    UserGroupInformation.loginUserFromKeytab(user, userKeytab.getAbsolutePath());
+    KerberosToken token = new KerberosToken();
+
+    assertEquals(user, token.getPrincipal());
+
+    // Use the long-hand constructor, should be equivalent to short-hand
+    KerberosToken tokenWithPrinc = new KerberosToken(user);
+    assertEquals(token, tokenWithPrinc);
+    assertEquals(token.hashCode(), tokenWithPrinc.hashCode());
+  }
+
+  @Test
+  public void testDestroy() throws Exception {
+    String user = testName.getMethodName();
+    File userKeytab = new File(kdc.getKeytabDir(), user + ".keytab");
+    if (userKeytab.exists()) {
+      userKeytab.delete();
+    }
+
+    kdc.createPrincipal(userKeytab, user);
+
+    user = kdc.qualifyUser(user);
+
+    UserGroupInformation.loginUserFromKeytab(user, userKeytab.getAbsolutePath());
+    KerberosToken token = new KerberosToken();
+
+    assertEquals(user, token.getPrincipal());
+    token.destroy();
+    assertTrue(token.isDestroyed());
+    assertNull(token.getPrincipal());
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/accumulo/blob/4f19aa1f/test/src/test/resources/log4j.properties
----------------------------------------------------------------------
diff --git a/test/src/test/resources/log4j.properties b/test/src/test/resources/log4j.properties
index cb35840..1b89dfe 100644
--- a/test/src/test/resources/log4j.properties
+++ b/test/src/test/resources/log4j.properties
@@ -43,3 +43,12 @@ log4j.logger.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=WARN
 log4j.logger.org.apache.hadoop.hdfs.server.datanode.DataNode.clienttrace=WARN
 log4j.logger.BlockStateChange=WARN
 log4j.logger.org.apache.accumulo.core.client.impl.TabletServerBatchReaderIterator=INFO
+log4j.logger.org.apache.hadoop.security=DEBUG
+log4j.logger.org.apache.hadoop.minikdc=DEBUG
+log4j.logger.org.apache.directory=INFO
+log4j.logger.org.apache.directory.api.ldap=WARN
+# This is really spammy at debug
+log4j.logger.org.apache.thrift.transport.TSaslTransport=INFO
+# From apache-ds/minikdc
+log4j.logger.org.apache.mina=INFO
+log4j.logger.org.apache.accumulo.server.thrift.UGIAssumingProcessor=TRACE
\ No newline at end of file


Mime
View raw message