accumulo-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From mwa...@apache.org
Subject [accumulo] branch master updated: Fix #883 Specify client props as HDFS path in new M/R API (#894)
Date Fri, 11 Jan 2019 23:01:11 GMT
This is an automated email from the ASF dual-hosted git repository.

mwalch pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/accumulo.git


The following commit(s) were added to refs/heads/master by this push:
     new 44b4af2  Fix #883 Specify client props as HDFS path in new M/R API (#894)
44b4af2 is described below

commit 44b4af2c5243f7c123ea22e364b70158512a0643
Author: Mike Walch <mwalch@apache.org>
AuthorDate: Fri Jan 11 18:01:07 2019 -0500

    Fix #883 Specify client props as HDFS path in new M/R API (#894)
    
    * Also cleaned up new M/R API by removing unnecessary methods
      and using properties in place of ClientInfo
---
 .../hadoop/mapred/AccumuloOutputFormat.java        |  12 +-
 .../hadoop/mapreduce/AccumuloOutputFormat.java     |  12 +-
 .../hadoop/mapreduce/InputFormatBuilder.java       |  16 +-
 .../hadoop/mapreduce/OutputFormatBuilder.java      |  14 +-
 .../hadoopImpl/mapred/AccumuloRecordReader.java    |   4 +-
 .../hadoopImpl/mapred/AccumuloRecordWriter.java    |   4 +-
 .../hadoopImpl/mapreduce/AccumuloRecordReader.java |  11 +-
 .../hadoopImpl/mapreduce/AccumuloRecordWriter.java |   4 +-
 .../mapreduce/InputFormatBuilderImpl.java          |  42 ++---
 .../mapreduce/OutputFormatBuilderImpl.java         |  23 ++-
 .../hadoopImpl/mapreduce/lib/ConfiguratorBase.java | 173 +++------------------
 .../mapreduce/lib/InputConfigurator.java           |   8 +-
 .../mapreduce/lib/OutputConfigurator.java          |   4 +-
 .../hadoop/its/mapred/AccumuloOutputFormatIT.java  |   1 -
 .../mapreduce/lib/ConfiguratorBaseTest.java        |  43 ++---
 15 files changed, 116 insertions(+), 255 deletions(-)

diff --git a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoop/mapred/AccumuloOutputFormat.java
b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoop/mapred/AccumuloOutputFormat.java
index 2386081..cce542d 100644
--- a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoop/mapred/AccumuloOutputFormat.java
+++ b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoop/mapred/AccumuloOutputFormat.java
@@ -17,13 +17,14 @@
 package org.apache.accumulo.hadoop.mapred;
 
 import java.io.IOException;
+import java.util.Properties;
 
 import org.apache.accumulo.core.client.Accumulo;
 import org.apache.accumulo.core.client.AccumuloClient;
 import org.apache.accumulo.core.client.AccumuloException;
 import org.apache.accumulo.core.client.AccumuloSecurityException;
 import org.apache.accumulo.core.client.security.tokens.AuthenticationToken;
-import org.apache.accumulo.core.clientImpl.ClientInfo;
+import org.apache.accumulo.core.conf.ClientProperty;
 import org.apache.accumulo.core.data.Mutation;
 import org.apache.accumulo.hadoop.mapreduce.OutputFormatBuilder;
 import org.apache.accumulo.hadoopImpl.mapred.AccumuloRecordWriter;
@@ -46,11 +47,10 @@ public class AccumuloOutputFormat implements OutputFormat<Text,Mutation> {
 
   @Override
   public void checkOutputSpecs(FileSystem ignored, JobConf job) throws IOException {
-    ClientInfo clientInfo = OutputConfigurator.getClientInfo(CLASS, job);
-    String principal = clientInfo.getPrincipal();
-    AuthenticationToken token = clientInfo.getAuthenticationToken();
-    try (AccumuloClient c = Accumulo.newClient().from(clientInfo.getProperties()).build()) {
-      if (!c.securityOperations().authenticateUser(principal, token))
+    Properties clientProps = OutputConfigurator.getClientProperties(CLASS, job);
+    AuthenticationToken token = ClientProperty.getAuthenticationToken(clientProps);
+    try (AccumuloClient c = Accumulo.newClient().from(clientProps).build()) {
+      if (!c.securityOperations().authenticateUser(c.whoami(), token))
         throw new IOException("Unable to authenticate user");
     } catch (AccumuloException | AccumuloSecurityException e) {
       throw new IOException(e);
diff --git a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoop/mapreduce/AccumuloOutputFormat.java
b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoop/mapreduce/AccumuloOutputFormat.java
index d85b2c8..4c84211 100644
--- a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoop/mapreduce/AccumuloOutputFormat.java
+++ b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoop/mapreduce/AccumuloOutputFormat.java
@@ -17,13 +17,14 @@
 package org.apache.accumulo.hadoop.mapreduce;
 
 import java.io.IOException;
+import java.util.Properties;
 
 import org.apache.accumulo.core.client.Accumulo;
 import org.apache.accumulo.core.client.AccumuloClient;
 import org.apache.accumulo.core.client.AccumuloException;
 import org.apache.accumulo.core.client.AccumuloSecurityException;
 import org.apache.accumulo.core.client.security.tokens.AuthenticationToken;
-import org.apache.accumulo.core.clientImpl.ClientInfo;
+import org.apache.accumulo.core.conf.ClientProperty;
 import org.apache.accumulo.core.data.Mutation;
 import org.apache.accumulo.hadoopImpl.mapreduce.AccumuloRecordWriter;
 import org.apache.accumulo.hadoopImpl.mapreduce.OutputFormatBuilderImpl;
@@ -57,11 +58,10 @@ public class AccumuloOutputFormat extends OutputFormat<Text,Mutation> {
 
   @Override
   public void checkOutputSpecs(JobContext job) throws IOException {
-    ClientInfo clientInfo = OutputConfigurator.getClientInfo(CLASS, job.getConfiguration());
-    String principal = clientInfo.getPrincipal();
-    AuthenticationToken token = clientInfo.getAuthenticationToken();
-    try (AccumuloClient c = Accumulo.newClient().from(clientInfo.getProperties()).build()) {
-      if (!c.securityOperations().authenticateUser(principal, token))
+    Properties clientProps = OutputConfigurator.getClientProperties(CLASS, job.getConfiguration());
+    AuthenticationToken token = ClientProperty.getAuthenticationToken(clientProps);
+    try (AccumuloClient c = Accumulo.newClient().from(clientProps).build()) {
+      if (!c.securityOperations().authenticateUser(c.whoami(), token))
         throw new IOException("Unable to authenticate user");
     } catch (AccumuloException | AccumuloSecurityException e) {
       throw new IOException(e);
diff --git a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoop/mapreduce/InputFormatBuilder.java
b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoop/mapreduce/InputFormatBuilder.java
index ddb4deb..d274e7b 100644
--- a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoop/mapreduce/InputFormatBuilder.java
+++ b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoop/mapreduce/InputFormatBuilder.java
@@ -45,14 +45,26 @@ public interface InputFormatBuilder {
    * @since 2.0
    */
   interface ClientParams<T> {
+
     /**
-     * Set the connection information needed to communicate with Accumulo in this job.
-     * clientProperties param can be created using {@link Accumulo#newClientProperties()}
+     * Set client properties needed to communicate with Accumulo for this job. This information will
+     * be serialized into the configuration. Therefore, it is more secure to use
+     * {@link #clientPropertiesPath(String)}. Client properties can be created using
+     * {@link Accumulo#newClientProperties()}
      *
      * @param clientProperties
      *          Accumulo connection information
      */
     TableParams<T> clientProperties(Properties clientProperties);
+
+    /**
+     * Set path to DFS location containing accumulo-client.properties file. This setting is more
+     * secure than {@link #clientProperties(Properties)}
+     *
+     * @param clientPropsPath
+     *          DFS path to accumulo-client.properties
+     */
+    TableParams<T> clientPropertiesPath(String clientPropsPath);
   }
 
   /**
diff --git a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoop/mapreduce/OutputFormatBuilder.java
b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoop/mapreduce/OutputFormatBuilder.java
index 678ba31..134346e 100644
--- a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoop/mapreduce/OutputFormatBuilder.java
+++ b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoop/mapreduce/OutputFormatBuilder.java
@@ -36,12 +36,23 @@ public interface OutputFormatBuilder {
   interface ClientParams<T> {
     /**
      * Set the connection information needed to communicate with Accumulo in this job.
-     * clientProperties param can be created using {@link Accumulo#newClientProperties()}
+     * clientProperties param can be created using {@link Accumulo#newClientProperties()}. Client
+     * properties will be serialized into configuration. Therefore it is more secure to use
+     * {@link #clientPropertiesPath(String)}
      *
      * @param clientProperties
      *          Accumulo connection information
      */
     OutputOptions<T> clientProperties(Properties clientProperties);
+
+    /**
+     * Set path to DFS location containing accumulo-client.properties file. This setting is more
+     * secure than {@link #clientProperties(Properties)}
+     *
+     * @param clientPropsPath
+     *          DFS path to accumulo-client.properties
+     */
+    OutputOptions<T> clientPropertiesPath(String clientPropsPath);
   }
 
   /**
@@ -80,5 +91,4 @@ public interface OutputFormatBuilder {
      */
     void store(T j);
   }
-
 }
diff --git a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapred/AccumuloRecordReader.java
b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapred/AccumuloRecordReader.java
index a2e386e..279e53c 100644
--- a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapred/AccumuloRecordReader.java
+++ b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapred/AccumuloRecordReader.java
@@ -31,7 +31,6 @@ import java.util.Map;
 import java.util.Random;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.accumulo.core.client.Accumulo;
 import org.apache.accumulo.core.client.AccumuloClient;
 import org.apache.accumulo.core.client.AccumuloException;
 import org.apache.accumulo.core.client.BatchScanner;
@@ -445,7 +444,6 @@ public abstract class AccumuloRecordReader<K,V> implements RecordReader<K,V> {
    * Creates {@link AccumuloClient} from the configuration
    */
   private static AccumuloClient createClient(JobConf job, Class<?> callingClass) {
-    return Accumulo.newClient()
-        .from(InputConfigurator.getClientInfo(callingClass, job).getProperties()).build();
+    return InputConfigurator.createClient(callingClass, job);
   }
 }
diff --git a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapred/AccumuloRecordWriter.java
b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapred/AccumuloRecordWriter.java
index 4ff1323..5ce64a3 100644
--- a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapred/AccumuloRecordWriter.java
+++ b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapred/AccumuloRecordWriter.java
@@ -22,7 +22,6 @@ import java.util.HashSet;
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.accumulo.core.client.Accumulo;
 import org.apache.accumulo.core.client.AccumuloClient;
 import org.apache.accumulo.core.client.AccumuloException;
 import org.apache.accumulo.core.client.AccumuloSecurityException;
@@ -77,8 +76,7 @@ public class AccumuloRecordWriter implements RecordWriter<Text,Mutation> {
     this.defaultTableName = (tname == null) ? null : new Text(tname);
 
     if (!simulate) {
-      this.client = Accumulo.newClient()
-          .from(OutputConfigurator.getClientInfo(CLASS, job).getProperties()).build();
+      this.client = OutputConfigurator.createClient(CLASS, job);
       mtbw = client.createMultiTableBatchWriter();
     }
   }
diff --git a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/AccumuloRecordReader.java
b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/AccumuloRecordReader.java
index 32fc759..8e7d068 100644
--- a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/AccumuloRecordReader.java
+++ b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/AccumuloRecordReader.java
@@ -31,7 +31,6 @@ import java.util.Map;
 import java.util.Random;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.accumulo.core.client.Accumulo;
 import org.apache.accumulo.core.client.AccumuloClient;
 import org.apache.accumulo.core.client.AccumuloException;
 import org.apache.accumulo.core.client.BatchScanner;
@@ -45,7 +44,6 @@ import org.apache.accumulo.core.client.TableNotFoundException;
 import org.apache.accumulo.core.client.TableOfflineException;
 import org.apache.accumulo.core.client.sample.SamplerConfiguration;
 import org.apache.accumulo.core.clientImpl.ClientContext;
-import org.apache.accumulo.core.clientImpl.ClientInfo;
 import org.apache.accumulo.core.clientImpl.OfflineScanner;
 import org.apache.accumulo.core.clientImpl.ScannerImpl;
 import org.apache.accumulo.core.clientImpl.Table;
@@ -464,16 +462,9 @@ public abstract class AccumuloRecordReader<K,V> extends RecordReader<K,V> {
   }
 
   /**
-   * Gets the {@link ClientInfo} from the configuration
-   */
-  private static ClientInfo getClientInfo(JobContext context, Class<?> callingClass) {
-    return InputConfigurator.getClientInfo(callingClass, context.getConfiguration());
-  }
-
-  /**
    * Creates {@link AccumuloClient} from the configuration
    */
   private static AccumuloClient createClient(JobContext context, Class<?> callingClass) {
-    return Accumulo.newClient().from(getClientInfo(context, callingClass).getProperties()).build();
+    return InputConfigurator.createClient(callingClass, context.getConfiguration());
   }
 }
diff --git a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/AccumuloRecordWriter.java
b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/AccumuloRecordWriter.java
index 3fafdb6..680d813 100644
--- a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/AccumuloRecordWriter.java
+++ b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/AccumuloRecordWriter.java
@@ -22,7 +22,6 @@ import java.util.HashSet;
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.accumulo.core.client.Accumulo;
 import org.apache.accumulo.core.client.AccumuloClient;
 import org.apache.accumulo.core.client.AccumuloException;
 import org.apache.accumulo.core.client.AccumuloSecurityException;
@@ -78,8 +77,7 @@ public class AccumuloRecordWriter extends RecordWriter<Text,Mutation> {
     this.defaultTableName = (tname == null) ? null : new Text(tname);
 
     if (!simulate) {
-      this.client = Accumulo.newClient()
-          .from(OutputConfigurator.getClientInfo(CLASS, conf).getProperties()).build();
+      this.client = OutputConfigurator.createClient(CLASS, conf);
       mtbw = client.createMultiTableBatchWriter();
     }
   }
diff --git a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/InputFormatBuilderImpl.java
b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/InputFormatBuilderImpl.java
index 119a921..5b3bac0 100644
--- a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/InputFormatBuilderImpl.java
+++ b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/InputFormatBuilderImpl.java
@@ -30,7 +30,7 @@ import org.apache.accumulo.core.client.AccumuloException;
 import org.apache.accumulo.core.client.AccumuloSecurityException;
 import org.apache.accumulo.core.client.IteratorSetting;
 import org.apache.accumulo.core.client.sample.SamplerConfiguration;
-import org.apache.accumulo.core.clientImpl.ClientInfo;
+import org.apache.accumulo.core.conf.ClientProperty;
 import org.apache.accumulo.core.data.Range;
 import org.apache.accumulo.core.security.Authorizations;
 import org.apache.accumulo.hadoop.mapreduce.InputFormatBuilder;
@@ -46,11 +46,11 @@ public class InputFormatBuilderImpl<T>
     implements InputFormatBuilder, InputFormatBuilder.ClientParams<T>,
     InputFormatBuilder.TableParams<T>, InputFormatBuilder.InputFormatOptions<T> {
 
-  Class<?> callingClass;
-  ClientInfo clientInfo;
-
-  String currentTable;
-  Map<String,InputTableConfig> tableConfigMap = Collections.emptyMap();
+  private Class<?> callingClass;
+  private Properties clientProps;
+  private String clientPropsPath;
+  private String currentTable;
+  private Map<String,InputTableConfig> tableConfigMap = Collections.emptyMap();
 
   public InputFormatBuilderImpl(Class<?> callingClass) {
     this.callingClass = callingClass;
@@ -58,8 +58,15 @@ public class InputFormatBuilderImpl<T>
 
   @Override
   public InputFormatBuilder.TableParams<T> clientProperties(Properties clientProperties) {
-    this.clientInfo = ClientInfo
-        .from(Objects.requireNonNull(clientProperties, "clientProperties must not be null"));
+    this.clientProps = Objects.requireNonNull(clientProperties,
+        "clientProperties must not be null");
+    return this;
+  }
+
+  @Override
+  public TableParams<T> clientPropertiesPath(String clientPropsPath) {
+    this.clientPropsPath = Objects.requireNonNull(clientPropsPath,
+        "clientPropsPath must not be null");
     return this;
   }
 
@@ -182,7 +189,7 @@ public class InputFormatBuilderImpl<T>
   }
 
   private void _store(Configuration conf) throws AccumuloException, AccumuloSecurityException {
-    InputConfigurator.setClientInfo(callingClass, conf, clientInfo);
+    InputConfigurator.setClientProperties(callingClass, conf, clientProps, clientPropsPath);
     if (tableConfigMap.size() == 0) {
       throw new IllegalArgumentException("At least one Table must be configured for job.");
     }
@@ -191,8 +198,13 @@ public class InputFormatBuilderImpl<T>
       Map.Entry<String,InputTableConfig> entry = tableConfigMap.entrySet().iterator().next();
       InputConfigurator.setInputTableName(callingClass, conf, entry.getKey());
       InputTableConfig config = entry.getValue();
-      if (!config.getScanAuths().isPresent())
-        config.setScanAuths(getUserAuths(clientInfo));
+      if (!config.getScanAuths().isPresent()) {
+        Properties props = InputConfigurator.getClientProperties(callingClass, conf);
+        try (AccumuloClient c = Accumulo.newClient().from(props).build()) {
+          String principal = ClientProperty.AUTH_PRINCIPAL.getValue(props);
+          config.setScanAuths(c.securityOperations().getUserAuthorizations(principal));
+        }
+      }
       InputConfigurator.setScanAuthorizations(callingClass, conf, config.getScanAuths().get());
       // all optional values
       if (config.getContext().isPresent())
@@ -224,12 +236,4 @@ public class InputFormatBuilderImpl<T>
   private void store(JobConf jobConf) throws AccumuloException, AccumuloSecurityException {
     _store(jobConf);
   }
-
-  private Authorizations getUserAuths(ClientInfo clientInfo)
-      throws AccumuloSecurityException, AccumuloException {
-    try (AccumuloClient c = Accumulo.newClient().from(clientInfo.getProperties()).build())
{
-      return c.securityOperations().getUserAuthorizations(clientInfo.getPrincipal());
-    }
-  }
-
 }
diff --git a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/OutputFormatBuilderImpl.java
b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/OutputFormatBuilderImpl.java
index b0c7656..d7582ea 100644
--- a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/OutputFormatBuilderImpl.java
+++ b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/OutputFormatBuilderImpl.java
@@ -20,7 +20,6 @@ import java.util.Objects;
 import java.util.Optional;
 import java.util.Properties;
 
-import org.apache.accumulo.core.clientImpl.ClientInfo;
 import org.apache.accumulo.hadoop.mapreduce.OutputFormatBuilder;
 import org.apache.accumulo.hadoopImpl.mapreduce.lib.OutputConfigurator;
 import org.apache.hadoop.conf.Configuration;
@@ -30,12 +29,13 @@ import org.apache.hadoop.mapreduce.Job;
 public class OutputFormatBuilderImpl<T>
     implements OutputFormatBuilder.ClientParams<T>, OutputFormatBuilder.OutputOptions<T> {
   private final Class<?> callingClass;
-  ClientInfo clientInfo;
+  private Properties clientProps;
+  private String clientPropsPath;
 
   // optional values
-  Optional<String> defaultTableName = Optional.empty();
-  boolean createTables = false;
-  boolean simulationMode = false;
+  private Optional<String> defaultTableName = Optional.empty();
+  private boolean createTables = false;
+  private boolean simulationMode = false;
 
   public OutputFormatBuilderImpl(Class<?> callingClass) {
     this.callingClass = callingClass;
@@ -43,8 +43,15 @@ public class OutputFormatBuilderImpl<T>
 
   @Override
   public OutputFormatBuilder.OutputOptions<T> clientProperties(Properties clientProperties) {
-    this.clientInfo = ClientInfo
-        .from(Objects.requireNonNull(clientProperties, "ClientInfo must not be null"));
+    this.clientProps = Objects.requireNonNull(clientProperties,
+        "clientProperties must not be null");
+    return this;
+  }
+
+  @Override
+  public OutputFormatBuilder.OutputOptions<T> clientPropertiesPath(String clientPropsPath) {
+    this.clientPropsPath = Objects.requireNonNull(clientPropsPath,
+        "clientPropsPath must not be null");
     return this;
   }
 
@@ -82,7 +89,7 @@ public class OutputFormatBuilderImpl<T>
   }
 
   private void _store(Configuration conf) {
-    OutputConfigurator.setClientInfo(callingClass, conf, clientInfo);
+    OutputConfigurator.setClientProperties(callingClass, conf, clientProps, clientPropsPath);
     if (defaultTableName.isPresent())
       OutputConfigurator.setDefaultTableName(callingClass, conf, defaultTableName.get());
     OutputConfigurator.setCreateTables(callingClass, conf, createTables);
diff --git a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/lib/ConfiguratorBase.java
b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/lib/ConfiguratorBase.java
index dd2c3dc..810320d 100644
--- a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/lib/ConfiguratorBase.java
+++ b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/lib/ConfiguratorBase.java
@@ -16,8 +16,6 @@
  */
 package org.apache.accumulo.hadoopImpl.mapreduce.lib;
 
-import static com.google.common.base.Preconditions.checkArgument;
-
 import java.io.IOException;
 import java.io.StringReader;
 import java.io.StringWriter;
@@ -29,41 +27,19 @@ import java.util.Scanner;
 import org.apache.accumulo.core.Constants;
 import org.apache.accumulo.core.client.Accumulo;
 import org.apache.accumulo.core.client.AccumuloClient;
-import org.apache.accumulo.core.client.admin.DelegationTokenConfig;
-import org.apache.accumulo.core.client.security.tokens.AuthenticationToken;
-import org.apache.accumulo.core.client.security.tokens.KerberosToken;
-import org.apache.accumulo.core.clientImpl.AuthenticationTokenIdentifier;
-import org.apache.accumulo.core.clientImpl.ClientInfo;
-import org.apache.accumulo.core.clientImpl.ClientInfoImpl;
-import org.apache.accumulo.core.clientImpl.DelegationTokenImpl;
-import org.apache.accumulo.core.conf.ClientProperty;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.util.StringUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 /**
  * @since 1.6.0
  */
 public class ConfiguratorBase {
 
-  private static final Logger log = LoggerFactory.getLogger(ConfiguratorBase.class);
-
-  /**
-   * Specifies that connection info was configured
-   *
-   * @since 1.6.0
-   */
-  public enum ConnectorInfo {
-    IS_CONFIGURED
-  }
-
   public enum ClientOpts {
-    CLIENT_PROPS, CLIENT_PROPS_FILE
+    CLIENT_PROPS, CLIENT_PROPS_FILE, IS_CONFIGURED
   }
 
   /**
@@ -102,69 +78,26 @@ public class ConfiguratorBase {
         + StringUtils.camelize(e.name().toLowerCase());
   }
 
-  public static ClientInfo updateToken(org.apache.hadoop.security.Credentials credentials,
-      ClientInfo info) {
-    ClientInfo result = info;
-    if (info.getAuthenticationToken() instanceof KerberosToken) {
-      log.info("Received KerberosToken, attempting to fetch DelegationToken");
-      try (AccumuloClient client = Accumulo.newClient().from(info.getProperties()).build()) {
-        AuthenticationToken token = client.securityOperations()
-            .getDelegationToken(new DelegationTokenConfig());
-        result = ClientInfo.from(Accumulo.newClientProperties().from(info.getProperties())
-            .as(info.getPrincipal(), token).build());
-      } catch (Exception e) {
-        log.warn("Failed to automatically obtain DelegationToken, "
-            + "Mappers/Reducers will likely fail to communicate with Accumulo", e);
-      }
-    }
-    // DelegationTokens can be passed securely from user to task without serializing insecurely in
-    // the configuration
-    if (info.getAuthenticationToken() instanceof DelegationTokenImpl) {
-      DelegationTokenImpl delegationToken = (DelegationTokenImpl) info.getAuthenticationToken();
-
-      // Convert it into a Hadoop Token
-      AuthenticationTokenIdentifier identifier = delegationToken.getIdentifier();
-      Token<AuthenticationTokenIdentifier> hadoopToken = new Token<>(identifier.getBytes(),
-          delegationToken.getPassword(), identifier.getKind(), delegationToken.getServiceName());
-
-      // Add the Hadoop Token to the Job so it gets serialized and passed along.
-      credentials.addToken(hadoopToken.getService(), hadoopToken);
-    }
-    return result;
-  }
-
-  public static void setClientInfo(Class<?> implementingClass, Configuration conf,
-      ClientInfo info) {
-    setClientProperties(implementingClass, conf, info.getProperties());
-    conf.setBoolean(enumToConfKey(implementingClass, ConnectorInfo.IS_CONFIGURED), true);
-  }
-
-  public static ClientInfo getClientInfo(Class<?> implementingClass, Configuration conf) {
-    Properties props = getClientProperties(implementingClass, conf);
-    return new ClientInfoImpl(props);
-  }
-
-  public static void setClientPropertiesFile(Class<?> implementingClass, Configuration conf,
-      String clientPropertiesFile) {
-    try {
-      DistributedCacheHelper.addCacheFile(new URI(clientPropertiesFile), conf);
-    } catch (URISyntaxException e) {
-      throw new IllegalStateException("Unable to add client properties file \""
-          + clientPropertiesFile + "\" to distributed cache.");
-    }
-    conf.set(enumToConfKey(implementingClass, ClientOpts.CLIENT_PROPS_FILE), clientPropertiesFile);
-    conf.setBoolean(enumToConfKey(implementingClass, ConnectorInfo.IS_CONFIGURED), true);
-  }
-
   public static void setClientProperties(Class<?> implementingClass, Configuration conf,
-      Properties props) {
-    StringWriter writer = new StringWriter();
-    try {
-      props.store(writer, "client properties");
-    } catch (IOException e) {
-      throw new IllegalStateException(e);
+      Properties props, String clientPropsPath) {
+    if (clientPropsPath != null) {
+      try {
+        DistributedCacheHelper.addCacheFile(new URI(clientPropsPath), conf);
+      } catch (URISyntaxException e) {
+        throw new IllegalStateException("Unable to add client properties file \"" + clientPropsPath
+            + "\" to distributed cache.");
+      }
+      conf.set(enumToConfKey(implementingClass, ClientOpts.CLIENT_PROPS_FILE), clientPropsPath);
+    } else {
+      StringWriter writer = new StringWriter();
+      try {
+        props.store(writer, "client properties");
+      } catch (IOException e) {
+        throw new IllegalStateException(e);
+      }
+      conf.set(enumToConfKey(implementingClass, ClientOpts.CLIENT_PROPS), writer.toString());
     }
-    conf.set(enumToConfKey(implementingClass, ClientOpts.CLIENT_PROPS), writer.toString());
+    conf.setBoolean(enumToConfKey(implementingClass, ClientOpts.IS_CONFIGURED), true);
   }
 
   public static Properties getClientProperties(Class<?> implementingClass, Configuration conf) {
@@ -208,35 +141,6 @@ public class ConfiguratorBase {
   }
 
   /**
-   * Sets the connector information needed to communicate with Accumulo in this job.
-   *
-   * <p>
-   * <b>WARNING:</b> The serialized token is stored in the configuration and shared with all
-   * MapReduce tasks. It is BASE64 encoded to provide a charset safe conversion to a string, and is
-   * not intended to be secure.
-   *
-   * @param implementingClass
-   *          the class whose name will be used as a prefix for the property configuration key
-   * @param conf
-   *          the Hadoop configuration object to configure
-   * @param principal
-   *          a valid Accumulo user name
-   * @param token
-   *          the user's password
-   * @since 1.6.0
-   */
-  public static void setConnectorInfo(Class<?> implementingClass, Configuration conf,
-      String principal, AuthenticationToken token) {
-    checkArgument(principal != null, "principal is null");
-    checkArgument(token != null, "token is null");
-    Properties props = getClientProperties(implementingClass, conf);
-    props.setProperty(ClientProperty.AUTH_PRINCIPAL.getKey(), principal);
-    ClientProperty.setAuthenticationToken(props, token);
-    setClientProperties(implementingClass, conf, props);
-    conf.setBoolean(enumToConfKey(implementingClass, ConnectorInfo.IS_CONFIGURED), true);
-  }
-
-  /**
    * Determines if the connector info has already been set for this instance.
    *
    * @param implementingClass
@@ -245,44 +149,9 @@ public class ConfiguratorBase {
    *          the Hadoop configuration object to configure
    * @return true if the connector info has already been set, false otherwise
    * @since 1.6.0
-   * @see #setConnectorInfo(Class, Configuration, String, AuthenticationToken)
-   */
-  public static Boolean isConnectorInfoSet(Class<?> implementingClass, Configuration conf) {
-    return conf.getBoolean(enumToConfKey(implementingClass, ConnectorInfo.IS_CONFIGURED), false);
-  }
-
-  /**
-   * Gets the user name from the configuration.
-   *
-   * @param implementingClass
-   *          the class whose name will be used as a prefix for the property configuration key
-   * @param conf
-   *          the Hadoop configuration object to configure
-   * @return the principal
-   * @since 1.6.0
-   * @see #setConnectorInfo(Class, Configuration, String, AuthenticationToken)
-   */
-  public static String getPrincipal(Class<?> implementingClass, Configuration conf) {
-    Properties props = getClientProperties(implementingClass, conf);
-    return props.getProperty(ClientProperty.AUTH_PRINCIPAL.getKey());
-  }
-
-  /**
-   * Gets the authenticated token from either the specified token file or directly from the
-   * configuration, whichever was used when the job was configured.
-   *
-   * @param implementingClass
-   *          the class whose name will be used as a prefix for the property configuration key
-   * @param conf
-   *          the Hadoop configuration object to configure
-   * @return the principal's authentication token
-   * @since 1.6.0
-   * @see #setConnectorInfo(Class, Configuration, String, AuthenticationToken)
    */
-  public static AuthenticationToken getAuthenticationToken(Class<?> implementingClass,
-      Configuration conf) {
-    Properties props = getClientProperties(implementingClass, conf);
-    return ClientProperty.getAuthenticationToken(props);
+  public static Boolean isClientConfigured(Class<?> implementingClass, Configuration conf) {
+    return conf.getBoolean(enumToConfKey(implementingClass, ClientOpts.IS_CONFIGURED), false);
   }
 
   /**
diff --git a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/lib/InputConfigurator.java
b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/lib/InputConfigurator.java
index e1da2b9..19e7f0c 100644
--- a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/lib/InputConfigurator.java
+++ b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/lib/InputConfigurator.java
@@ -34,6 +34,7 @@ import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
+import java.util.Properties;
 import java.util.Set;
 import java.util.StringTokenizer;
 
@@ -52,6 +53,7 @@ import org.apache.accumulo.core.clientImpl.ClientContext;
 import org.apache.accumulo.core.clientImpl.Table;
 import org.apache.accumulo.core.clientImpl.Tables;
 import org.apache.accumulo.core.clientImpl.TabletLocator;
+import org.apache.accumulo.core.conf.ClientProperty;
 import org.apache.accumulo.core.data.Key;
 import org.apache.accumulo.core.data.PartialKey;
 import org.apache.accumulo.core.data.Range;
@@ -733,10 +735,8 @@ public class InputConfigurator extends ConfiguratorBase {
       if (getInputTableConfigs(implementingClass, conf).size() == 0)
         throw new IOException("No table set.");
 
-      String principal = getPrincipal(implementingClass, conf);
-      if (principal == null) {
-        principal = getClientInfo(implementingClass, conf).getPrincipal();
-      }
+      Properties props = getClientProperties(implementingClass, conf);
+      String principal = ClientProperty.AUTH_PRINCIPAL.getValue(props);
 
       for (Map.Entry<String,InputTableConfig> tableConfig : inputTableConfigs.entrySet()) {
         if (!client.securityOperations().hasTablePermission(principal, tableConfig.getKey(),
diff --git a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/lib/OutputConfigurator.java b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/lib/OutputConfigurator.java
index 9cdca7b..027355f 100644
--- a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/lib/OutputConfigurator.java
+++ b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/lib/OutputConfigurator.java
@@ -27,7 +27,6 @@ import java.util.concurrent.TimeUnit;
 
 import org.apache.accumulo.core.client.BatchWriter;
 import org.apache.accumulo.core.client.BatchWriterConfig;
-import org.apache.accumulo.core.clientImpl.ClientInfo;
 import org.apache.accumulo.core.clientImpl.DurabilityImpl;
 import org.apache.hadoop.conf.Configuration;
 
@@ -93,8 +92,7 @@ public class OutputConfigurator extends ConfiguratorBase {
   public static BatchWriterConfig getBatchWriterOptions(Class<?> implementingClass,
       Configuration conf) {
     BatchWriterConfig bwConfig = new BatchWriterConfig();
-    ClientInfo info = getClientInfo(implementingClass, conf);
-    Properties props = info.getProperties();
+    Properties props = getClientProperties(implementingClass, conf);
     String property = props.getProperty(BATCH_WRITER_DURABILITY.getKey());
     if (property != null)
       bwConfig.setDurability(DurabilityImpl.fromString(property));
diff --git a/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/its/mapred/AccumuloOutputFormatIT.java b/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/its/mapred/AccumuloOutputFormatIT.java
index 74ea09f..7291bad 100644
--- a/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/its/mapred/AccumuloOutputFormatIT.java
+++ b/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/its/mapred/AccumuloOutputFormatIT.java
@@ -227,5 +227,4 @@ public class AccumuloOutputFormatIT extends ConfigurableMacBase {
       }
     }
   }
-
 }
diff --git a/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoopImpl/mapreduce/lib/ConfiguratorBaseTest.java b/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoopImpl/mapreduce/lib/ConfiguratorBaseTest.java
index 6334f85..cfe154e 100644
--- a/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoopImpl/mapreduce/lib/ConfiguratorBaseTest.java
+++ b/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoopImpl/mapreduce/lib/ConfiguratorBaseTest.java
@@ -18,12 +18,12 @@ package org.apache.accumulo.hadoopImpl.mapreduce.lib;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
 
+import java.util.Properties;
+
 import org.apache.accumulo.core.Constants;
 import org.apache.accumulo.core.client.Accumulo;
-import org.apache.accumulo.core.client.security.tokens.AuthenticationToken;
 import org.apache.accumulo.core.client.security.tokens.PasswordToken;
 import org.apache.accumulo.core.clientImpl.ClientInfo;
 import org.apache.hadoop.conf.Configuration;
@@ -44,38 +44,15 @@ public class ConfiguratorBaseTest {
   }
 
   @Test
-  public void testSetConnectorInfoClassOfQConfigurationStringAuthenticationToken() {
-    Configuration conf = new Configuration();
-    assertFalse(ConfiguratorBase.isConnectorInfoSet(this.getClass(), conf));
-    ConfiguratorBase.setConnectorInfo(this.getClass(), conf, "testUser",
-        new PasswordToken("testPassword"));
-    assertTrue(ConfiguratorBase.isConnectorInfoSet(this.getClass(), conf));
-    assertEquals("testUser", ConfiguratorBase.getPrincipal(this.getClass(), conf));
-    AuthenticationToken token = ConfiguratorBase.getAuthenticationToken(this.getClass(), conf);
-    assertNotNull(token);
-    assertEquals(PasswordToken.class, token.getClass());
-    assertEquals(new PasswordToken("testPassword"), token);
-  }
-
-  @Test
-  public void testSetConnectorInfoClassOfQConfigurationStringString() {
-    Configuration conf = new Configuration();
-    assertFalse(ConfiguratorBase.isConnectorInfoSet(this.getClass(), conf));
-    ConfiguratorBase.setConnectorInfo(this.getClass(), conf, "testUser",
-        new PasswordToken("testPass"));
-    assertTrue(ConfiguratorBase.isConnectorInfoSet(this.getClass(), conf));
-    assertEquals("testUser", ConfiguratorBase.getPrincipal(this.getClass(), conf));
-    assertEquals("testPass", new String(((PasswordToken) ConfiguratorBase
-        .getClientInfo(this.getClass(), conf).getAuthenticationToken()).getPassword()));
-  }
-
-  @Test
-  public void testSetClientInfo() {
+  public void testSetClientProperties() {
     Configuration conf = new Configuration();
-    ClientInfo info = ClientInfo.from(
-        Accumulo.newClientProperties().to("myinstance", "myzookeepers").as("user", "pass").build());
-    ConfiguratorBase.setClientInfo(this.getClass(), conf, info);
-    ClientInfo info2 = ConfiguratorBase.getClientInfo(this.getClass(), conf);
+    Properties props = Accumulo.newClientProperties().to("myinstance", "myzookeepers")
+        .as("user", "pass").build();
+    assertFalse(ConfiguratorBase.isClientConfigured(this.getClass(), conf));
+    ConfiguratorBase.setClientProperties(this.getClass(), conf, props, null);
+    assertTrue(ConfiguratorBase.isClientConfigured(this.getClass(), conf));
+    Properties props2 = ConfiguratorBase.getClientProperties(this.getClass(), conf);
+    ClientInfo info2 = ClientInfo.from(props2);
     assertEquals("myinstance", info2.getInstanceName());
     assertEquals("myzookeepers", info2.getZooKeepers());
     assertEquals("user", info2.getPrincipal());


Mime
View raw message