accumulo-notifications mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From GitBox <...@apache.org>
Subject [GitHub] mikewalch closed pull request #894: Fix #883 Specify client props as HDFS path in new M/R API
Date Fri, 11 Jan 2019 23:01:09 GMT
mikewalch closed pull request #894: Fix #883 Specify client props as HDFS path in new M/R API
URL: https://github.com/apache/accumulo/pull/894
 
 
   

This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below for
the sake of provenance:

As this is a foreign pull request (from a fork), the diff is supplied
below (as it won't show otherwise due to GitHub magic):

diff --git a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoop/mapred/AccumuloOutputFormat.java
b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoop/mapred/AccumuloOutputFormat.java
index 2386081334..cce542db38 100644
--- a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoop/mapred/AccumuloOutputFormat.java
+++ b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoop/mapred/AccumuloOutputFormat.java
@@ -17,13 +17,14 @@
 package org.apache.accumulo.hadoop.mapred;
 
 import java.io.IOException;
+import java.util.Properties;
 
 import org.apache.accumulo.core.client.Accumulo;
 import org.apache.accumulo.core.client.AccumuloClient;
 import org.apache.accumulo.core.client.AccumuloException;
 import org.apache.accumulo.core.client.AccumuloSecurityException;
 import org.apache.accumulo.core.client.security.tokens.AuthenticationToken;
-import org.apache.accumulo.core.clientImpl.ClientInfo;
+import org.apache.accumulo.core.conf.ClientProperty;
 import org.apache.accumulo.core.data.Mutation;
 import org.apache.accumulo.hadoop.mapreduce.OutputFormatBuilder;
 import org.apache.accumulo.hadoopImpl.mapred.AccumuloRecordWriter;
@@ -46,11 +47,10 @@
 
   @Override
   public void checkOutputSpecs(FileSystem ignored, JobConf job) throws IOException {
-    ClientInfo clientInfo = OutputConfigurator.getClientInfo(CLASS, job);
-    String principal = clientInfo.getPrincipal();
-    AuthenticationToken token = clientInfo.getAuthenticationToken();
-    try (AccumuloClient c = Accumulo.newClient().from(clientInfo.getProperties()).build()) {
-      if (!c.securityOperations().authenticateUser(principal, token))
+    Properties clientProps = OutputConfigurator.getClientProperties(CLASS, job);
+    AuthenticationToken token = ClientProperty.getAuthenticationToken(clientProps);
+    try (AccumuloClient c = Accumulo.newClient().from(clientProps).build()) {
+      if (!c.securityOperations().authenticateUser(c.whoami(), token))
         throw new IOException("Unable to authenticate user");
     } catch (AccumuloException | AccumuloSecurityException e) {
       throw new IOException(e);
diff --git a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoop/mapreduce/AccumuloOutputFormat.java
b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoop/mapreduce/AccumuloOutputFormat.java
index d85b2c8fff..4c8421117b 100644
--- a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoop/mapreduce/AccumuloOutputFormat.java
+++ b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoop/mapreduce/AccumuloOutputFormat.java
@@ -17,13 +17,14 @@
 package org.apache.accumulo.hadoop.mapreduce;
 
 import java.io.IOException;
+import java.util.Properties;
 
 import org.apache.accumulo.core.client.Accumulo;
 import org.apache.accumulo.core.client.AccumuloClient;
 import org.apache.accumulo.core.client.AccumuloException;
 import org.apache.accumulo.core.client.AccumuloSecurityException;
 import org.apache.accumulo.core.client.security.tokens.AuthenticationToken;
-import org.apache.accumulo.core.clientImpl.ClientInfo;
+import org.apache.accumulo.core.conf.ClientProperty;
 import org.apache.accumulo.core.data.Mutation;
 import org.apache.accumulo.hadoopImpl.mapreduce.AccumuloRecordWriter;
 import org.apache.accumulo.hadoopImpl.mapreduce.OutputFormatBuilderImpl;
@@ -57,11 +58,10 @@
 
   @Override
   public void checkOutputSpecs(JobContext job) throws IOException {
-    ClientInfo clientInfo = OutputConfigurator.getClientInfo(CLASS, job.getConfiguration());
-    String principal = clientInfo.getPrincipal();
-    AuthenticationToken token = clientInfo.getAuthenticationToken();
-    try (AccumuloClient c = Accumulo.newClient().from(clientInfo.getProperties()).build()) {
-      if (!c.securityOperations().authenticateUser(principal, token))
+    Properties clientProps = OutputConfigurator.getClientProperties(CLASS, job.getConfiguration());
+    AuthenticationToken token = ClientProperty.getAuthenticationToken(clientProps);
+    try (AccumuloClient c = Accumulo.newClient().from(clientProps).build()) {
+      if (!c.securityOperations().authenticateUser(c.whoami(), token))
         throw new IOException("Unable to authenticate user");
     } catch (AccumuloException | AccumuloSecurityException e) {
       throw new IOException(e);
diff --git a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoop/mapreduce/InputFormatBuilder.java
b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoop/mapreduce/InputFormatBuilder.java
index ddb4deb0c5..d274e7bd1b 100644
--- a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoop/mapreduce/InputFormatBuilder.java
+++ b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoop/mapreduce/InputFormatBuilder.java
@@ -45,14 +45,26 @@
    * @since 2.0
    */
   interface ClientParams<T> {
+
     /**
-     * Set the connection information needed to communicate with Accumulo in this job.
-     * clientProperties param can be created using {@link Accumulo#newClientProperties()}
+     * Set client properties needed to communicate with Accumulo for this job. This information will
+     * be serialized into the configuration. Therefore, it is more secure to use
+     * {@link #clientPropertiesPath(String)}. Client properties can be created using
+     * {@link Accumulo#newClientProperties()}
      *
      * @param clientProperties
      *          Accumulo connection information
      */
     TableParams<T> clientProperties(Properties clientProperties);
+
+    /**
+     * Set path to DFS location containing accumulo-client.properties file. This setting is more
+     * secure than {@link #clientProperties(Properties)}
+     *
+     * @param clientPropsPath
+     *          DFS path to accumulo-client.properties
+     */
+    TableParams<T> clientPropertiesPath(String clientPropsPath);
   }
 
   /**
diff --git a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoop/mapreduce/OutputFormatBuilder.java
b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoop/mapreduce/OutputFormatBuilder.java
index 678ba31efc..134346e137 100644
--- a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoop/mapreduce/OutputFormatBuilder.java
+++ b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoop/mapreduce/OutputFormatBuilder.java
@@ -36,12 +36,23 @@
   interface ClientParams<T> {
     /**
      * Set the connection information needed to communicate with Accumulo in this job.
-     * clientProperties param can be created using {@link Accumulo#newClientProperties()}
+     * clientProperties param can be created using {@link Accumulo#newClientProperties()}. Client
+     * properties will be serialized into configuration. Therefore it is more secure to use
+     * {@link #clientPropertiesPath(String)}
      *
      * @param clientProperties
      *          Accumulo connection information
      */
     OutputOptions<T> clientProperties(Properties clientProperties);
+
+    /**
+     * Set path to DFS location containing accumulo-client.properties file. This setting is more
+     * secure than {@link #clientProperties(Properties)}
+     *
+     * @param clientPropsPath
+     *          DFS path to accumulo-client.properties
+     */
+    OutputOptions<T> clientPropertiesPath(String clientPropsPath);
   }
 
   /**
@@ -80,5 +91,4 @@
      */
     void store(T j);
   }
-
 }
diff --git a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapred/AccumuloRecordReader.java
b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapred/AccumuloRecordReader.java
index a2e386e795..279e53cabd 100644
--- a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapred/AccumuloRecordReader.java
+++ b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapred/AccumuloRecordReader.java
@@ -31,7 +31,6 @@
 import java.util.Random;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.accumulo.core.client.Accumulo;
 import org.apache.accumulo.core.client.AccumuloClient;
 import org.apache.accumulo.core.client.AccumuloException;
 import org.apache.accumulo.core.client.BatchScanner;
@@ -445,7 +444,6 @@ private static void validateOptions(JobConf job, Class<?> callingClass) throws I
    * Creates {@link AccumuloClient} from the configuration
    */
   private static AccumuloClient createClient(JobConf job, Class<?> callingClass) {
-    return Accumulo.newClient()
-        .from(InputConfigurator.getClientInfo(callingClass, job).getProperties()).build();
+    return InputConfigurator.createClient(callingClass, job);
   }
 }
diff --git a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapred/AccumuloRecordWriter.java
b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapred/AccumuloRecordWriter.java
index 4ff1323d9c..5ce64a3d0e 100644
--- a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapred/AccumuloRecordWriter.java
+++ b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapred/AccumuloRecordWriter.java
@@ -22,7 +22,6 @@
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.accumulo.core.client.Accumulo;
 import org.apache.accumulo.core.client.AccumuloClient;
 import org.apache.accumulo.core.client.AccumuloException;
 import org.apache.accumulo.core.client.AccumuloSecurityException;
@@ -77,8 +76,7 @@ public AccumuloRecordWriter(JobConf job) {
     this.defaultTableName = (tname == null) ? null : new Text(tname);
 
     if (!simulate) {
-      this.client = Accumulo.newClient()
-          .from(OutputConfigurator.getClientInfo(CLASS, job).getProperties()).build();
+      this.client = OutputConfigurator.createClient(CLASS, job);
       mtbw = client.createMultiTableBatchWriter();
     }
   }
diff --git a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/AccumuloRecordReader.java
b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/AccumuloRecordReader.java
index 32fc759fa5..8e7d0685c1 100644
--- a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/AccumuloRecordReader.java
+++ b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/AccumuloRecordReader.java
@@ -31,7 +31,6 @@
 import java.util.Random;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.accumulo.core.client.Accumulo;
 import org.apache.accumulo.core.client.AccumuloClient;
 import org.apache.accumulo.core.client.AccumuloException;
 import org.apache.accumulo.core.client.BatchScanner;
@@ -45,7 +44,6 @@
 import org.apache.accumulo.core.client.TableOfflineException;
 import org.apache.accumulo.core.client.sample.SamplerConfiguration;
 import org.apache.accumulo.core.clientImpl.ClientContext;
-import org.apache.accumulo.core.clientImpl.ClientInfo;
 import org.apache.accumulo.core.clientImpl.OfflineScanner;
 import org.apache.accumulo.core.clientImpl.ScannerImpl;
 import org.apache.accumulo.core.clientImpl.Table;
@@ -463,17 +461,10 @@ private static void validateOptions(JobContext context, Class<?> callingClass)
     return splits;
   }
 
-  /**
-   * Gets the {@link ClientInfo} from the configuration
-   */
-  private static ClientInfo getClientInfo(JobContext context, Class<?> callingClass) {
-    return InputConfigurator.getClientInfo(callingClass, context.getConfiguration());
-  }
-
   /**
    * Creates {@link AccumuloClient} from the configuration
    */
   private static AccumuloClient createClient(JobContext context, Class<?> callingClass) {
-    return Accumulo.newClient().from(getClientInfo(context, callingClass).getProperties()).build();
+    return InputConfigurator.createClient(callingClass, context.getConfiguration());
   }
 }
diff --git a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/AccumuloRecordWriter.java
b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/AccumuloRecordWriter.java
index 3fafdb60d3..680d8139f2 100644
--- a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/AccumuloRecordWriter.java
+++ b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/AccumuloRecordWriter.java
@@ -22,7 +22,6 @@
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.accumulo.core.client.Accumulo;
 import org.apache.accumulo.core.client.AccumuloClient;
 import org.apache.accumulo.core.client.AccumuloException;
 import org.apache.accumulo.core.client.AccumuloSecurityException;
@@ -78,8 +77,7 @@ public AccumuloRecordWriter(TaskAttemptContext context) {
     this.defaultTableName = (tname == null) ? null : new Text(tname);
 
     if (!simulate) {
-      this.client = Accumulo.newClient()
-          .from(OutputConfigurator.getClientInfo(CLASS, conf).getProperties()).build();
+      this.client = OutputConfigurator.createClient(CLASS, conf);
       mtbw = client.createMultiTableBatchWriter();
     }
   }
diff --git a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/InputFormatBuilderImpl.java
b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/InputFormatBuilderImpl.java
index 119a921231..5b3bac0e87 100644
--- a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/InputFormatBuilderImpl.java
+++ b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/InputFormatBuilderImpl.java
@@ -30,7 +30,7 @@
 import org.apache.accumulo.core.client.AccumuloSecurityException;
 import org.apache.accumulo.core.client.IteratorSetting;
 import org.apache.accumulo.core.client.sample.SamplerConfiguration;
-import org.apache.accumulo.core.clientImpl.ClientInfo;
+import org.apache.accumulo.core.conf.ClientProperty;
 import org.apache.accumulo.core.data.Range;
 import org.apache.accumulo.core.security.Authorizations;
 import org.apache.accumulo.hadoop.mapreduce.InputFormatBuilder;
@@ -46,11 +46,11 @@
     implements InputFormatBuilder, InputFormatBuilder.ClientParams<T>,
     InputFormatBuilder.TableParams<T>, InputFormatBuilder.InputFormatOptions<T> {
 
-  Class<?> callingClass;
-  ClientInfo clientInfo;
-
-  String currentTable;
-  Map<String,InputTableConfig> tableConfigMap = Collections.emptyMap();
+  private Class<?> callingClass;
+  private Properties clientProps;
+  private String clientPropsPath;
+  private String currentTable;
+  private Map<String,InputTableConfig> tableConfigMap = Collections.emptyMap();
 
   public InputFormatBuilderImpl(Class<?> callingClass) {
     this.callingClass = callingClass;
@@ -58,8 +58,15 @@ public InputFormatBuilderImpl(Class<?> callingClass) {
 
   @Override
   public InputFormatBuilder.TableParams<T> clientProperties(Properties clientProperties) {
-    this.clientInfo = ClientInfo
-        .from(Objects.requireNonNull(clientProperties, "clientProperties must not be null"));
+    this.clientProps = Objects.requireNonNull(clientProperties,
+        "clientProperties must not be null");
+    return this;
+  }
+
+  @Override
+  public TableParams<T> clientPropertiesPath(String clientPropsPath) {
+    this.clientPropsPath = Objects.requireNonNull(clientPropsPath,
+        "clientPropsPath must not be null");
     return this;
   }
 
@@ -182,7 +189,7 @@ private void store(Job job) throws AccumuloException, AccumuloSecurityException
   }
 
   private void _store(Configuration conf) throws AccumuloException, AccumuloSecurityException {
-    InputConfigurator.setClientInfo(callingClass, conf, clientInfo);
+    InputConfigurator.setClientProperties(callingClass, conf, clientProps, clientPropsPath);
     if (tableConfigMap.size() == 0) {
       throw new IllegalArgumentException("At least one Table must be configured for job.");
     }
@@ -191,8 +198,13 @@ private void _store(Configuration conf) throws AccumuloException, AccumuloSecuri
       Map.Entry<String,InputTableConfig> entry = tableConfigMap.entrySet().iterator().next();
       InputConfigurator.setInputTableName(callingClass, conf, entry.getKey());
       InputTableConfig config = entry.getValue();
-      if (!config.getScanAuths().isPresent())
-        config.setScanAuths(getUserAuths(clientInfo));
+      if (!config.getScanAuths().isPresent()) {
+        Properties props = InputConfigurator.getClientProperties(callingClass, conf);
+        try (AccumuloClient c = Accumulo.newClient().from(props).build()) {
+          String principal = ClientProperty.AUTH_PRINCIPAL.getValue(props);
+          config.setScanAuths(c.securityOperations().getUserAuthorizations(principal));
+        }
+      }
       InputConfigurator.setScanAuthorizations(callingClass, conf, config.getScanAuths().get());
       // all optional values
       if (config.getContext().isPresent())
@@ -224,12 +236,4 @@ private void _store(Configuration conf) throws AccumuloException, AccumuloSecuri
   private void store(JobConf jobConf) throws AccumuloException, AccumuloSecurityException {
     _store(jobConf);
   }
-
-  private Authorizations getUserAuths(ClientInfo clientInfo)
-      throws AccumuloSecurityException, AccumuloException {
-    try (AccumuloClient c = Accumulo.newClient().from(clientInfo.getProperties()).build()) {
-      return c.securityOperations().getUserAuthorizations(clientInfo.getPrincipal());
-    }
-  }
-
 }
diff --git a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/OutputFormatBuilderImpl.java
b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/OutputFormatBuilderImpl.java
index b0c76563df..d7582eaac6 100644
--- a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/OutputFormatBuilderImpl.java
+++ b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/OutputFormatBuilderImpl.java
@@ -20,7 +20,6 @@
 import java.util.Optional;
 import java.util.Properties;
 
-import org.apache.accumulo.core.clientImpl.ClientInfo;
 import org.apache.accumulo.hadoop.mapreduce.OutputFormatBuilder;
 import org.apache.accumulo.hadoopImpl.mapreduce.lib.OutputConfigurator;
 import org.apache.hadoop.conf.Configuration;
@@ -30,12 +29,13 @@
 public class OutputFormatBuilderImpl<T>
     implements OutputFormatBuilder.ClientParams<T>, OutputFormatBuilder.OutputOptions<T> {
   private final Class<?> callingClass;
-  ClientInfo clientInfo;
+  private Properties clientProps;
+  private String clientPropsPath;
 
   // optional values
-  Optional<String> defaultTableName = Optional.empty();
-  boolean createTables = false;
-  boolean simulationMode = false;
+  private Optional<String> defaultTableName = Optional.empty();
+  private boolean createTables = false;
+  private boolean simulationMode = false;
 
   public OutputFormatBuilderImpl(Class<?> callingClass) {
     this.callingClass = callingClass;
@@ -43,8 +43,15 @@ public OutputFormatBuilderImpl(Class<?> callingClass) {
 
   @Override
   public OutputFormatBuilder.OutputOptions<T> clientProperties(Properties clientProperties) {
-    this.clientInfo = ClientInfo
-        .from(Objects.requireNonNull(clientProperties, "ClientInfo must not be null"));
+    this.clientProps = Objects.requireNonNull(clientProperties,
+        "clientProperties must not be null");
+    return this;
+  }
+
+  @Override
+  public OutputFormatBuilder.OutputOptions<T> clientPropertiesPath(String clientPropsPath) {
+    this.clientPropsPath = Objects.requireNonNull(clientPropsPath,
+        "clientPropsPath must not be null");
     return this;
   }
 
@@ -82,7 +89,7 @@ private void store(Job job) {
   }
 
   private void _store(Configuration conf) {
-    OutputConfigurator.setClientInfo(callingClass, conf, clientInfo);
+    OutputConfigurator.setClientProperties(callingClass, conf, clientProps, clientPropsPath);
     if (defaultTableName.isPresent())
       OutputConfigurator.setDefaultTableName(callingClass, conf, defaultTableName.get());
     OutputConfigurator.setCreateTables(callingClass, conf, createTables);
diff --git a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/lib/ConfiguratorBase.java
b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/lib/ConfiguratorBase.java
index dd2c3dc438..810320dd3a 100644
--- a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/lib/ConfiguratorBase.java
+++ b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/lib/ConfiguratorBase.java
@@ -16,8 +16,6 @@
  */
 package org.apache.accumulo.hadoopImpl.mapreduce.lib;
 
-import static com.google.common.base.Preconditions.checkArgument;
-
 import java.io.IOException;
 import java.io.StringReader;
 import java.io.StringWriter;
@@ -29,41 +27,19 @@
 import org.apache.accumulo.core.Constants;
 import org.apache.accumulo.core.client.Accumulo;
 import org.apache.accumulo.core.client.AccumuloClient;
-import org.apache.accumulo.core.client.admin.DelegationTokenConfig;
-import org.apache.accumulo.core.client.security.tokens.AuthenticationToken;
-import org.apache.accumulo.core.client.security.tokens.KerberosToken;
-import org.apache.accumulo.core.clientImpl.AuthenticationTokenIdentifier;
-import org.apache.accumulo.core.clientImpl.ClientInfo;
-import org.apache.accumulo.core.clientImpl.ClientInfoImpl;
-import org.apache.accumulo.core.clientImpl.DelegationTokenImpl;
-import org.apache.accumulo.core.conf.ClientProperty;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.util.StringUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 /**
  * @since 1.6.0
  */
 public class ConfiguratorBase {
 
-  private static final Logger log = LoggerFactory.getLogger(ConfiguratorBase.class);
-
-  /**
-   * Specifies that connection info was configured
-   *
-   * @since 1.6.0
-   */
-  public enum ConnectorInfo {
-    IS_CONFIGURED
-  }
-
   public enum ClientOpts {
-    CLIENT_PROPS, CLIENT_PROPS_FILE
+    CLIENT_PROPS, CLIENT_PROPS_FILE, IS_CONFIGURED
   }
 
   /**
@@ -102,69 +78,26 @@ protected static String enumToConfKey(Enum<?> e) {
         + StringUtils.camelize(e.name().toLowerCase());
   }
 
-  public static ClientInfo updateToken(org.apache.hadoop.security.Credentials credentials,
-      ClientInfo info) {
-    ClientInfo result = info;
-    if (info.getAuthenticationToken() instanceof KerberosToken) {
-      log.info("Received KerberosToken, attempting to fetch DelegationToken");
-      try (AccumuloClient client = Accumulo.newClient().from(info.getProperties()).build()) {
-        AuthenticationToken token = client.securityOperations()
-            .getDelegationToken(new DelegationTokenConfig());
-        result = ClientInfo.from(Accumulo.newClientProperties().from(info.getProperties())
-            .as(info.getPrincipal(), token).build());
-      } catch (Exception e) {
-        log.warn("Failed to automatically obtain DelegationToken, "
-            + "Mappers/Reducers will likely fail to communicate with Accumulo", e);
-      }
-    }
-    // DelegationTokens can be passed securely from user to task without serializing insecurely in
-    // the configuration
-    if (info.getAuthenticationToken() instanceof DelegationTokenImpl) {
-      DelegationTokenImpl delegationToken = (DelegationTokenImpl) info.getAuthenticationToken();
-
-      // Convert it into a Hadoop Token
-      AuthenticationTokenIdentifier identifier = delegationToken.getIdentifier();
-      Token<AuthenticationTokenIdentifier> hadoopToken = new Token<>(identifier.getBytes(),
-          delegationToken.getPassword(), identifier.getKind(), delegationToken.getServiceName());
-
-      // Add the Hadoop Token to the Job so it gets serialized and passed along.
-      credentials.addToken(hadoopToken.getService(), hadoopToken);
-    }
-    return result;
-  }
-
-  public static void setClientInfo(Class<?> implementingClass, Configuration conf,
-      ClientInfo info) {
-    setClientProperties(implementingClass, conf, info.getProperties());
-    conf.setBoolean(enumToConfKey(implementingClass, ConnectorInfo.IS_CONFIGURED), true);
-  }
-
-  public static ClientInfo getClientInfo(Class<?> implementingClass, Configuration conf) {
-    Properties props = getClientProperties(implementingClass, conf);
-    return new ClientInfoImpl(props);
-  }
-
-  public static void setClientPropertiesFile(Class<?> implementingClass, Configuration conf,
-      String clientPropertiesFile) {
-    try {
-      DistributedCacheHelper.addCacheFile(new URI(clientPropertiesFile), conf);
-    } catch (URISyntaxException e) {
-      throw new IllegalStateException("Unable to add client properties file \""
-          + clientPropertiesFile + "\" to distributed cache.");
-    }
-    conf.set(enumToConfKey(implementingClass, ClientOpts.CLIENT_PROPS_FILE), clientPropertiesFile);
-    conf.setBoolean(enumToConfKey(implementingClass, ConnectorInfo.IS_CONFIGURED), true);
-  }
-
   public static void setClientProperties(Class<?> implementingClass, Configuration conf,
-      Properties props) {
-    StringWriter writer = new StringWriter();
-    try {
-      props.store(writer, "client properties");
-    } catch (IOException e) {
-      throw new IllegalStateException(e);
+      Properties props, String clientPropsPath) {
+    if (clientPropsPath != null) {
+      try {
+        DistributedCacheHelper.addCacheFile(new URI(clientPropsPath), conf);
+      } catch (URISyntaxException e) {
+        throw new IllegalStateException("Unable to add client properties file \"" + clientPropsPath
+            + "\" to distributed cache.");
+      }
+      conf.set(enumToConfKey(implementingClass, ClientOpts.CLIENT_PROPS_FILE), clientPropsPath);
+    } else {
+      StringWriter writer = new StringWriter();
+      try {
+        props.store(writer, "client properties");
+      } catch (IOException e) {
+        throw new IllegalStateException(e);
+      }
+      conf.set(enumToConfKey(implementingClass, ClientOpts.CLIENT_PROPS), writer.toString());
     }
-    conf.set(enumToConfKey(implementingClass, ClientOpts.CLIENT_PROPS), writer.toString());
+    conf.setBoolean(enumToConfKey(implementingClass, ClientOpts.IS_CONFIGURED), true);
   }
 
   public static Properties getClientProperties(Class<?> implementingClass, Configuration conf) {
@@ -207,35 +140,6 @@ public static Properties getClientProperties(Class<?> implementingClass, Configu
     return props;
   }
 
-  /**
-   * Sets the connector information needed to communicate with Accumulo in this job.
-   *
-   * <p>
-   * <b>WARNING:</b> The serialized token is stored in the configuration and shared with all
-   * MapReduce tasks. It is BASE64 encoded to provide a charset safe conversion to a string, and is
-   * not intended to be secure.
-   *
-   * @param implementingClass
-   *          the class whose name will be used as a prefix for the property configuration key
-   * @param conf
-   *          the Hadoop configuration object to configure
-   * @param principal
-   *          a valid Accumulo user name
-   * @param token
-   *          the user's password
-   * @since 1.6.0
-   */
-  public static void setConnectorInfo(Class<?> implementingClass, Configuration conf,
-      String principal, AuthenticationToken token) {
-    checkArgument(principal != null, "principal is null");
-    checkArgument(token != null, "token is null");
-    Properties props = getClientProperties(implementingClass, conf);
-    props.setProperty(ClientProperty.AUTH_PRINCIPAL.getKey(), principal);
-    ClientProperty.setAuthenticationToken(props, token);
-    setClientProperties(implementingClass, conf, props);
-    conf.setBoolean(enumToConfKey(implementingClass, ConnectorInfo.IS_CONFIGURED), true);
-  }
-
   /**
    * Determines if the connector info has already been set for this instance.
    *
@@ -245,44 +149,9 @@ public static void setConnectorInfo(Class<?> implementingClass, Configuration co
    *          the Hadoop configuration object to configure
    * @return true if the connector info has already been set, false otherwise
    * @since 1.6.0
-   * @see #setConnectorInfo(Class, Configuration, String, AuthenticationToken)
-   */
-  public static Boolean isConnectorInfoSet(Class<?> implementingClass, Configuration conf) {
-    return conf.getBoolean(enumToConfKey(implementingClass, ConnectorInfo.IS_CONFIGURED),
false);
-  }
-
-  /**
-   * Gets the user name from the configuration.
-   *
-   * @param implementingClass
-   *          the class whose name will be used as a prefix for the property configuration key
-   * @param conf
-   *          the Hadoop configuration object to configure
-   * @return the principal
-   * @since 1.6.0
-   * @see #setConnectorInfo(Class, Configuration, String, AuthenticationToken)
-   */
-  public static String getPrincipal(Class<?> implementingClass, Configuration conf)
{
-    Properties props = getClientProperties(implementingClass, conf);
-    return props.getProperty(ClientProperty.AUTH_PRINCIPAL.getKey());
-  }
-
-  /**
-   * Gets the authenticated token from either the specified token file or directly from the
-   * configuration, whichever was used when the job was configured.
-   *
-   * @param implementingClass
-   *          the class whose name will be used as a prefix for the property configuration key
-   * @param conf
-   *          the Hadoop configuration object to configure
-   * @return the principal's authentication token
-   * @since 1.6.0
-   * @see #setConnectorInfo(Class, Configuration, String, AuthenticationToken)
    */
-  public static AuthenticationToken getAuthenticationToken(Class<?> implementingClass,
-      Configuration conf) {
-    Properties props = getClientProperties(implementingClass, conf);
-    return ClientProperty.getAuthenticationToken(props);
+  public static Boolean isClientConfigured(Class<?> implementingClass, Configuration conf) {
+    return conf.getBoolean(enumToConfKey(implementingClass, ClientOpts.IS_CONFIGURED), false);
   }
 
   /**
diff --git a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/lib/InputConfigurator.java
b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/lib/InputConfigurator.java
index e1da2b9db7..19e7f0cefb 100644
--- a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/lib/InputConfigurator.java
+++ b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/lib/InputConfigurator.java
@@ -34,6 +34,7 @@
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
+import java.util.Properties;
 import java.util.Set;
 import java.util.StringTokenizer;
 
@@ -52,6 +53,7 @@
 import org.apache.accumulo.core.clientImpl.Table;
 import org.apache.accumulo.core.clientImpl.Tables;
 import org.apache.accumulo.core.clientImpl.TabletLocator;
+import org.apache.accumulo.core.conf.ClientProperty;
 import org.apache.accumulo.core.data.Key;
 import org.apache.accumulo.core.data.PartialKey;
 import org.apache.accumulo.core.data.Range;
@@ -733,10 +735,8 @@ public static void validatePermissions(Class<?> implementingClass,
Configuration
       if (getInputTableConfigs(implementingClass, conf).size() == 0)
         throw new IOException("No table set.");
 
-      String principal = getPrincipal(implementingClass, conf);
-      if (principal == null) {
-        principal = getClientInfo(implementingClass, conf).getPrincipal();
-      }
+      Properties props = getClientProperties(implementingClass, conf);
+      String principal = ClientProperty.AUTH_PRINCIPAL.getValue(props);
 
       for (Map.Entry<String,InputTableConfig> tableConfig : inputTableConfigs.entrySet())
{
         if (!client.securityOperations().hasTablePermission(principal, tableConfig.getKey(),
diff --git a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/lib/OutputConfigurator.java
b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/lib/OutputConfigurator.java
index 9cdca7b64e..027355f96e 100644
--- a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/lib/OutputConfigurator.java
+++ b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/lib/OutputConfigurator.java
@@ -27,7 +27,6 @@
 
 import org.apache.accumulo.core.client.BatchWriter;
 import org.apache.accumulo.core.client.BatchWriterConfig;
-import org.apache.accumulo.core.clientImpl.ClientInfo;
 import org.apache.accumulo.core.clientImpl.DurabilityImpl;
 import org.apache.hadoop.conf.Configuration;
 
@@ -93,8 +92,7 @@ public static String getDefaultTableName(Class<?> implementingClass,
Configurati
   public static BatchWriterConfig getBatchWriterOptions(Class<?> implementingClass,
       Configuration conf) {
     BatchWriterConfig bwConfig = new BatchWriterConfig();
-    ClientInfo info = getClientInfo(implementingClass, conf);
-    Properties props = info.getProperties();
+    Properties props = getClientProperties(implementingClass, conf);
     String property = props.getProperty(BATCH_WRITER_DURABILITY.getKey());
     if (property != null)
       bwConfig.setDurability(DurabilityImpl.fromString(property));
diff --git a/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/its/mapred/AccumuloOutputFormatIT.java
b/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/its/mapred/AccumuloOutputFormatIT.java
index 74ea09f994..7291bad268 100644
--- a/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/its/mapred/AccumuloOutputFormatIT.java
+++ b/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/its/mapred/AccumuloOutputFormatIT.java
@@ -227,5 +227,4 @@ public void testMR() throws Exception {
       }
     }
   }
-
 }
diff --git a/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoopImpl/mapreduce/lib/ConfiguratorBaseTest.java
b/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoopImpl/mapreduce/lib/ConfiguratorBaseTest.java
index 6334f85400..cfe154e05e 100644
--- a/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoopImpl/mapreduce/lib/ConfiguratorBaseTest.java
+++ b/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoopImpl/mapreduce/lib/ConfiguratorBaseTest.java
@@ -18,12 +18,12 @@
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
 
+import java.util.Properties;
+
 import org.apache.accumulo.core.Constants;
 import org.apache.accumulo.core.client.Accumulo;
-import org.apache.accumulo.core.client.security.tokens.AuthenticationToken;
 import org.apache.accumulo.core.client.security.tokens.PasswordToken;
 import org.apache.accumulo.core.clientImpl.ClientInfo;
 import org.apache.hadoop.conf.Configuration;
@@ -44,38 +44,15 @@ public void testEnumToConfKey() {
   }
 
   @Test
-  public void testSetConnectorInfoClassOfQConfigurationStringAuthenticationToken() {
-    Configuration conf = new Configuration();
-    assertFalse(ConfiguratorBase.isConnectorInfoSet(this.getClass(), conf));
-    ConfiguratorBase.setConnectorInfo(this.getClass(), conf, "testUser",
-        new PasswordToken("testPassword"));
-    assertTrue(ConfiguratorBase.isConnectorInfoSet(this.getClass(), conf));
-    assertEquals("testUser", ConfiguratorBase.getPrincipal(this.getClass(), conf));
-    AuthenticationToken token = ConfiguratorBase.getAuthenticationToken(this.getClass(),
conf);
-    assertNotNull(token);
-    assertEquals(PasswordToken.class, token.getClass());
-    assertEquals(new PasswordToken("testPassword"), token);
-  }
-
-  @Test
-  public void testSetConnectorInfoClassOfQConfigurationStringString() {
-    Configuration conf = new Configuration();
-    assertFalse(ConfiguratorBase.isConnectorInfoSet(this.getClass(), conf));
-    ConfiguratorBase.setConnectorInfo(this.getClass(), conf, "testUser",
-        new PasswordToken("testPass"));
-    assertTrue(ConfiguratorBase.isConnectorInfoSet(this.getClass(), conf));
-    assertEquals("testUser", ConfiguratorBase.getPrincipal(this.getClass(), conf));
-    assertEquals("testPass", new String(((PasswordToken) ConfiguratorBase
-        .getClientInfo(this.getClass(), conf).getAuthenticationToken()).getPassword()));
-  }
-
-  @Test
-  public void testSetClientInfo() {
+  public void testSetClientProperties() {
     Configuration conf = new Configuration();
-    ClientInfo info = ClientInfo.from(
-        Accumulo.newClientProperties().to("myinstance", "myzookeepers").as("user", "pass").build());
-    ConfiguratorBase.setClientInfo(this.getClass(), conf, info);
-    ClientInfo info2 = ConfiguratorBase.getClientInfo(this.getClass(), conf);
+    Properties props = Accumulo.newClientProperties().to("myinstance", "myzookeepers")
+        .as("user", "pass").build();
+    assertFalse(ConfiguratorBase.isClientConfigured(this.getClass(), conf));
+    ConfiguratorBase.setClientProperties(this.getClass(), conf, props, null);
+    assertTrue(ConfiguratorBase.isClientConfigured(this.getClass(), conf));
+    Properties props2 = ConfiguratorBase.getClientProperties(this.getClass(), conf);
+    ClientInfo info2 = ClientInfo.from(props2);
     assertEquals("myinstance", info2.getInstanceName());
     assertEquals("myzookeepers", info2.getZooKeepers());
     assertEquals("user", info2.getPrincipal());


 

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services

Mime
View raw message