accumulo-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From mmil...@apache.org
Subject [accumulo] branch master updated: Remove CachedConfiguration (#916)
Date Fri, 25 Jan 2019 23:39:14 GMT
This is an automated email from the ASF dual-hosted git repository.

mmiller pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/accumulo.git


The following commit(s) were added to refs/heads/master by this push:
     new a423f95  Remove CachedConfiguration (#916)
a423f95 is described below

commit a423f954cbeca9ac0873e147f7e00124b2d116a7
Author: Mike Miller <mmiller@apache.org>
AuthorDate: Fri Jan 25 18:39:10 2019 -0500

    Remove CachedConfiguration (#916)
    
    * Replace static class with non static instances
---
 .../security/tokens/CredentialProviderToken.java   |  3 +-
 .../core/conf/CredentialProviderFactoryShim.java   | 15 ----------
 .../accumulo/core/conf/SiteConfiguration.java      |  4 +--
 .../accumulo/core/util/CachedConfiguration.java    | 35 ----------------------
 .../apache/accumulo/core/volume/VolumeImpl.java    |  6 ++--
 .../conf/CredentialProviderFactoryShimTest.java    |  4 ++-
 .../accumulo/core/volume/VolumeImplTest.java       | 14 ++++++---
 .../miniclusterImpl/MiniAccumuloConfigImpl.java    |  3 +-
 .../accumulo/server/fs/VolumeManagerImpl.java      | 22 +++++++-------
 .../tserver/TabletServerSyncCheckTest.java         |  2 +-
 10 files changed, 34 insertions(+), 74 deletions(-)

diff --git a/core/src/main/java/org/apache/accumulo/core/client/security/tokens/CredentialProviderToken.java b/core/src/main/java/org/apache/accumulo/core/client/security/tokens/CredentialProviderToken.java
index ee2d60d..de6e2cb 100644
--- a/core/src/main/java/org/apache/accumulo/core/client/security/tokens/CredentialProviderToken.java
+++ b/core/src/main/java/org/apache/accumulo/core/client/security/tokens/CredentialProviderToken.java
@@ -24,7 +24,6 @@ import java.util.LinkedHashSet;
 import java.util.Set;
 
 import org.apache.accumulo.core.conf.CredentialProviderFactoryShim;
-import org.apache.accumulo.core.util.CachedConfiguration;
 import org.apache.hadoop.conf.Configuration;
 
 /**
@@ -51,7 +50,7 @@ public class CredentialProviderToken extends PasswordToken {
       throws IOException {
     this.name = name;
     this.credentialProviders = credentialProviders;
-    final Configuration conf = new Configuration(CachedConfiguration.getInstance());
+    final Configuration conf = new Configuration();
     conf.set(CredentialProviderFactoryShim.CREDENTIAL_PROVIDER_PATH, credentialProviders);
 
     char[] password = CredentialProviderFactoryShim.getValueFromCredentialProvider(conf, name);
diff --git a/core/src/main/java/org/apache/accumulo/core/conf/CredentialProviderFactoryShim.java b/core/src/main/java/org/apache/accumulo/core/conf/CredentialProviderFactoryShim.java
index 457be13..2f69e0d 100644
--- a/core/src/main/java/org/apache/accumulo/core/conf/CredentialProviderFactoryShim.java
+++ b/core/src/main/java/org/apache/accumulo/core/conf/CredentialProviderFactoryShim.java
@@ -26,7 +26,6 @@ import java.util.Collections;
 import java.util.List;
 import java.util.concurrent.ConcurrentHashMap;
 
-import org.apache.accumulo.core.util.CachedConfiguration;
 import org.apache.hadoop.conf.Configuration;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -312,20 +311,6 @@ public class CredentialProviderFactoryShim {
   }
 
   /**
-   * Create a Hadoop {@link Configuration} with the appropriate members to access
-   * CredentialProviders
-   *
-   * @param credentialProviders
-   *          Comma-separated list of CredentialProvider URLs
-   * @return Configuration to be used for CredentialProvider
-   */
-  public static Configuration getConfiguration(String credentialProviders) {
-    requireNonNull(credentialProviders);
-    return getConfiguration(new Configuration(CachedConfiguration.getInstance()),
-        credentialProviders);
-  }
-
-  /**
    * Adds the Credential Provider configuration elements to the provided {@link Configuration}.
    *
    * @param conf
diff --git a/core/src/main/java/org/apache/accumulo/core/conf/SiteConfiguration.java b/core/src/main/java/org/apache/accumulo/core/conf/SiteConfiguration.java
index 5a84f42..e875318 100644
--- a/core/src/main/java/org/apache/accumulo/core/conf/SiteConfiguration.java
+++ b/core/src/main/java/org/apache/accumulo/core/conf/SiteConfiguration.java
@@ -27,7 +27,6 @@ import java.util.HashMap;
 import java.util.Map;
 import java.util.function.Predicate;
 
-import org.apache.accumulo.core.util.CachedConfiguration;
 import org.apache.commons.configuration.CompositeConfiguration;
 import org.apache.commons.configuration.ConfigurationException;
 import org.apache.commons.configuration.PropertiesConfiguration;
@@ -111,8 +110,7 @@ public class SiteConfiguration extends AccumuloConfiguration {
     // Add sensitive properties from credential provider (if set)
     String credProvider = result.get(Property.GENERAL_SECURITY_CREDENTIAL_PROVIDER_PATHS.getKey());
     if (credProvider != null) {
-      org.apache.hadoop.conf.Configuration hadoopConf = new org.apache.hadoop.conf.Configuration(
-          CachedConfiguration.getInstance());
+      org.apache.hadoop.conf.Configuration hadoopConf = new org.apache.hadoop.conf.Configuration();
       hadoopConf.set(CredentialProviderFactoryShim.CREDENTIAL_PROVIDER_PATH, credProvider);
       for (Property property : Property.values()) {
         if (property.isSensitive()) {
diff --git a/core/src/main/java/org/apache/accumulo/core/util/CachedConfiguration.java b/core/src/main/java/org/apache/accumulo/core/util/CachedConfiguration.java
deleted file mode 100644
index f5206c6..0000000
--- a/core/src/main/java/org/apache/accumulo/core/util/CachedConfiguration.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.accumulo.core.util;
-
-import org.apache.hadoop.conf.Configuration;
-
-public class CachedConfiguration {
-  private static Configuration configuration = null;
-
-  public static synchronized Configuration getInstance() {
-    if (configuration == null)
-      setInstance(new Configuration());
-    return configuration;
-  }
-
-  public static synchronized Configuration setInstance(Configuration update) {
-    Configuration result = configuration;
-    configuration = update;
-    return result;
-  }
-}
diff --git a/core/src/main/java/org/apache/accumulo/core/volume/VolumeImpl.java b/core/src/main/java/org/apache/accumulo/core/volume/VolumeImpl.java
index a19b215..0ae6690 100644
--- a/core/src/main/java/org/apache/accumulo/core/volume/VolumeImpl.java
+++ b/core/src/main/java/org/apache/accumulo/core/volume/VolumeImpl.java
@@ -21,7 +21,6 @@ import static java.util.Objects.requireNonNull;
 import java.io.IOException;
 import java.util.Objects;
 
-import org.apache.accumulo.core.util.CachedConfiguration;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -37,6 +36,7 @@ public class VolumeImpl implements Volume {
 
   protected final FileSystem fs;
   protected final String basePath;
+  private final Configuration hadoopConf;
 
   public VolumeImpl(Path path, Configuration conf) throws IOException {
     requireNonNull(path);
@@ -44,6 +44,7 @@ public class VolumeImpl implements Volume {
 
     this.fs = path.getFileSystem(conf);
     this.basePath = path.toUri().getPath();
+    this.hadoopConf = conf;
   }
 
   public VolumeImpl(FileSystem fs, String basePath) {
@@ -52,6 +53,7 @@ public class VolumeImpl implements Volume {
 
     this.fs = fs;
     this.basePath = basePath;
+    this.hadoopConf = fs.getConf();
   }
 
   @Override
@@ -75,7 +77,7 @@ public class VolumeImpl implements Volume {
 
     FileSystem other;
     try {
-      other = p.getFileSystem(CachedConfiguration.getInstance());
+      other = p.getFileSystem(hadoopConf);
     } catch (IOException e) {
       log.warn("Could not determine filesystem from path: {}", p, e);
       return false;
diff --git a/core/src/test/java/org/apache/accumulo/core/conf/CredentialProviderFactoryShimTest.java b/core/src/test/java/org/apache/accumulo/core/conf/CredentialProviderFactoryShimTest.java
index 5e82bfb..5083f68 100644
--- a/core/src/test/java/org/apache/accumulo/core/conf/CredentialProviderFactoryShimTest.java
+++ b/core/src/test/java/org/apache/accumulo/core/conf/CredentialProviderFactoryShimTest.java
@@ -44,6 +44,7 @@ import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
 
 public class CredentialProviderFactoryShimTest {
 
+  private static final Configuration hadoopConf = new Configuration();
   private static final Logger log = LoggerFactory
       .getLogger(CredentialProviderFactoryShimTest.class);
 
@@ -148,7 +149,8 @@ public class CredentialProviderFactoryShimTest {
   @Test
   public void testConfigurationCreation() {
     final String path = "jceks://file/tmp/foo.jks";
-    final Configuration actualConf = CredentialProviderFactoryShim.getConfiguration(path);
+    final Configuration actualConf = CredentialProviderFactoryShim.getConfiguration(hadoopConf,
+        path);
     assertNotNull(actualConf);
     assertEquals(path, actualConf.get(CredentialProviderFactoryShim.CREDENTIAL_PROVIDER_PATH));
   }
diff --git a/core/src/test/java/org/apache/accumulo/core/volume/VolumeImplTest.java b/core/src/test/java/org/apache/accumulo/core/volume/VolumeImplTest.java
index ec240f0..90e39ef 100644
--- a/core/src/test/java/org/apache/accumulo/core/volume/VolumeImplTest.java
+++ b/core/src/test/java/org/apache/accumulo/core/volume/VolumeImplTest.java
@@ -25,6 +25,7 @@ import static org.junit.Assert.assertTrue;
 
 import java.net.URI;
 
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.junit.Test;
@@ -33,16 +34,19 @@ public class VolumeImplTest {
 
   @Test
   public void testFileSystemInequivalence() {
+    Configuration hadoopConf = createMock(Configuration.class);
     FileSystem fs = createMock(FileSystem.class), other = createMock(FileSystem.class);
-    String basePath = "/accumulo";
 
-    VolumeImpl volume = new VolumeImpl(fs, basePath);
+    String basePath = "/accumulo";
 
+    expect(fs.getConf()).andReturn(hadoopConf).anyTimes();
     expect(fs.getUri()).andReturn(URI.create("hdfs://localhost:8020")).anyTimes();
     expect(other.getUri()).andReturn(URI.create("hdfs://otherhost:8020")).anyTimes();
 
     replay(fs, other);
 
+    VolumeImpl volume = new VolumeImpl(fs, basePath);
+
     assertFalse(volume.equivalentFileSystems(other));
 
     verify(fs, other);
@@ -50,16 +54,18 @@ public class VolumeImplTest {
 
   @Test
   public void testFileSystemEquivalence() {
+    Configuration hadoopConf = createMock(Configuration.class);
     FileSystem fs = createMock(FileSystem.class), other = createMock(FileSystem.class);
     String basePath = "/accumulo";
 
-    VolumeImpl volume = new VolumeImpl(fs, basePath);
-
+    expect(fs.getConf()).andReturn(hadoopConf).anyTimes();
     expect(fs.getUri()).andReturn(URI.create("hdfs://myhost:8020")).anyTimes();
     expect(other.getUri()).andReturn(URI.create("hdfs://myhost:8020")).anyTimes();
 
     replay(fs, other);
 
+    VolumeImpl volume = new VolumeImpl(fs, basePath);
+
     assertTrue(volume.equivalentFileSystems(other));
 
     verify(fs, other);
diff --git a/minicluster/src/main/java/org/apache/accumulo/miniclusterImpl/MiniAccumuloConfigImpl.java b/minicluster/src/main/java/org/apache/accumulo/miniclusterImpl/MiniAccumuloConfigImpl.java
index f69c80e..9cea96b 100644
--- a/minicluster/src/main/java/org/apache/accumulo/miniclusterImpl/MiniAccumuloConfigImpl.java
+++ b/minicluster/src/main/java/org/apache/accumulo/miniclusterImpl/MiniAccumuloConfigImpl.java
@@ -203,7 +203,8 @@ public class MiniAccumuloConfigImpl {
 
     File keystoreFile = new File(getConfDir(), "credential-provider.jks");
     String keystoreUri = "jceks://file" + keystoreFile.getAbsolutePath();
-    Configuration conf = CredentialProviderFactoryShim.getConfiguration(keystoreUri);
+    Configuration conf = CredentialProviderFactoryShim.getConfiguration(getHadoopConfiguration(),
+        keystoreUri);
 
     // Set the URI on the siteCfg
     siteConfig.put(Property.GENERAL_SECURITY_CREDENTIAL_PROVIDER_PATHS.getKey(), keystoreUri);
diff --git a/server/base/src/main/java/org/apache/accumulo/server/fs/VolumeManagerImpl.java b/server/base/src/main/java/org/apache/accumulo/server/fs/VolumeManagerImpl.java
index d6988b6..0ba12b4 100644
--- a/server/base/src/main/java/org/apache/accumulo/server/fs/VolumeManagerImpl.java
+++ b/server/base/src/main/java/org/apache/accumulo/server/fs/VolumeManagerImpl.java
@@ -35,7 +35,6 @@ import org.apache.accumulo.core.conf.Property;
 import org.apache.accumulo.core.data.Key;
 import org.apache.accumulo.core.dataImpl.KeyExtent;
 import org.apache.accumulo.core.file.rfile.RFile;
-import org.apache.accumulo.core.util.CachedConfiguration;
 import org.apache.accumulo.core.volume.NonConfiguredVolume;
 import org.apache.accumulo.core.volume.Volume;
 import org.apache.accumulo.core.volume.VolumeConfiguration;
@@ -71,9 +70,10 @@ public class VolumeManagerImpl implements VolumeManager {
   private final Multimap<URI,Volume> volumesByFileSystemUri;
   private final Volume defaultVolume;
   private final VolumeChooser chooser;
+  private final Configuration hadoopConf;
 
   protected VolumeManagerImpl(Map<String,Volume> volumes, Volume defaultVolume,
-      AccumuloConfiguration conf) {
+      AccumuloConfiguration conf, Configuration hadoopConf) {
     this.volumesByName = volumes;
     this.defaultVolume = defaultVolume;
     // We may have multiple directories used in a single FileSystem (e.g. testing)
@@ -94,6 +94,7 @@ public class VolumeManagerImpl implements VolumeManager {
           "Failed to load volume chooser specified by " + Property.GENERAL_VOLUME_CHOOSER);
     }
     chooser = chooser1;
+    this.hadoopConf = hadoopConf;
   }
 
   private void invertVolumesByFileSystem(Map<String,Volume> forward,
@@ -106,13 +107,14 @@ public class VolumeManagerImpl implements VolumeManager {
   public static org.apache.accumulo.server.fs.VolumeManager getLocal(String localBasePath)
       throws IOException {
     AccumuloConfiguration accConf = DefaultConfiguration.getInstance();
-    Volume defaultLocalVolume = VolumeConfiguration
-        .create(FileSystem.getLocal(CachedConfiguration.getInstance()), localBasePath);
+    Configuration hadoopConf = new Configuration();
+    Volume defaultLocalVolume = VolumeConfiguration.create(FileSystem.getLocal(hadoopConf),
+        localBasePath);
 
     // The default volume gets placed in the map, but local filesystem is only used for testing
     // purposes
     return new VolumeManagerImpl(Collections.singletonMap(DEFAULT, defaultLocalVolume),
-        defaultLocalVolume, accConf);
+        defaultLocalVolume, accConf, hadoopConf);
   }
 
   @Override
@@ -261,7 +263,7 @@ public class VolumeManagerImpl implements VolumeManager {
   public Volume getVolumeByPath(Path path) {
     if (path.toString().contains(":")) {
       try {
-        FileSystem desiredFs = path.getFileSystem(CachedConfiguration.getInstance());
+        FileSystem desiredFs = path.getFileSystem(hadoopConf);
         URI desiredFsUri = desiredFs.getUri();
         Collection<Volume> candidateVolumes = volumesByFileSystemUri.get(desiredFsUri);
         if (candidateVolumes != null) {
@@ -357,8 +359,8 @@ public class VolumeManagerImpl implements VolumeManager {
       }
     }
 
-    return new VolumeManagerImpl(volumes, VolumeConfiguration.getDefaultVolume(hadoopConf, conf),
-        conf);
+    Volume defaultVolume = VolumeConfiguration.getDefaultVolume(hadoopConf, conf);
+    return new VolumeManagerImpl(volumes, defaultVolume, conf, hadoopConf);
   }
 
   @Override
@@ -395,8 +397,8 @@ public class VolumeManagerImpl implements VolumeManager {
   @Override
   public Path matchingFileSystem(Path source, String[] options) {
     try {
-      if (ViewFSUtils.isViewFS(source, CachedConfiguration.getInstance())) {
-        return ViewFSUtils.matchingFileSystem(source, options, CachedConfiguration.getInstance());
+      if (ViewFSUtils.isViewFS(source, hadoopConf)) {
+        return ViewFSUtils.matchingFileSystem(source, options, hadoopConf);
       }
     } catch (IOException e) {
       throw new RuntimeException(e);
diff --git a/server/tserver/src/test/java/org/apache/accumulo/tserver/TabletServerSyncCheckTest.java b/server/tserver/src/test/java/org/apache/accumulo/tserver/TabletServerSyncCheckTest.java
index 646a84d..7bdc4b4 100644
--- a/server/tserver/src/test/java/org/apache/accumulo/tserver/TabletServerSyncCheckTest.java
+++ b/server/tserver/src/test/java/org/apache/accumulo/tserver/TabletServerSyncCheckTest.java
@@ -71,7 +71,7 @@ public class TabletServerSyncCheckTest {
 
     public TestVolumeManagerImpl(Map<String,Volume> volumes) {
       super(volumes, volumes.values().iterator().next(),
-          new ConfigurationCopy(Collections.emptyMap()));
+          new ConfigurationCopy(Collections.emptyMap()), new Configuration());
     }
 
     @Override


Mime
View raw message