hbase-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From j...@apache.org
Subject [hbase] branch branch-1 updated: HBASE-23621 Reduced the number of Checkstyle violations in tests of hbase-common
Date Mon, 27 Jan 2020 12:05:40 GMT
This is an automated email from the ASF dual-hosted git repository.

janh pushed a commit to branch branch-1
in repository https://gitbox.apache.org/repos/asf/hbase.git


The following commit(s) were added to refs/heads/branch-1 by this push:
     new 2267ab9  HBASE-23621 Reduced the number of Checkstyle violations in tests of hbase-common
2267ab9 is described below

commit 2267ab9399b0e3d1b2714aa5ee311fd0bbf52c31
Author: Jan Hentschel <janh@apache.org>
AuthorDate: Mon Jan 27 13:05:25 2020 +0100

    HBASE-23621 Reduced the number of Checkstyle violations in tests of hbase-common
    
    Signed-off-by: stack <stack@apache.org>
---
 .../java/org/apache/hadoop/hbase/ClassFinder.java  |  55 ++++----
 .../hadoop/hbase/HBaseCommonTestingUtility.java    |  43 +++---
 .../org/apache/hadoop/hbase/ResourceChecker.java   |  22 ++--
 .../java/org/apache/hadoop/hbase/TestCellUtil.java |  44 +++----
 .../org/apache/hadoop/hbase/TestClassFinder.java   |  17 +--
 .../hadoop/hbase/TestHBaseConfiguration.java       |  51 ++------
 .../hadoop/hbase/codec/TestCellCodecWithTags.java  |   3 +-
 .../hbase/codec/TestKeyValueCodecWithTags.java     |   3 +-
 .../hadoop/hbase/io/crypto/TestEncryption.java     |  36 ++---
 .../hadoop/hbase/util/RedundantKVGenerator.java    | 145 ++++-----------------
 .../apache/hadoop/hbase/HBaseTestingUtility.java   |   3 +-
 11 files changed, 155 insertions(+), 267 deletions(-)

diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java
index ba6d8a2..e16303b 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java
@@ -15,7 +15,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase;
 
 import java.io.File;
@@ -53,24 +52,32 @@ public class ClassFinder {
 
   public interface ResourcePathFilter {
     boolean isCandidatePath(String resourcePath, boolean isJar);
-  };
+  }
 
   public interface FileNameFilter {
     boolean isCandidateFile(String fileName, String absFilePath);
-  };
+  }
 
   public interface ClassFilter {
     boolean isCandidateClass(Class<?> c);
-  };
+  }
 
   public static class Not implements ResourcePathFilter, FileNameFilter, ClassFilter {
     private ResourcePathFilter resourcePathFilter;
     private FileNameFilter fileNameFilter;
     private ClassFilter classFilter;
 
-    public Not(ResourcePathFilter resourcePathFilter){this.resourcePathFilter = resourcePathFilter;}
-    public Not(FileNameFilter fileNameFilter){this.fileNameFilter = fileNameFilter;}
-    public Not(ClassFilter classFilter){this.classFilter = classFilter;}
+    public Not(ResourcePathFilter resourcePathFilter) {
+      this.resourcePathFilter = resourcePathFilter;
+    }
+
+    public Not(FileNameFilter fileNameFilter) {
+      this.fileNameFilter = fileNameFilter;
+    }
+
+    public Not(ClassFilter classFilter) {
+      this.classFilter = classFilter;
+    }
 
     @Override
     public boolean isCandidatePath(String resourcePath, boolean isJar) {
@@ -90,7 +97,10 @@ public class ClassFinder {
     ClassFilter[] classFilters;
     ResourcePathFilter[] resourcePathFilters;
 
-    public And(ClassFilter...classFilters) { this.classFilters = classFilters; }
+    public And(ClassFilter...classFilters) {
+      this.classFilters = classFilters;
+    }
+
     public And(ResourcePathFilter... resourcePathFilters) {
       this.resourcePathFilters = resourcePathFilters;
     }
@@ -120,10 +130,6 @@ public class ClassFinder {
     this(null, null, null, classLoader);
   }
 
-  public ClassFinder() {
-    this(ClassLoader.getSystemClassLoader());
-  }
-
   public ClassFinder(ResourcePathFilter resourcePathFilter, FileNameFilter fileNameFilter,
       ClassFilter classFilter) {
     this(resourcePathFilter, fileNameFilter, classFilter, ClassLoader.getSystemClassLoader());
@@ -180,7 +186,7 @@ public class ClassFinder {
       }
     }
 
-    Set<Class<?>> classes = new HashSet<Class<?>>();
+    Set<Class<?>> classes = new HashSet<>();
     for (File directory : dirs) {
       classes.addAll(findClassesFromFiles(directory, packageName, proceedOnExceptions));
     }
@@ -193,7 +199,7 @@ public class ClassFinder {
   private Set<Class<?>> findClassesFromJar(String jarFileName,
       String packageName, boolean proceedOnExceptions)
     throws IOException, ClassNotFoundException, LinkageError {
-    JarInputStream jarFile = null;
+    JarInputStream jarFile;
     try {
       jarFile = new JarInputStream(new FileInputStream(jarFileName));
     } catch (IOException ioEx) {
@@ -201,8 +207,8 @@ public class ClassFinder {
       throw ioEx;
     }
 
-    Set<Class<?>> classes = new HashSet<Class<?>>();
-    JarEntry entry = null;
+    Set<Class<?>> classes = new HashSet<>();
+    JarEntry entry;
     try {
       while (true) {
         try {
@@ -248,7 +254,7 @@ public class ClassFinder {
 
   private Set<Class<?>> findClassesFromFiles(File baseDirectory, String packageName,
       boolean proceedOnExceptions) throws ClassNotFoundException, LinkageError {
-    Set<Class<?>> classes = new HashSet<Class<?>>();
+    Set<Class<?>> classes = new HashSet<>();
     if (!baseDirectory.exists()) {
       LOG.warn("Failed to find " + baseDirectory.getAbsolutePath());
       return classes;
@@ -285,16 +291,11 @@ public class ClassFinder {
       Class<?> c = Class.forName(className, false, classLoader);
       boolean isCandidateClass = null == classFilter || classFilter.isCandidateClass(c);
       return isCandidateClass ? c : null;
-    } catch (ClassNotFoundException classNotFoundEx) {
-      if (!proceedOnExceptions) {
-        throw classNotFoundEx;
-      }
-      LOG.debug("Failed to instantiate or check " + className + ": " + classNotFoundEx);
-    } catch (LinkageError linkageEx) {
+    } catch (ClassNotFoundException | LinkageError exception) {
       if (!proceedOnExceptions) {
-        throw linkageEx;
+        throw exception;
       }
-      LOG.debug("Failed to instantiate or check " + className + ": " + linkageEx);
+      LOG.debug("Failed to instantiate or check " + className + ": " + exception);
     }
     return null;
   }
@@ -313,5 +314,5 @@ public class ClassFinder {
               && (null == nameFilter
                 || nameFilter.isCandidateFile(file.getName(), file.getAbsolutePath())));
     }
-  };
-};
+  }
+}
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
index 02f2799..19a9ac2 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
@@ -76,9 +76,8 @@ public class HBaseCommonTestingUtility {
   private File dataTestDir = null;
 
   /**
-   * @return Where to write test data on local filesystem, specific to
-   * the test.  Useful for tests that do not use a cluster.
-   * Creates it if it does not exist already.
+   * @return Where to write test data on local filesystem, specific to the test. Useful for tests
+   *    that do not use a cluster. Creates it if it does not exist already.
    */
   public Path getDataTestDir() {
     if (this.dataTestDir == null) {
@@ -88,10 +87,9 @@ public class HBaseCommonTestingUtility {
   }
 
   /**
-   * @param subdirName
-   * @return Path to a subdirectory named <code>subdirName</code> under
-   * {@link #getDataTestDir()}.
-   * Does *NOT* create it if it does not exist.
+   * @param subdirName the name of the subdirectory in the test data directory
+   * @return Path to a subdirectory named {code subdirName} under
+   *  {@link #getDataTestDir()}. Does *NOT* create it if it does not exist.
    */
   public Path getDataTestDir(final String subdirName) {
     return new Path(getDataTestDir(), subdirName);
@@ -115,7 +113,10 @@ public class HBaseCommonTestingUtility {
     this.dataTestDir = new File(testPath.toString()).getAbsoluteFile();
     // Set this property so if mapreduce jobs run, they will use this as their home dir.
     System.setProperty("test.build.dir", this.dataTestDir.toString());
-    if (deleteOnExit()) this.dataTestDir.deleteOnExit();
+
+    if (deleteOnExit()) {
+      this.dataTestDir.deleteOnExit();
+    }
 
     createSubDir("hbase.local.dir", testPath, "hbase-local-dir");
 
@@ -125,7 +126,11 @@ public class HBaseCommonTestingUtility {
   protected void createSubDir(String propertyName, Path parent, String subDirName) {
     Path newPath = new Path(parent, subDirName);
     File newDir = new File(newPath.toString()).getAbsoluteFile();
-    if (deleteOnExit()) newDir.deleteOnExit();
+
+    if (deleteOnExit()) {
+      newDir.deleteOnExit();
+    }
+
     conf.set(propertyName, newDir.getAbsolutePath());
   }
 
@@ -140,9 +145,8 @@ public class HBaseCommonTestingUtility {
 
   /**
    * @return True if we removed the test dirs
-   * @throws IOException
    */
-  public boolean cleanupTestDir() throws IOException {
+  public boolean cleanupTestDir() {
     if (deleteDir(this.dataTestDir)) {
       this.dataTestDir = null;
       return true;
@@ -153,9 +157,8 @@ public class HBaseCommonTestingUtility {
   /**
    * @param subdir Test subdir name.
    * @return True if we removed the test dir
-   * @throws IOException
    */
-  boolean cleanupTestDir(final String subdir) throws IOException {
+  boolean cleanupTestDir(final String subdir) {
     if (this.dataTestDir == null) {
       return false;
     }
@@ -164,9 +167,9 @@ public class HBaseCommonTestingUtility {
 
   /**
    * @return Where to write test data on local filesystem; usually
-   * {@link #DEFAULT_BASE_TEST_DIRECTORY}
-   * Should not be used by the unit tests, hence its's private.
-   * Unit test will use a subdirectory of this directory.
+   *    {@link #DEFAULT_BASE_TEST_DIRECTORY}
+   *    Should not be used by the unit tests, hence its's private.
+   *    Unit test will use a subdirectory of this directory.
    * @see #setupDataTestDir()
    */
   private Path getBaseTestDir() {
@@ -185,9 +188,8 @@ public class HBaseCommonTestingUtility {
   /**
    * @param dir Directory to delete
    * @return True if we deleted it.
-   * @throws IOException
    */
-  boolean deleteDir(final File dir) throws IOException {
+  boolean deleteDir(final File dir) {
     if (dir == null || !dir.exists()) {
       return true;
     }
@@ -195,7 +197,10 @@ public class HBaseCommonTestingUtility {
     do {
       ntries += 1;
       try {
-        if (deleteOnExit()) FileUtils.deleteDirectory(dir);
+        if (deleteOnExit()) {
+          FileUtils.deleteDirectory(dir);
+        }
+
         return true;
       } catch (IOException ex) {
         LOG.warn("Failed to delete " + dir.getAbsolutePath());
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/ResourceChecker.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/ResourceChecker.java
index e56dea8..3f8727b 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/ResourceChecker.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/ResourceChecker.java
@@ -16,7 +16,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase;
 
 import org.apache.commons.logging.Log;
@@ -46,7 +45,6 @@ public class ResourceChecker {
     this.tagLine = tagLine;
   }
 
-
   /**
    * Class to implement for each type of resource.
    */
@@ -83,21 +81,22 @@ public class ResourceChecker {
 
     /**
      * The value for the resource.
-     * @param phase
+     * @param phase the {@link Phase} to get the value for
      */
     abstract public int getVal(Phase phase);
     
     /*
      * Retrieves List of Strings which would be logged in logEndings()
      */
-    public List<String> getStringsToLog() { return null; }
+    public List<String> getStringsToLog() {
+      return null;
+    }
   }
 
   private List<ResourceAnalyzer> ras = new ArrayList<ResourceAnalyzer>();
   private int[] initialValues;
   private int[] endingValues;
 
-
   private void fillInit() {
     initialValues = new int[ras.size()];
     fill(Phase.INITIAL, initialValues);
@@ -141,7 +140,11 @@ public class ResourceChecker {
     StringBuilder sb = new StringBuilder();
     for (ResourceAnalyzer ra : ras) {
       int cur = initialValues[i++];
-      if (sb.length() > 0) sb.append(", ");
+
+      if (sb.length() > 0) {
+        sb.append(", ");
+      }
+
       sb.append(ra.getName()).append("=").append(cur);
     }
     LOG.info("before: " + tagLine + " " + sb);
@@ -156,7 +159,11 @@ public class ResourceChecker {
     for (ResourceAnalyzer ra : ras) {
       int curP = initialValues[i];
       int curN = endingValues[i++];
-      if (sb.length() > 0) sb.append(", ");
+
+      if (sb.length() > 0) {
+        sb.append(", ");
+      }
+
       sb.append(ra.getName()).append("=").append(curN).append(" (was ").append(curP).append(")");
       if (curN > curP) {
         List<String> strings = ra.getStringsToLog();
@@ -171,7 +178,6 @@ public class ResourceChecker {
     LOG.info("after: " + tagLine + " " + sb);
   }
 
-
   /**
    * To be called as the beginning of a test method:
    * - measure the resources
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java
index bdda494..7c9b6b1 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java
@@ -15,7 +15,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase;
 
 import static org.junit.Assert.assertEquals;
@@ -38,7 +37,7 @@ public class TestCellUtil {
   /**
    * CellScannable used in test. Returns a {@link TestCellScanner}
    */
-  private class TestCellScannable implements CellScannable {
+  private static class TestCellScannable implements CellScannable {
     private final int cellsCount;
     TestCellScannable(final int cellsCount) {
       this.cellsCount = cellsCount;
@@ -47,7 +46,7 @@ public class TestCellUtil {
     public CellScanner cellScanner() {
       return new TestCellScanner(this.cellsCount);
     }
-  };
+  }
 
   /**
    * CellScanner used in test.
@@ -67,7 +66,7 @@ public class TestCellUtil {
     }
 
     @Override
-    public boolean advance() throws IOException {
+    public boolean advance() {
       if (this.count < cellsCount) {
         this.current = new TestCell(this.count);
         this.count++;
@@ -221,34 +220,35 @@ public class TestCellUtil {
       // TODO Auto-generated method stub
       return 0;
     }
-  };
+  }
 
   /**
    * Was overflowing if 100k or so lists of cellscanners to return.
-   * @throws IOException
    */
   @Test
   public void testCreateCellScannerOverflow() throws IOException {
-    consume(doCreateCellScanner(1, 1), 1 * 1);
-    consume(doCreateCellScanner(3, 0), 3 * 0);
+    consume(doCreateCellScanner(1, 1), 1);
+    consume(doCreateCellScanner(3, 0), 0);
     consume(doCreateCellScanner(3, 3), 3 * 3);
-    consume(doCreateCellScanner(0, 1), 0 * 1);
+    consume(doCreateCellScanner(0, 1), 0);
     // Do big number. See HBASE-11813 for why.
     final int hundredK = 100000;
-    consume(doCreateCellScanner(hundredK, 0), hundredK * 0);
+    consume(doCreateCellScanner(hundredK, 0), 0);
     consume(doCreateCellArray(1), 1);
     consume(doCreateCellArray(0), 0);
     consume(doCreateCellArray(3), 3);
-    List<CellScannable> cells = new ArrayList<CellScannable>(hundredK);
+    List<CellScannable> cells = new ArrayList<>(hundredK);
     for (int i = 0; i < hundredK; i++) {
       cells.add(new TestCellScannable(1));
     }
-    consume(CellUtil.createCellScanner(cells), hundredK * 1);
-    NavigableMap<byte [], List<Cell>> m = new TreeMap<byte [], List<Cell>>(Bytes.BYTES_COMPARATOR);
-    List<Cell> cellArray = new ArrayList<Cell>(hundredK);
-    for (int i = 0; i < hundredK; i++) cellArray.add(new TestCell(i));
+    consume(CellUtil.createCellScanner(cells), hundredK);
+    NavigableMap<byte [], List<Cell>> m = new TreeMap<>(Bytes.BYTES_COMPARATOR);
+    List<Cell> cellArray = new ArrayList<>(hundredK);
+    for (int i = 0; i < hundredK; i++) {
+      cellArray.add(new TestCell(i));
+    }
     m.put(new byte [] {'f'}, cellArray);
-    consume(CellUtil.createCellScanner(m), hundredK * 1);
+    consume(CellUtil.createCellScanner(m), hundredK);
   }
 
   private CellScanner doCreateCellArray(final int itemsPerList) {
@@ -259,9 +259,8 @@ public class TestCellUtil {
     return CellUtil.createCellScanner(cells);
   }
 
-  private CellScanner doCreateCellScanner(final int listsCount, final int itemsPerList)
-  throws IOException {
-    List<CellScannable> cells = new ArrayList<CellScannable>(listsCount);
+  private CellScanner doCreateCellScanner(final int listsCount, final int itemsPerList) {
+    List<CellScannable> cells = new ArrayList<>(listsCount);
     for (int i = 0; i < listsCount; i++) {
       CellScannable cs = new CellScannable() {
         @Override
@@ -276,7 +275,9 @@ public class TestCellUtil {
 
  private void consume(final CellScanner scanner, final int expected) throws IOException {
     int count = 0;
-    while (scanner.advance()) count++;
+    while (scanner.advance()) {
+      count++;
+    }
     Assert.assertEquals(expected, count);
   }
 
@@ -384,7 +385,7 @@ public class TestCellUtil {
   @Test
   public void testToString() {
     byte [] row = Bytes.toBytes("row");
-    long ts = 123l;
+    long ts = 123L;
     // Make a KeyValue and a Cell and see if same toString result.
     KeyValue kv = new KeyValue(row, HConstants.EMPTY_BYTE_ARRAY, HConstants.EMPTY_BYTE_ARRAY,
         ts, KeyValue.Type.Minimum, HConstants.EMPTY_BYTE_ARRAY);
@@ -400,7 +401,6 @@ public class TestCellUtil {
         HConstants.EMPTY_BYTE_ARRAY);
     cellToString = CellUtil.getCellKeyAsString(cell);
     assertEquals(kv.toString(), cellToString);
-
   }
 
   @Test
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestClassFinder.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestClassFinder.java
index 24a4c0f..a20fffb 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestClassFinder.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestClassFinder.java
@@ -1,5 +1,4 @@
 /**
- *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -53,7 +52,6 @@ import org.junit.rules.TestName;
 
 @Category(SmallTests.class)
 public class TestClassFinder {
-
   private static final Log LOG = LogFactory.getLog(TestClassFinder.class);
 
   @Rule public TestName name = new TestName();
@@ -89,7 +87,7 @@ public class TestClassFinder {
   }
 
   @AfterClass
-  public static void deleteTestDir() throws IOException {
+  public static void deleteTestDir() {
     testUtil.cleanupTestDir(TestClassFinder.class.getSimpleName());
   }
 
@@ -185,8 +183,7 @@ public class TestClassFinder {
   }
 
   private static String createAndLoadJar(final String packageNameSuffix,
-      final String classNamePrefix, final long counter)
-  throws Exception {
+      final String classNamePrefix, final long counter) throws Exception {
     FileAndPath c1 = compileTestClass(counter, packageNameSuffix, classNamePrefix);
     FileAndPath c2 = compileTestClass(counter, packageNameSuffix, PREFIX + "1");
    FileAndPath c3 = compileTestClass(counter, packageNameSuffix, PREFIX + classNamePrefix + "2");
@@ -241,7 +238,9 @@ public class TestClassFinder {
 
  private static boolean contains(final Set<Class<?>> classes, final String simpleName) {
     for (Class<?> c: classes) {
-      if (c.getSimpleName().equals(simpleName)) return true;
+      if (c.getSimpleName().equals(simpleName)) {
+        return true;
+      }
     }
     return false;
   }
@@ -299,8 +298,7 @@ public class TestClassFinder {
   @Test
   public void testClassFinderFiltersByPathInDirs() throws Exception {
     final String hardcodedThisSubdir = "hbase-common";
-    final ClassFinder.ResourcePathFilter notExcJarFilter =
-        new ClassFinder.ResourcePathFilter() {
+    final ClassFinder.ResourcePathFilter notExcJarFilter = new ClassFinder.ResourcePathFilter() {
       @Override
       public boolean isCandidatePath(String resourcePath, boolean isJar) {
         return isJar || !resourcePath.contains(hardcodedThisSubdir);
@@ -383,7 +381,7 @@ public class TestClassFinder {
     // Directory entries for all packages have to be added explicitly for
     // resources to be findable via ClassLoader. Directory entries must end
     // with "/"; the initial one is expected to, also.
-    Set<String> pathsInJar = new HashSet<String>();
+    Set<String> pathsInJar = new HashSet<>();
     for (FileAndPath fileAndPath : filesInJar) {
       String pathToAdd = fileAndPath.path;
       while (pathsInJar.add(pathToAdd)) {
@@ -421,7 +419,6 @@ public class TestClassFinder {
 
   // Java 11 workaround - Custom class loader to expose addUrl method of URLClassLoader
   private static class CustomClassloader extends URLClassLoader {
-
     public CustomClassloader(URL[] urls, ClassLoader parentLoader) {
       super(urls, parentLoader);
     }
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseConfiguration.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseConfiguration.java
index d8aed04..cfc676f 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseConfiguration.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseConfiguration.java
@@ -15,7 +15,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase;
 
 import static org.junit.Assert.assertEquals;
@@ -40,7 +39,6 @@ import com.google.common.collect.ImmutableMap;
 
 @Category(SmallTests.class)
 public class TestHBaseConfiguration {
-
   private static final Log LOG = LogFactory.getLog(TestHBaseConfiguration.class);
 
   private static HBaseCommonTestingUtility UTIL = new HBaseCommonTestingUtility();
@@ -51,7 +49,7 @@ public class TestHBaseConfiguration {
   }
 
   @Test
-  public void testSubset() throws Exception {
+  public void testSubset() {
     Configuration conf = HBaseConfiguration.create();
     // subset is used in TableMapReduceUtil#initCredentials to support different security
     // configurations between source and destination clusters, so we'll use that as an example
@@ -126,7 +124,6 @@ public class TestHBaseConfiguration {
 
     private static Object hadoopCredProviderFactory = null;
     private static Method getProvidersMethod = null;
-    private static Method getAliasesMethod = null;
     private static Method getCredentialEntryMethod = null;
     private static Method getCredentialMethod = null;
     private static Method createCredentialEntryMethod = null;
@@ -157,7 +154,7 @@ public class TestHBaseConfiguration {
       hadoopClassesAvailable = false;
 
       // Load Hadoop CredentialProviderFactory
-      Class<?> hadoopCredProviderFactoryClz = null;
+      Class<?> hadoopCredProviderFactoryClz;
       try {
         hadoopCredProviderFactoryClz = Class
             .forName(HADOOP_CRED_PROVIDER_FACTORY_CLASS_NAME);
@@ -177,13 +174,13 @@ public class TestHBaseConfiguration {
             HADOOP_CRED_PROVIDER_FACTORY_GET_PROVIDERS_METHOD_NAME,
             Configuration.class);
         // Load Hadoop CredentialProvider
-        Class<?> hadoopCredProviderClz = null;
+        Class<?> hadoopCredProviderClz;
         hadoopCredProviderClz = Class.forName(HADOOP_CRED_PROVIDER_CLASS_NAME);
         getCredentialEntryMethod = loadMethod(hadoopCredProviderClz,
             HADOOP_CRED_PROVIDER_GET_CREDENTIAL_ENTRY_METHOD_NAME, String.class);
 
-        getAliasesMethod = loadMethod(hadoopCredProviderClz,
-            HADOOP_CRED_PROVIDER_GET_ALIASES_METHOD_NAME);
+        Method getAliasesMethod =
+          loadMethod(hadoopCredProviderClz, HADOOP_CRED_PROVIDER_GET_ALIASES_METHOD_NAME);
 
         createCredentialEntryMethod = loadMethod(hadoopCredProviderClz,
             HADOOP_CRED_PROVIDER_CREATE_CREDENTIAL_ENTRY_METHOD_NAME,
@@ -193,7 +190,7 @@ public class TestHBaseConfiguration {
             HADOOP_CRED_PROVIDER_FLUSH_METHOD_NAME);
 
         // Load Hadoop CredentialEntry
-        Class<?> hadoopCredentialEntryClz = null;
+        Class<?> hadoopCredentialEntryClz;
         try {
           hadoopCredentialEntryClz = Class
               .forName(HADOOP_CRED_ENTRY_CLASS_NAME);
@@ -212,17 +209,15 @@ public class TestHBaseConfiguration {
       LOG.info("Credential provider classes have been" +
           " loaded and initialized successfully through reflection.");
       return true;
-
     }
 
     private Method loadMethod(Class<?> clz, String name, Class<?>... classes)
         throws Exception {
-      Method method = null;
+      Method method;
       try {
         method = clz.getMethod(name, classes);
       } catch (SecurityException e) {
-        fail("security exception caught for: " + name + " in " +
-      clz.getCanonicalName());
+        fail("security exception caught for: " + name + " in " + clz.getCanonicalName());
         throw e;
       } catch (NoSuchMethodException e) {
         LOG.error("Failed to load the " + name + ": " + e);
@@ -242,19 +237,11 @@ public class TestHBaseConfiguration {
     @SuppressWarnings("unchecked")
     protected  List<Object> getCredentialProviders(Configuration conf) {
       // Call CredentialProviderFactory.getProviders(Configuration)
-      Object providersObj = null;
+      Object providersObj;
       try {
         providersObj = getProvidersMethod.invoke(hadoopCredProviderFactory,
             conf);
-      } catch (IllegalArgumentException e) {
-        LOG.error("Failed to invoke: " + getProvidersMethod.getName() +
-            ": " + e);
-        return null;
-      } catch (IllegalAccessException e) {
-        LOG.error("Failed to invoke: " + getProvidersMethod.getName() +
-            ": " + e);
-        return null;
-      } catch (InvocationTargetException e) {
+      } catch (IllegalArgumentException | InvocationTargetException | IllegalAccessException e) {
         LOG.error("Failed to invoke: " + getProvidersMethod.getName() +
             ": " + e);
         return null;
@@ -281,7 +268,6 @@ public class TestHBaseConfiguration {
      */
     public  void createEntry(Configuration conf, String name, char[] credential)
         throws Exception {
-
       if (!isHadoopCredentialProviderAvailable()) {
         return;
       }
@@ -311,30 +297,17 @@ public class TestHBaseConfiguration {
      */
     private void createEntryInProvider(Object credentialProvider,
         String name, char[] credential) throws Exception {
-
       if (!isHadoopCredentialProviderAvailable()) {
         return;
       }
 
       try {
         createCredentialEntryMethod.invoke(credentialProvider, name, credential);
-      } catch (IllegalArgumentException e) {
-        return;
-      } catch (IllegalAccessException e) {
-        return;
-      } catch (InvocationTargetException e) {
+      } catch (IllegalArgumentException | InvocationTargetException | IllegalAccessException e) {
         return;
       }
 
-      try {
-        flushMethod.invoke(credentialProvider);
-      } catch (IllegalArgumentException e) {
-        throw e;
-      } catch (IllegalAccessException e) {
-        throw e;
-      } catch (InvocationTargetException e) {
-        throw e;
-      }
+      flushMethod.invoke(credentialProvider);
     }
   }
 }
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodecWithTags.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodecWithTags.java
index 2a1569b..86658d5 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodecWithTags.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodecWithTags.java
@@ -43,7 +43,6 @@ import com.google.common.io.CountingOutputStream;
 
 @Category(SmallTests.class)
 public class TestCellCodecWithTags {
-
   @Test
   public void testCellWithTag() throws IOException {
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
@@ -110,4 +109,4 @@ public class TestCellCodecWithTags {
     dis.close();
     assertEquals(offset, cis.getCount());
   }
-}
\ No newline at end of file
+}
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodecWithTags.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodecWithTags.java
index c217cfa..f41dffc 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodecWithTags.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodecWithTags.java
@@ -43,7 +43,6 @@ import com.google.common.io.CountingOutputStream;
 
 @Category(SmallTests.class)
 public class TestKeyValueCodecWithTags {
-
   @Test
   public void testKeyValueWithTag() throws IOException {
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
@@ -110,4 +109,4 @@ public class TestKeyValueCodecWithTags {
     dis.close();
     assertEquals(offset, cis.getCount());
   }
-}
\ No newline at end of file
+}
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestEncryption.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestEncryption.java
index e31ab49..78a21fb 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestEncryption.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestEncryption.java
@@ -1,22 +1,24 @@
 /*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  *
- * http://www.apache.org/licenses/LICENSE-2.0
+ *     http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
  */
 package org.apache.hadoop.hbase.io.crypto;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
@@ -36,7 +38,6 @@ import org.junit.experimental.categories.Category;
 
 @Category(SmallTests.class)
 public class TestEncryption {
-
   private static final Log LOG = LogFactory.getLog(TestEncryption.class);
 
   @Test
@@ -45,7 +46,7 @@ public class TestEncryption {
     Bytes.random(key);
     byte[] iv = new byte[16];
     Bytes.random(iv);
-    for (int size: new int[] { 4, 8, 16, 32, 64, 128, 256, 512 } ) {
+    for (int size: new int[] { 4, 8, 16, 32, 64, 128, 256, 512 }) {
       checkTransformSymmetry(key, iv, getRandomBlock(size));
     }
   }
@@ -56,7 +57,7 @@ public class TestEncryption {
     Bytes.random(key);
     byte[] iv = new byte[16];
     Bytes.random(iv);
-    for (int size: new int[] { 256 * 1024, 512 * 1024, 1024 * 1024 } ) {
+    for (int size: new int[] { 256 * 1024, 512 * 1024, 1024 * 1024 }) {
       checkTransformSymmetry(key, iv, getRandomBlock(size));
     }
   }
@@ -67,7 +68,7 @@ public class TestEncryption {
     Bytes.random(key);
     byte[] iv = new byte[16];
     Bytes.random(iv);
-    for (int size: new int[] { 3, 7, 11, 23, 47, 79, 119, 175 } ) {
+    for (int size: new int[] { 3, 7, 11, 23, 47, 79, 119, 175 }) {
       checkTransformSymmetry(key, iv, getRandomBlock(size));
     }
   }
@@ -78,7 +79,7 @@ public class TestEncryption {
     Bytes.random(key);
     byte[] iv = new byte[16];
     Bytes.random(iv);
-    for (int size: new int[] { 4 * 1024, 8 * 1024, 64 * 1024, 128 * 1024 } ) {
+    for (int size: new int[] { 4 * 1024, 8 * 1024, 64 * 1024, 128 * 1024 }) {
       checkTransformSymmetry(key, iv, getRandomBlock(size));
     }
   }
@@ -121,5 +122,4 @@ public class TestEncryption {
     Bytes.random(b);
     return b;
   }
-
 }
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/RedundantKVGenerator.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/RedundantKVGenerator.java
index 9190011..df37729 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/RedundantKVGenerator.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/RedundantKVGenerator.java
@@ -18,6 +18,7 @@ package org.apache.hadoop.hbase.util;
 
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
@@ -88,7 +89,6 @@ public class RedundantKVGenerator {
     );
   }
 
-
   /**
    * Various configuration options for generating key values
    * @param randomizer pick things by random
@@ -111,8 +111,7 @@ public class RedundantKVGenerator {
       float chanceForZeroValue,
 
       int baseTimestampDivide,
-      int timestampDiffSize
-      ) {
+      int timestampDiffSize) {
     this.randomizer = randomizer;
 
     this.commonPrefix = DEFAULT_COMMON_PREFIX;
@@ -140,33 +139,33 @@ public class RedundantKVGenerator {
   private Random randomizer;
 
   // row settings
-  private byte[] commonPrefix;//global prefix before rowPrefixes
+  private byte[] commonPrefix; //global prefix before rowPrefixes
   private int numberOfRowPrefixes;
-  private int averagePrefixLength = 6;
-  private int prefixLengthVariance = 3;
-  private int averageSuffixLength = 3;
-  private int suffixLengthVariance = 3;
-  private int numberOfRows = 500;
+  private int averagePrefixLength;
+  private int prefixLengthVariance;
+  private int averageSuffixLength;
+  private int suffixLengthVariance;
+  private int numberOfRows;
 
-  //family
+  // family
   private byte[] family;
 
   // qualifier
-  private float chanceForSameQualifier = 0.5f;
-  private float chanceForSimilarQualifier = 0.4f;
-  private int averageQualifierLength = 9;
-  private int qualifierLengthVariance = 3;
+  private float chanceForSameQualifier;
+  private float chanceForSimilarQualifier;
+  private int averageQualifierLength;
+  private int qualifierLengthVariance;
 
-  private int columnFamilyLength = 9;
-  private int valueLength = 8;
-  private float chanceForZeroValue = 0.5f;
+  private int columnFamilyLength;
+  private int valueLength;
+  private float chanceForZeroValue;
 
-  private int baseTimestampDivide = 1000000;
-  private int timestampDiffSize = 100000000;
+  private int baseTimestampDivide;
+  private int timestampDiffSize;
 
   private List<byte[]> generateRows() {
     // generate prefixes
-    List<byte[]> prefixes = new ArrayList<byte[]>();
+    List<byte[]> prefixes = new ArrayList<>();
     prefixes.add(new byte[0]);
     for (int i = 1; i < numberOfRowPrefixes; ++i) {
       int prefixLength = averagePrefixLength;
@@ -174,12 +173,11 @@ public class RedundantKVGenerator {
           prefixLengthVariance;
       byte[] newPrefix = new byte[prefixLength];
       randomizer.nextBytes(newPrefix);
-      byte[] newPrefixWithCommon = newPrefix;
-      prefixes.add(newPrefixWithCommon);
+      prefixes.add(newPrefix);
     }
 
     // generate rest of the row
-    List<byte[]> rows = new ArrayList<byte[]>();
+    List<byte[]> rows = new ArrayList<>();
     for (int i = 0; i < numberOfRows; ++i) {
       int suffixLength = averageSuffixLength;
       suffixLength += randomizer.nextInt(2 * suffixLengthVariance + 1) -
@@ -202,16 +200,17 @@ public class RedundantKVGenerator {
   public List<KeyValue> generateTestKeyValues(int howMany) {
     return generateTestKeyValues(howMany, false);
   }
+
   /**
    * Generate test data useful to test encoders.
    * @param howMany How many Key values should be generated.
    * @return sorted list of key values
    */
   public List<KeyValue> generateTestKeyValues(int howMany, boolean useTags) {
-    List<KeyValue> result = new ArrayList<KeyValue>();
+    List<KeyValue> result = new ArrayList<>();
 
     List<byte[]> rows = generateRows();
-    Map<Integer, List<byte[]>> rowsToQualifier = new HashMap<Integer, List<byte[]>>();
+    Map<Integer, List<byte[]>> rowsToQualifier = new HashMap<>();
 
     if(family==null){
       family = new byte[columnFamilyLength];
@@ -268,16 +267,14 @@ public class RedundantKVGenerator {
       }
 
       if (randomizer.nextFloat() < chanceForZeroValue) {
-        for (int j = 0; j < value.length; ++j) {
-          value[j] = (byte) 0;
-        }
+        Arrays.fill(value, (byte) 0);
       } else {
         randomizer.nextBytes(value);
       }
 
       if (useTags) {
-        result.add(new KeyValue(row, family, qualifier, timestamp, value, new Tag[] { new Tag(
-            (byte) 1, "value1") }));
+        result.add(new KeyValue(row, family, qualifier, timestamp, value, new Tag[] {
+          new Tag((byte) 1, "value1") }));
       } else {
         result.add(new KeyValue(row, family, qualifier, timestamp, value));
       }
@@ -313,97 +310,9 @@ public class RedundantKVGenerator {
     return result;
   }
 
-  /************************ get/set ***********************************/
-
-  public RedundantKVGenerator setCommonPrefix(byte[] prefix){
-    this.commonPrefix = prefix;
-    return this;
-  }
-
-  public RedundantKVGenerator setRandomizer(Random randomizer) {
-    this.randomizer = randomizer;
-    return this;
-  }
-
-  public RedundantKVGenerator setNumberOfRowPrefixes(int numberOfRowPrefixes) {
-    this.numberOfRowPrefixes = numberOfRowPrefixes;
-    return this;
-  }
-
-  public RedundantKVGenerator setAveragePrefixLength(int averagePrefixLength) {
-    this.averagePrefixLength = averagePrefixLength;
-    return this;
-  }
-
-  public RedundantKVGenerator setPrefixLengthVariance(int prefixLengthVariance) {
-    this.prefixLengthVariance = prefixLengthVariance;
-    return this;
-  }
-
-  public RedundantKVGenerator setAverageSuffixLength(int averageSuffixLength) {
-    this.averageSuffixLength = averageSuffixLength;
-    return this;
-  }
-
-  public RedundantKVGenerator setSuffixLengthVariance(int suffixLengthVariance) {
-    this.suffixLengthVariance = suffixLengthVariance;
-    return this;
-  }
-
-  public RedundantKVGenerator setNumberOfRows(int numberOfRows) {
-    this.numberOfRows = numberOfRows;
-    return this;
-  }
-
-  public RedundantKVGenerator setChanceForSameQualifier(float chanceForSameQualifier) {
-    this.chanceForSameQualifier = chanceForSameQualifier;
-    return this;
-  }
-
-  public RedundantKVGenerator setChanceForSimilarQualifier(float chanceForSimiliarQualifier) {
-    this.chanceForSimilarQualifier = chanceForSimiliarQualifier;
-    return this;
-  }
-
-  public RedundantKVGenerator setAverageQualifierLength(int averageQualifierLength) {
-    this.averageQualifierLength = averageQualifierLength;
-    return this;
-  }
-
-  public RedundantKVGenerator setQualifierLengthVariance(int qualifierLengthVariance) {
-    this.qualifierLengthVariance = qualifierLengthVariance;
-    return this;
-  }
-
-  public RedundantKVGenerator setColumnFamilyLength(int columnFamilyLength) {
-    this.columnFamilyLength = columnFamilyLength;
-    return this;
-  }
-
   public RedundantKVGenerator setFamily(byte[] family) {
     this.family = family;
     this.columnFamilyLength = family.length;
     return this;
   }
-
-  public RedundantKVGenerator setValueLength(int valueLength) {
-    this.valueLength = valueLength;
-    return this;
-  }
-
-  public RedundantKVGenerator setChanceForZeroValue(float chanceForZeroValue) {
-    this.chanceForZeroValue = chanceForZeroValue;
-    return this;
-  }
-
-  public RedundantKVGenerator setBaseTimestampDivide(int baseTimestampDivide) {
-    this.baseTimestampDivide = baseTimestampDivide;
-    return this;
-  }
-
-  public RedundantKVGenerator setTimestampDiffSize(int timestampDiffSize) {
-    this.timestampDiffSize = timestampDiffSize;
-    return this;
-  }
-
 }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
index 9a8a902..90ed49c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
@@ -1187,10 +1187,9 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
 
   /**
    * @return True if we removed the test dirs
-   * @throws IOException
    */
   @Override
-  public boolean cleanupTestDir() throws IOException {
+  public boolean cleanupTestDir() {
     boolean ret = super.cleanupTestDir();
     if (deleteDir(this.clusterTestDir)) {
       this.clusterTestDir = null;


Mime
View raw message