geode-commits mailing list archives

From kdul...@apache.org
Subject [3/3] geode git commit: GEODE-2457: Replace org.apache.geode.internal.FileUtil with org.apache.commons.io.FileUtils
Date Thu, 23 Feb 2017 19:41:29 GMT
GEODE-2457: Replace org.apache.geode.internal.FileUtil with org.apache.commons.io.FileUtils


Project: http://git-wip-us.apache.org/repos/asf/geode/repo
Commit: http://git-wip-us.apache.org/repos/asf/geode/commit/29246ab7
Tree: http://git-wip-us.apache.org/repos/asf/geode/tree/29246ab7
Diff: http://git-wip-us.apache.org/repos/asf/geode/diff/29246ab7

Branch: refs/heads/develop
Commit: 29246ab7f136e6bd8c91d055980a6ed639ad60e6
Parents: fa5963a
Author: Kevin J. Duling <kduling@pivotal.io>
Authored: Fri Feb 10 15:45:45 2017 -0800
Committer: Kevin J. Duling <kduling@pivotal.io>
Committed: Thu Feb 23 11:09:35 2017 -0800

----------------------------------------------------------------------
 .../session/installer/InstallerJUnitTest.java   |  19 +-
 .../org/apache/geode/BundledJarsJUnitTest.java  |  27 +-
 ...erConfigurationServiceEndToEndDUnitTest.java |   8 -
 .../org/apache/geode/internal/FileUtil.java     | 328 -------------------
 .../geode/internal/cache/DiskInitFile.java      |  60 ++--
 .../geode/internal/cache/DiskStoreImpl.java     | 192 +++++------
 .../org/apache/geode/internal/cache/Oplog.java  |  82 ++++-
 .../internal/cache/PersistentOplogSet.java      |  40 ++-
 .../cache/persistence/BackupManager.java        |  57 ++--
 .../cache/persistence/RestoreScript.java        |   6 +-
 .../io/MainWithChildrenRollingFileHandler.java  |  23 +-
 .../geode/internal/logging/MergeLogFiles.java   |  29 +-
 .../client/ClientCacheFactoryJUnitTest.java     |  17 +-
 .../geode/cache/query/QueryTestUtils.java       |  30 +-
 .../dunit/QueryIndexUsingXMLDUnitTest.java      |  31 +-
 .../functional/IndexCreationJUnitTest.java      |  51 +--
 .../AbstractLauncherIntegrationTestCase.java    |  16 +-
 .../geode/internal/FileUtilJUnitTest.java       | 101 ------
 .../geode/internal/JarDeployerDUnitTest.java    |  28 +-
 .../internal/JarDeployerIntegrationTest.java    |   2 +-
 .../geode/internal/PdxDeleteFieldDUnitTest.java |  30 +-
 .../geode/internal/PdxDeleteFieldJUnitTest.java |  32 +-
 .../geode/internal/PdxRenameDUnitTest.java      |  30 +-
 .../geode/internal/PdxRenameJUnitTest.java      |  20 +-
 .../geode/internal/cache/BackupDUnitTest.java   | 113 ++++---
 .../geode/internal/cache/BackupJUnitTest.java   |  58 +++-
 .../cache/DiskRegionAsyncRecoveryJUnitTest.java |  28 +-
 .../internal/cache/DiskRegionTestingBase.java   |  36 +-
 .../cache/IncrementalBackupDUnitTest.java       | 167 ++++++----
 .../geode/internal/cache/OplogRVVJUnitTest.java |  42 +--
 .../cache/PartitionedRegionStatsJUnitTest.java  |  25 +-
 .../PersistentPartitionedRegionJUnitTest.java   |  25 +-
 ...tentColocatedPartitionedRegionDUnitTest.java |  55 ++--
 .../PersistentPartitionedRegionTestBase.java    |  33 +-
 .../fixed/FixedPartitioningTestBase.java        |  40 ++-
 .../persistence/BackupInspectorJUnitTest.java   |  18 +-
 .../PersistentReplicatedTestBase.java           |  26 +-
 .../RollingUpgrade2DUnitTest.java               |   6 +-
 .../rollingupgrade/RollingUpgradeDUnitTest.java |   7 +-
 .../logging/log4j/Log4J2PerformanceTest.java    |  27 +-
 .../log4j/LogWriterLoggerPerformanceTest.java   |  27 +-
 .../internal/net/SSLSocketIntegrationTest.java  |  49 +--
 ...eateAlterDestroyRegionCommandsDUnitTest.java |   6 +-
 .../commands/DiskStoreCommandsDUnitTest.java    |  10 +-
 ...laneousCommandsExportLogsPart1DUnitTest.java |  21 +-
 ...laneousCommandsExportLogsPart2DUnitTest.java |  22 +-
 ...laneousCommandsExportLogsPart3DUnitTest.java |  17 +-
 ...laneousCommandsExportLogsPart4DUnitTest.java |  21 +-
 .../cli/commands/QueueCommandsDUnitTest.java    |  10 +-
 .../cli/commands/UserCommandsDUnitTest.java     |  10 +-
 .../geode/pdx/PdxAttributesJUnitTest.java       |  19 +-
 .../geode/pdx/PdxSerializableJUnitTest.java     |  72 ++--
 .../test/dunit/standalone/ProcessManager.java   |  18 +-
 .../org/apache/geode/util/test/TestUtil.java    |   8 +-
 .../geode/internal/cache/wan/WANTestBase.java   |  97 +++---
 55 files changed, 1078 insertions(+), 1294 deletions(-)
----------------------------------------------------------------------
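This change removes org.apache.geode.internal.FileUtil and rewrites its call sites against org.apache.commons.io.FileUtils and plain java.io.File. As a quick orientation, the sketch below gathers the substitution patterns that recur in the hunks that follow; the class and method names are invented for illustration and are not part of the commit.

    import java.io.File;
    import java.io.IOException;
    import java.util.Collection;
    import org.apache.commons.io.FileUtils;

    class FileUtilMigrationSketch {
      // Collects the replacement patterns used throughout this commit.
      static void examples(File sourceFile, File targetDir) throws IOException {
        // FileUtil.copy(file, dir) becomes FileUtils.copyFileToDirectory(file, dir)
        FileUtils.copyFileToDirectory(sourceFile, targetDir);

        // FileUtil.findAll(dir, ".*\\.jar") becomes an extension-based recursive listing
        Collection<File> jars = FileUtils.listFiles(targetDir, new String[] {"jar"}, true);

        // FileUtil.mkdirs(dir) becomes plain java.io.File#mkdirs
        boolean created = new File(targetDir, "backup").mkdirs();

        // FileUtil.listFiles(dir, filter) becomes File#listFiles(filter) plus an explicit null check
        File[] children = targetDir.listFiles((dir, name) -> name.endsWith(".jar"));
        if (children != null) {
          System.out.println(jars.size() + " jars, " + children.length + " children, created=" + created);
        }
      }
    }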


http://git-wip-us.apache.org/repos/asf/geode/blob/29246ab7/extensions/geode-modules-session/src/test/java/org/apache/geode/modules/session/installer/InstallerJUnitTest.java
----------------------------------------------------------------------
diff --git a/extensions/geode-modules-session/src/test/java/org/apache/geode/modules/session/installer/InstallerJUnitTest.java b/extensions/geode-modules-session/src/test/java/org/apache/geode/modules/session/installer/InstallerJUnitTest.java
index e51241b..aa3e0d2 100644
--- a/extensions/geode-modules-session/src/test/java/org/apache/geode/modules/session/installer/InstallerJUnitTest.java
+++ b/extensions/geode-modules-session/src/test/java/org/apache/geode/modules/session/installer/InstallerJUnitTest.java
@@ -14,22 +14,21 @@
  */
 package org.apache.geode.modules.session.installer;
 
-import static org.junit.Assert.*;
-
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.InputStream;
-
-import org.apache.geode.internal.FileUtil;
-import org.apache.geode.test.junit.categories.IntegrationTest;
+import static org.junit.Assert.assertEquals;
 
+import org.apache.commons.io.FileUtils;
 import org.apache.commons.io.IOUtils;
+import org.apache.geode.test.junit.categories.IntegrationTest;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.rules.TemporaryFolder;
 
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.InputStream;
+
 @Category(IntegrationTest.class)
 public class InstallerJUnitTest {
 
@@ -43,7 +42,7 @@ public class InstallerJUnitTest {
 
   private void testTransformation(final String name) throws Exception {
     File webXmlFile = temporaryFolder.newFile();
-    FileUtil.copy(getClass().getResource(name), webXmlFile);
+    FileUtils.copyFile(new File(getClass().getResource(name).getFile()), webXmlFile);
     final String[] args = {"-t", "peer-to-peer", "-w", webXmlFile.getAbsolutePath()};
 
     ByteArrayOutputStream output = new ByteArrayOutputStream();

http://git-wip-us.apache.org/repos/asf/geode/blob/29246ab7/geode-assembly/src/test/java/org/apache/geode/BundledJarsJUnitTest.java
----------------------------------------------------------------------
diff --git a/geode-assembly/src/test/java/org/apache/geode/BundledJarsJUnitTest.java b/geode-assembly/src/test/java/org/apache/geode/BundledJarsJUnitTest.java
index b7ada4a..3f0e2c0 100644
--- a/geode-assembly/src/test/java/org/apache/geode/BundledJarsJUnitTest.java
+++ b/geode-assembly/src/test/java/org/apache/geode/BundledJarsJUnitTest.java
@@ -14,13 +14,21 @@
  */
 package org.apache.geode;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertTrue;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.geode.test.junit.categories.IntegrationTest;
+import org.apache.geode.test.junit.categories.RestAPITest;
+import org.apache.geode.util.test.TestUtil;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
 
 import java.io.File;
 import java.io.IOException;
 import java.nio.file.Files;
 import java.nio.file.Paths;
-import java.util.List;
+import java.util.Collection;
 import java.util.Set;
 import java.util.TreeMap;
 import java.util.TreeSet;
@@ -28,15 +36,6 @@ import java.util.jar.JarFile;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 
-import org.apache.geode.test.junit.categories.RestAPITest;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-
-import org.apache.geode.internal.FileUtil;
-import org.apache.geode.test.junit.categories.IntegrationTest;
-import org.apache.geode.util.test.TestUtil;
-
 @Category({IntegrationTest.class, RestAPITest.class})
 public class BundledJarsJUnitTest {
 
@@ -93,11 +92,11 @@ public class BundledJarsJUnitTest {
         "Please set the GEODE_HOME environment variable to the product installation directory.",
         geodeHomeDirectory.isDirectory());
 
-    List<File> jars = FileUtil.findAll(geodeHomeDirectory, ".*\\.jar");
+    Collection<File> jars = FileUtils.listFiles(geodeHomeDirectory, new String[] {"jar"}, true);
     TreeMap<String, String> sortedJars = new TreeMap<String, String>();
-    jars.stream().forEach(jar -> sortedJars.put(jar.getName(), jar.getPath()));
+    jars.forEach(jar -> sortedJars.put(jar.getName(), jar.getPath()));
 
-    List<File> wars = FileUtil.findAll(geodeHomeDirectory, ".*\\.war");
+    Collection<File> wars = FileUtils.listFiles(geodeHomeDirectory, new String[] {"war"}, true);
     TreeSet<File> sortedWars = new TreeSet<File>(wars);
     sortedWars.stream().flatMap(BundledJarsJUnitTest::extractJarNames)
         .forEach(jar -> sortedJars.put(jar.getName(), jar.getPath()));

http://git-wip-us.apache.org/repos/asf/geode/blob/29246ab7/geode-assembly/src/test/java/org/apache/geode/management/internal/configuration/ClusterConfigurationServiceEndToEndDUnitTest.java
----------------------------------------------------------------------
diff --git a/geode-assembly/src/test/java/org/apache/geode/management/internal/configuration/ClusterConfigurationServiceEndToEndDUnitTest.java b/geode-assembly/src/test/java/org/apache/geode/management/internal/configuration/ClusterConfigurationServiceEndToEndDUnitTest.java
index a96f8af..0103cf6 100644
--- a/geode-assembly/src/test/java/org/apache/geode/management/internal/configuration/ClusterConfigurationServiceEndToEndDUnitTest.java
+++ b/geode-assembly/src/test/java/org/apache/geode/management/internal/configuration/ClusterConfigurationServiceEndToEndDUnitTest.java
@@ -27,8 +27,6 @@ import static org.apache.geode.distributed.ConfigurationProperties.LOG_LEVEL;
 import static org.apache.geode.distributed.ConfigurationProperties.MCAST_PORT;
 import static org.apache.geode.distributed.ConfigurationProperties.NAME;
 import static org.apache.geode.internal.AvailablePortHelper.getRandomAvailableTCPPorts;
-import static org.apache.geode.internal.FileUtil.delete;
-import static org.apache.geode.internal.FileUtil.deleteMatching;
 import static org.apache.geode.internal.lang.StringUtils.isBlank;
 import static org.apache.geode.management.internal.cli.CliUtil.getAllNormalMembers;
 import static org.apache.geode.test.dunit.Assert.assertEquals;
@@ -48,7 +46,6 @@ import org.apache.geode.cache.wan.GatewaySender.OrderPolicy;
 import org.apache.geode.distributed.Locator;
 import org.apache.geode.distributed.internal.InternalLocator;
 import org.apache.geode.internal.ClassBuilder;
-import org.apache.geode.internal.JarDeployer;
 import org.apache.geode.internal.admin.remote.ShutdownAllRequest;
 import org.apache.geode.internal.cache.GemFireCacheImpl;
 import org.apache.geode.management.cli.Result.Status;
@@ -323,11 +320,6 @@ public class ClusterConfigurationServiceEndToEndDUnitTest extends CliCommandTest
     jarFileNames.add(jarName);
   }
 
-  private void deleteSavedJarFiles() throws IOException {
-    deleteMatching(new File("."), "^" + JarDeployer.JAR_PREFIX + "Deploy1.*#\\d++$");
-    delete(new File("Deploy1.jar"));
-  }
-
   private Object[] setup() throws IOException {
     final int[] ports = getRandomAvailableTCPPorts(3);
     final int locator1Port = ports[0];

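For reference, the removed deleteSavedJarFiles helper above cleaned up files whose names matched a regex built from JarDeployer.JAR_PREFIX. A commons-io equivalent could look roughly like the sketch below; the method is hypothetical and the pattern is a placeholder, not a value taken from the commit.

    import java.io.File;
    import org.apache.commons.io.FileUtils;
    import org.apache.commons.io.filefilter.RegexFileFilter;

    class DeleteMatchingSketch {
      static void deleteMatching(File dir, String regex) {
        // A null directory filter keeps the listing non-recursive, matching the old
        // FileUtil.deleteMatching behaviour of scanning only the given directory.
        for (File match : FileUtils.listFiles(dir, new RegexFileFilter(regex), null)) {
          FileUtils.deleteQuietly(match); // failures are ignored, as in the removed helper
        }
      }
    }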
http://git-wip-us.apache.org/repos/asf/geode/blob/29246ab7/geode-core/src/main/java/org/apache/geode/internal/FileUtil.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/internal/FileUtil.java b/geode-core/src/main/java/org/apache/geode/internal/FileUtil.java
deleted file mode 100644
index 2d72930..0000000
--- a/geode-core/src/main/java/org/apache/geode/internal/FileUtil.java
+++ /dev/null
@@ -1,328 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
- * agreements. See the NOTICE file distributed with this work for additional information regarding
- * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License. You may obtain a
- * copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.geode.internal;
-
-import org.apache.geode.distributed.internal.DistributionConfig;
-
-import java.io.*;
-import java.net.URL;
-import java.nio.channels.FileChannel;
-import java.nio.file.Files;
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * This class contains static methods for manipulating files and directories, such as recursively
- * copying or deleting files.
- * 
- * TODO A lot of this functionality is probably duplicating apache commons io, maybe we should
- * switch to that.
- * 
- * 
- */
-public class FileUtil {
-  public static final long MAX_TRANSFER_SIZE =
-      Long.getLong(DistributionConfig.GEMFIRE_PREFIX + "FileUtil.MAX_TRANSFER_SIZE", 1024 * 1024)
-          .longValue();
-  public static final boolean USE_NIO =
-      !Boolean.getBoolean(DistributionConfig.GEMFIRE_PREFIX + "FileUtil.USE_OLD_IO");
-  public static final String extSeparator = ".";
-
-  /**
-   * Copy a file from the source file to the destination file. If the source is a directory, it will
-   * be copied recursively.
-   * 
-   * Note that unlike unix cp, if the destination is a directory, the source *contents* will be copied
-   * to the destination *contents*, not as a subdirectory of dest.
-   * 
-   * @param source the source file or directory
-   * @param dest the destination file or directory.
-   * @throws IOException
-   */
-  public static void copy(File source, File dest) throws IOException {
-    if (source.isDirectory()) {
-      dest.mkdir();
-      for (File child : listFiles(source)) {
-        copy(child, new File(dest, child.getName()));
-      }
-    } else {
-      if (source.exists()) {
-        long lm = source.lastModified();
-        if (dest.isDirectory()) {
-          dest = new File(dest, source.getName());
-        }
-        FileOutputStream fos = new FileOutputStream(dest);
-        try {
-          FileInputStream fis = new FileInputStream(source);
-          try {
-            if (USE_NIO) {
-              nioCopy(fos, fis);
-            } else {
-              oioCopy(source, fos, fis);
-            }
-          } finally {
-            fis.close();
-          }
-        } finally {
-          fos.close();
-        }
-        dest.setExecutable(source.canExecute(), true);
-        dest.setLastModified(lm);
-      }
-    }
-  }
-
-  /**
-   * Basically just like {@link File#listFiles()} but instead of returning null returns an empty
-   * array. This fixes bug 43729
-   */
-  public static File[] listFiles(File dir) {
-    File[] result = dir.listFiles();
-    if (result == null) {
-      result = new File[0];
-    }
-    return result;
-  }
-
-  /**
-   * Basically just like {@link File#listFiles(FilenameFilter)} but instead of returning null
-   * returns an empty array. This fixes bug 43729
-   */
-  public static File[] listFiles(File dir, FilenameFilter filter) {
-    File[] result = dir.listFiles(filter);
-    if (result == null) {
-      result = new File[0];
-    }
-    return result;
-  }
-
-  /**
-   * Copy a single file using NIO.
-   * 
-   * @throws IOException
-   */
-  private static void nioCopy(FileOutputStream fos, FileInputStream fis) throws IOException {
-    FileChannel outChannel = fos.getChannel();
-    FileChannel inChannel = fis.getChannel();
-    long length = inChannel.size();
-    long offset = 0;
-    while (true) {
-      long remaining = length - offset;
-
-      long toTransfer = remaining < MAX_TRANSFER_SIZE ? remaining : MAX_TRANSFER_SIZE;
-      long transferredBytes = inChannel.transferTo(offset, toTransfer, outChannel);
-      offset += transferredBytes;
-      length = inChannel.size();
-      if (offset >= length) {
-        break;
-      }
-    }
-  }
-
-  /**
-   * Copy a single file using java.io streams.
-   * 
-   * @throws IOException
-   */
-  private static void oioCopy(File source, FileOutputStream fos, FileInputStream fis)
-      throws IOException {
-    int size = (int) (source.length() < MAX_TRANSFER_SIZE ? source.length() : MAX_TRANSFER_SIZE);
-    byte[] buffer = new byte[size];
-    int read;
-    while ((read = fis.read(buffer)) > 0) {
-      fos.write(buffer, 0, read);
-    }
-
-  }
-
-  /**
-   * Recursively delete a file or directory.
-   * 
-   * @throws IOException if the file or directory couldn't be deleted. Unlike File.delete, which
-   *         just returns false.
-   */
-  public static void delete(File file) throws IOException {
-    if (!file.exists())
-      return;
-
-    if (file.isDirectory()) {
-      for (File child : listFiles(file)) {
-        delete(child);
-      }
-    }
-
-    Files.delete(file.toPath());
-  }
-
-  /**
-   * Recursively delete a file or directory. A description of any files or directories that can not
-   * be deleted will be added to failures if failures is non-null. This method tries to delete as
-   * much as possible.
-   */
-  public static void delete(File file, StringBuilder failures) {
-    if (!file.exists())
-      return;
-
-    if (file.isDirectory()) {
-      for (File child : listFiles(file)) {
-        delete(child, failures);
-      }
-    }
-
-    try {
-      Files.delete(file.toPath());
-    } catch (IOException e) {
-      if (failures != null) {
-        failures.append("Could not delete ").append(file).append(" due to ").append(e.getMessage())
-            .append('\n');
-      }
-    }
-  }
-
-  /**
-   * Find the first file whose path matches the given regular expression. The regex is matched
-   * against the absolute path of the file.
-   * 
-   * This could probably use a lot of optimization!
-   */
-  public static File find(File baseFile, String regex) {
-    if (baseFile.getAbsolutePath().matches(regex)) {
-      return baseFile;
-    }
-    if (baseFile.exists() && baseFile.isDirectory()) {
-      for (File child : listFiles(baseFile)) {
-        File foundFile = find(child, regex);
-        if (foundFile != null) {
-          return foundFile;
-        }
-      }
-    }
-    return null;
-  }
-
-  /**
-   * Find all files in a given base directory that match the given regex. The regex is matched
-   * against the full path of each file.
-   */
-  public static List<File> findAll(File baseFile, String regex) {
-    ArrayList<File> found = new ArrayList<File>();
-    findAll(baseFile, regex, found);
-    return found;
-  }
-
-  /**
-   * Deletes all files in the given directory whose names match the given regex. If a deletion
-   * fails, it is ignored and an attempt is still made to delete the other matching files.
-   */
-  public static void deleteMatching(File baseFile, String regex) {
-    if (baseFile.exists() && baseFile.isDirectory()) {
-      for (File child : listFiles(baseFile)) {
-        if (child.getName().matches(regex)) {
-          try {
-            delete(child);
-          } catch (IOException ignore) {
-          }
-        }
-      }
-    }
-  }
-
-  /** Implementation of findAll. */
-  private static void findAll(File baseFile, String regex, List<File> found) {
-    if (baseFile.getAbsolutePath().matches(regex)) {
-      found.add(baseFile);
-    }
-    if (baseFile.exists() && baseFile.isDirectory()) {
-      for (File child : listFiles(baseFile)) {
-        findAll(child, regex, found);
-      }
-    }
-  }
-
-  /**
-   * Convert a file into a relative path from a given parent. This is useful if you want to write
-   * out the file name into that parent directory.
-   * 
-   * @param parent The parent directory.
-   * @param file The file we want to convert to a relative file.
-   * @return A file, such that new File(parent, returnValue) == file. Note that if file does not
-   * have the parent in its path, the absolute version of the file is returned.
-   */
-  public static File removeParent(File parent, File file) {
-    String absolutePath = file.getAbsolutePath();
-    String parentAbsolutePath = parent.getAbsolutePath();
-    String newPath = absolutePath.replace(parentAbsolutePath + "/", "");
-    return new File(newPath);
-  }
-
-  /**
-   * Copy a URL to a file.
-   * 
-   * @throws IOException
-   */
-  public static void copy(URL url, File file) throws IOException {
-    InputStream is = url.openStream();
-    try {
-      OutputStream os = new FileOutputStream(file);
-      try {
-        byte[] buffer = new byte[8192];
-        int read;
-        while ((read = is.read(buffer)) > 0) {
-          os.write(buffer, 0, read);
-        }
-      } finally {
-        os.close();
-      }
-    } finally {
-      is.close();
-    }
-
-  }
-
-  /**
-   * A safer version of File.mkdirs, which works around a race in the 1.5 JDK where two VMs creating
-   * the same directory chain at the same time could end up in one VM failing to create a
-   * subdirectory.
-   * 
-   * @param file
-   */
-  public static boolean mkdirs(File file) {
-    final File parentFile = file.getAbsoluteFile().getParentFile();
-    if (!parentFile.exists()) {
-      mkdirs(parentFile);
-    }
-    // As long as someone successfully created the parent file
-    // go ahead and create the child directory.
-    if (parentFile.exists()) {
-      return file.mkdir();
-    } else {
-      return false;
-    }
-  }
-
-  /**
-   * Returns the file name with the extension stripped off (if it has one).
-   * 
-   * @param fileName the file name
-   * @return the file name with the extension stripped off (if it had one)
-   */
-  public static String stripOffExtension(final String fileName) {
-    if (fileName.contains(extSeparator)) {
-      // strip off the extension and right-most "."
-      return fileName.substring(0, fileName.lastIndexOf(extSeparator));
-    }
-    return fileName;
-  }
-}
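With FileUtil deleted, the remaining hunks lean on commons-io and plain java.io.File. The sketch below maps the removed helpers not already exercised above to library calls; copyFileToDirectory, copyFile, copyDirectory and listFiles appear elsewhere in this diff, while copyURLToFile, deleteQuietly and FilenameUtils.removeExtension are known commons-io equivalents offered here for reference only.

    import java.io.File;
    import java.io.IOException;
    import java.net.URL;
    import org.apache.commons.io.FileUtils;
    import org.apache.commons.io.FilenameUtils;

    class RemovedHelperEquivalents {
      static void examples(File dir, URL resource, File target) throws IOException {
        // FileUtil.delete(File) removed a file or directory tree; deleteQuietly swallows
        // failures, while FileUtils.deleteDirectory(File) throws on failure instead.
        FileUtils.deleteQuietly(dir);

        // FileUtil.copy(URL, File) streamed a URL to a file; commons-io has a direct equivalent
        // (elsewhere this commit goes through new File(url.getFile()) and FileUtils.copyFile).
        FileUtils.copyURLToFile(resource, target);

        // FileUtil.stripOffExtension(name) has a drop-in replacement in FilenameUtils.
        String baseName = FilenameUtils.removeExtension(target.getName());
        System.out.println(baseName);
      }
    }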

http://git-wip-us.apache.org/repos/asf/geode/blob/29246ab7/geode-core/src/main/java/org/apache/geode/internal/cache/DiskInitFile.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/internal/cache/DiskInitFile.java b/geode-core/src/main/java/org/apache/geode/internal/cache/DiskInitFile.java
index 4023b71..f6bf17f 100644
--- a/geode-core/src/main/java/org/apache/geode/internal/cache/DiskInitFile.java
+++ b/geode-core/src/main/java/org/apache/geode/internal/cache/DiskInitFile.java
@@ -20,35 +20,7 @@ import it.unimi.dsi.fastutil.ints.IntOpenHashSet;
 import it.unimi.dsi.fastutil.longs.LongIterator;
 import it.unimi.dsi.fastutil.longs.LongOpenHashSet;
 import it.unimi.dsi.fastutil.objects.ObjectIterator;
-
-import java.io.BufferedInputStream;
-import java.io.ByteArrayInputStream;
-import java.io.DataInput;
-import java.io.DataInputStream;
-import java.io.DataOutput;
-import java.io.EOFException;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.io.PrintStream;
-import java.io.RandomAccessFile;
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.EnumSet;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.LinkedHashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.UUID;
-import java.util.concurrent.ConcurrentHashMap;
-
-import org.apache.logging.log4j.Logger;
-
+import org.apache.commons.io.FileUtils;
 import org.apache.geode.CancelCriterion;
 import org.apache.geode.CancelException;
 import org.apache.geode.DataSerializer;
@@ -59,7 +31,6 @@ import org.apache.geode.cache.EvictionAlgorithm;
 import org.apache.geode.cache.RegionAttributes;
 import org.apache.geode.cache.RegionDestroyedException;
 import org.apache.geode.compression.Compressor;
-import org.apache.geode.internal.FileUtil;
 import org.apache.geode.internal.HeapDataOutputStream;
 import org.apache.geode.internal.InternalDataSerializer;
 import org.apache.geode.internal.InternalInstantiator;
@@ -80,6 +51,33 @@ import org.apache.geode.internal.cache.versions.RegionVersionVector;
 import org.apache.geode.internal.i18n.LocalizedStrings;
 import org.apache.geode.internal.logging.LogService;
 import org.apache.geode.internal.logging.log4j.LogMarker;
+import org.apache.logging.log4j.Logger;
+
+import java.io.BufferedInputStream;
+import java.io.ByteArrayInputStream;
+import java.io.DataInput;
+import java.io.DataInputStream;
+import java.io.DataOutput;
+import java.io.EOFException;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.io.RandomAccessFile;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.UUID;
+import java.util.concurrent.ConcurrentHashMap;
 
 /**
  * Does all the IF file work for a DiskStoreImpl.
@@ -1506,7 +1504,7 @@ public class DiskInitFile implements DiskInitFileInterpreter {
   public void copyTo(File targetDir) throws IOException {
     lock.lock(false);
     try {
-      FileUtil.copy(this.ifFile, targetDir);
+      FileUtils.copyFileToDirectory(this.ifFile, targetDir);
     } finally {
       lock.unlock();
     }

http://git-wip-us.apache.org/repos/asf/geode/blob/29246ab7/geode-core/src/main/java/org/apache/geode/internal/cache/DiskStoreImpl.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/internal/cache/DiskStoreImpl.java b/geode-core/src/main/java/org/apache/geode/internal/cache/DiskStoreImpl.java
index e53aa5d..cce8100 100644
--- a/geode-core/src/main/java/org/apache/geode/internal/cache/DiskStoreImpl.java
+++ b/geode-core/src/main/java/org/apache/geode/internal/cache/DiskStoreImpl.java
@@ -14,25 +14,47 @@
  */
 package org.apache.geode.internal.cache;
 
+import static org.apache.geode.distributed.ConfigurationProperties.CACHE_XML_FILE;
+import static org.apache.geode.distributed.ConfigurationProperties.LOCATORS;
+import static org.apache.geode.distributed.ConfigurationProperties.MCAST_PORT;
+
+import it.unimi.dsi.fastutil.ints.IntOpenHashSet;
+import it.unimi.dsi.fastutil.longs.LongOpenHashSet;
+import org.apache.commons.io.FileUtils;
 import org.apache.geode.CancelCriterion;
 import org.apache.geode.CancelException;
 import org.apache.geode.StatisticsFactory;
 import org.apache.geode.SystemFailure;
-import org.apache.geode.cache.*;
+import org.apache.geode.cache.Cache;
+import org.apache.geode.cache.CacheClosedException;
+import org.apache.geode.cache.DiskAccessException;
+import org.apache.geode.cache.DiskStore;
+import org.apache.geode.cache.DiskStoreFactory;
+import org.apache.geode.cache.RegionDestroyedException;
 import org.apache.geode.cache.persistence.PersistentID;
 import org.apache.geode.distributed.DistributedSystem;
 import org.apache.geode.distributed.internal.DistributionConfig;
 import org.apache.geode.distributed.internal.InternalDistributedSystem;
 import org.apache.geode.distributed.internal.membership.InternalDistributedMember;
 import org.apache.geode.i18n.StringId;
-import org.apache.geode.internal.FileUtil;
 import org.apache.geode.internal.Version;
 import org.apache.geode.internal.cache.DiskEntry.Helper.ValueWrapper;
 import org.apache.geode.internal.cache.DiskEntry.RecoveredEntry;
 import org.apache.geode.internal.cache.ExportDiskRegion.ExportWriter;
 import org.apache.geode.internal.cache.lru.LRUAlgorithm;
 import org.apache.geode.internal.cache.lru.LRUStatistics;
-import org.apache.geode.internal.cache.persistence.*;
+import org.apache.geode.internal.cache.persistence.BackupInspector;
+import org.apache.geode.internal.cache.persistence.BackupManager;
+import org.apache.geode.internal.cache.persistence.BytesAndBits;
+import org.apache.geode.internal.cache.persistence.DiskRecoveryStore;
+import org.apache.geode.internal.cache.persistence.DiskRegionView;
+import org.apache.geode.internal.cache.persistence.DiskStoreFilter;
+import org.apache.geode.internal.cache.persistence.DiskStoreID;
+import org.apache.geode.internal.cache.persistence.OplogType;
+import org.apache.geode.internal.cache.persistence.PRPersistentConfig;
+import org.apache.geode.internal.cache.persistence.PersistentMemberID;
+import org.apache.geode.internal.cache.persistence.PersistentMemberPattern;
+import org.apache.geode.internal.cache.persistence.RestoreScript;
 import org.apache.geode.internal.cache.snapshot.GFSnapshot;
 import org.apache.geode.internal.cache.snapshot.GFSnapshot.SnapshotWriter;
 import org.apache.geode.internal.cache.snapshot.SnapshotPacket.SnapshotRecord;
@@ -50,17 +72,41 @@ import org.apache.geode.pdx.internal.EnumInfo;
 import org.apache.geode.pdx.internal.PdxField;
 import org.apache.geode.pdx.internal.PdxType;
 import org.apache.geode.pdx.internal.PeerTypeRegistration;
-import it.unimi.dsi.fastutil.ints.IntOpenHashSet;
-import it.unimi.dsi.fastutil.longs.LongOpenHashSet;
 import org.apache.logging.log4j.Logger;
 
-import java.io.*;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.FilenameFilter;
+import java.io.IOException;
+import java.io.PrintStream;
 import java.net.InetAddress;
 import java.nio.channels.ClosedByInterruptException;
 import java.nio.channels.FileChannel;
 import java.nio.channels.FileLock;
-import java.util.*;
-import java.util.concurrent.*;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Set;
+import java.util.TreeSet;
+import java.util.UUID;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.Future;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.RejectedExecutionException;
+import java.util.concurrent.ThreadFactory;
+import java.util.concurrent.ThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.atomic.AtomicReference;
@@ -70,8 +116,6 @@ import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
-import static org.apache.geode.distributed.ConfigurationProperties.*;
-
 /**
  * Represents a (disk-based) persistent store for region data. Used for both persistent recoverable
  * regions and overflow-only regions.
@@ -86,15 +130,15 @@ public class DiskStoreImpl implements DiskStore {
   private static final String BACKUP_DIR_PREFIX = "dir";
   public static final boolean KRF_DEBUG = Boolean.getBoolean("disk.KRF_DEBUG");
 
-  public static final int MAX_OPEN_INACTIVE_OPLOGS = Integer
-      .getInteger(DistributionConfig.GEMFIRE_PREFIX + "MAX_OPEN_INACTIVE_OPLOGS", 7).intValue();
+  public static final int MAX_OPEN_INACTIVE_OPLOGS =
+      Integer.getInteger(DistributionConfig.GEMFIRE_PREFIX + "MAX_OPEN_INACTIVE_OPLOGS", 7);
 
   /*
    * If less than 20MB (default - configurable through this property) of the available space is left
    * for logging and other misc stuff then it is better to bail out.
    */
-  public static final int MIN_DISK_SPACE_FOR_LOGS = Integer
-      .getInteger(DistributionConfig.GEMFIRE_PREFIX + "MIN_DISK_SPACE_FOR_LOGS", 20).intValue();
+  public static final int MIN_DISK_SPACE_FOR_LOGS =
+      Integer.getInteger(DistributionConfig.GEMFIRE_PREFIX + "MIN_DISK_SPACE_FOR_LOGS", 20);
 
   /** Represents an invalid id of a key/value on disk */
   public static final long INVALID_ID = 0L; // must be zero
@@ -158,17 +202,15 @@ public class DiskStoreImpl implements DiskStore {
    * other regions waiting for a compactor thread from the pool. Ignored if set to <= 0. Made non
    * static so tests can set it.
    */
-  private final int MAX_OPLOGS_PER_COMPACTION = Integer
-      .getInteger(DistributionConfig.GEMFIRE_PREFIX + "MAX_OPLOGS_PER_COMPACTION", Integer
-          .getInteger(DistributionConfig.GEMFIRE_PREFIX + "MAX_OPLOGS_PER_ROLL", 1).intValue())
-      .intValue();
+  private final int MAX_OPLOGS_PER_COMPACTION = Integer.getInteger(
+      DistributionConfig.GEMFIRE_PREFIX + "MAX_OPLOGS_PER_COMPACTION",
+      Integer.getInteger(DistributionConfig.GEMFIRE_PREFIX + "MAX_OPLOGS_PER_ROLL", 1).intValue());
   /**
    *
    */
-  public static final int MAX_CONCURRENT_COMPACTIONS = Integer
-      .getInteger(DistributionConfig.GEMFIRE_PREFIX + "MAX_CONCURRENT_COMPACTIONS", Integer
-          .getInteger(DistributionConfig.GEMFIRE_PREFIX + "MAX_CONCURRENT_ROLLS", 1).intValue())
-      .intValue();
+  public static final int MAX_CONCURRENT_COMPACTIONS = Integer.getInteger(
+      DistributionConfig.GEMFIRE_PREFIX + "MAX_CONCURRENT_COMPACTIONS",
+      Integer.getInteger(DistributionConfig.GEMFIRE_PREFIX + "MAX_CONCURRENT_ROLLS", 1).intValue());
 
   /**
    * This system property indicates that maximum number of delayed write tasks that can be pending
@@ -1441,8 +1483,8 @@ public class DiskStoreImpl implements DiskStore {
   private volatile boolean flusherThreadTerminated;
 
   private void startAsyncFlusher() {
-    final String thName = LocalizedStrings.DiskRegion_ASYNCHRONOUS_DISK_WRITER_0
-        .toLocalizedString(new Object[] {getName()});
+    final String thName =
+        LocalizedStrings.DiskRegion_ASYNCHRONOUS_DISK_WRITER_0.toLocalizedString(getName());
     this.flusherThread = new Thread(
         LoggingThreadGroup.createThreadGroup(
             LocalizedStrings.DiskRegion_DISK_WRITERS.toLocalizedString(), logger),
@@ -1802,7 +1844,7 @@ public class DiskStoreImpl implements DiskStore {
         f.deleteOnExit();
         dae = null;
         break;
-      } catch (IOException ex) {
+      } catch (IOException | IllegalStateException ex) {
         if (fs != null) {
           try {
             fs.close();
@@ -1811,16 +1853,6 @@ public class DiskStoreImpl implements DiskStore {
         }
         dae = new DiskAccessException(
             LocalizedStrings.Oplog_COULD_NOT_LOCK_0.toLocalizedString(f.getPath()), ex, this);
-      } catch (IllegalStateException ex2) {
-        // OverlappingFileLockExtension needs to be caught here see bug 41290
-        if (fs != null) {
-          try {
-            fs.close();
-          } catch (IOException ignore) {
-          }
-        }
-        dae = new DiskAccessException(
-            LocalizedStrings.Oplog_COULD_NOT_LOCK_0.toLocalizedString(f.getPath()), ex2, this);
       }
       cnt++;
       try {
@@ -1945,17 +1977,7 @@ public class DiskStoreImpl implements DiskStore {
       {
         FilenameFilter overflowFileFilter =
             new DiskStoreFilter(OplogType.OVERFLOW, true, partialFileName);
-        for (DirectoryHolder dh : this.directories) {
-          File dir = dh.getDir();
-          // delete all overflow files
-          File[] files = FileUtil.listFiles(dir, overflowFileFilter);
-          for (File file : files) {
-            boolean deleted = file.delete();
-            if (!deleted && file.exists() && logger.isDebugEnabled()) {
-              logger.debug("Could not delete file {}", file);
-            }
-          }
-        }
+        deleteFiles(overflowFileFilter);
       }
 
       persistentOplogs.createOplogs(needsOplogs, persistentBackupFiles);
@@ -1991,8 +2013,8 @@ public class DiskStoreImpl implements DiskStore {
   private void statsClose() {
     this.stats.close();
     if (this.directories != null) {
-      for (int i = 0; i < this.directories.length; i++) {
-        this.directories[i].close();
+      for (final DirectoryHolder directory : this.directories) {
+        directory.close();
       }
     }
   }
@@ -2137,9 +2159,7 @@ public class DiskStoreImpl implements DiskStore {
     try {
       // Now while holding the write lock remove any elements from the queue
       // for this region.
-      Iterator<Object> it = this.asyncQueue.iterator();
-      while (it.hasNext()) {
-        Object o = it.next();
+      for (final Object o : this.asyncQueue) {
         if (o instanceof AsyncDiskEntry) {
           AsyncDiskEntry ade = (AsyncDiskEntry) o;
           if (shouldClear(region, rvv, ade)) {
@@ -2531,8 +2551,7 @@ public class DiskStoreImpl implements DiskStore {
 
   int incBackgroundTasks() {
     getCache().getCachePerfStats().incDiskTasksWaiting();
-    int v = this.backgroundTasks.incrementAndGet();
-    return v;
+    return this.backgroundTasks.incrementAndGet();
   }
 
   void decBackgroundTasks() {
@@ -2636,13 +2655,14 @@ public class DiskStoreImpl implements DiskStore {
   }
 
   private void deleteFiles(FilenameFilter overflowFileFilter) {
-    for (int i = 0; i < this.directories.length; i++) {
-      File dir = this.directories[i].getDir();
-      File[] files = FileUtil.listFiles(dir, overflowFileFilter);
-      for (File file : files) {
-        boolean deleted = file.delete();
-        if (!deleted && file.exists() && logger.isDebugEnabled()) {
-          logger.debug("Could not delete file {}", file);
+    for (final DirectoryHolder directory : this.directories) {
+      File[] files = directory.getDir().listFiles(overflowFileFilter);
+      if (files != null) {
+        for (File file : files) {
+          boolean deleted = file.delete();
+          if (!deleted && file.exists() && logger.isDebugEnabled()) {
+            logger.debug("Could not delete file {}", file);
+          }
         }
       }
     }
@@ -2738,8 +2758,8 @@ public class DiskStoreImpl implements DiskStore {
 
     // Find all of the member's diskstore oplogs in the member's baseline
     // diskstore directory structure (*.crf,*.krf,*.drf)
-    List<File> baselineOplogFiles = FileUtil.findAll(baselineDir, ".*\\.[kdc]rf$");
-
+    Collection<File> baselineOplogFiles =
+        FileUtils.listFiles(baselineDir, new String[] {"krf", "drf", "crf"}, true);
     // Our list of oplogs to copy (those not already in the baseline)
     List<Oplog> oplogList = new LinkedList<Oplog>();
 
@@ -2858,11 +2878,7 @@ public class DiskStoreImpl implements DiskStore {
      * @return true if compaction done; false if it was not
      */
     private synchronized boolean scheduleIfNeeded(CompactableOplog[] opLogs) {
-      if (!this.scheduled) {
-        return schedule(opLogs);
-      } else {
-        return false;
-      }
+      return !this.scheduled && schedule(opLogs);
     }
 
     /**
@@ -2873,8 +2889,8 @@ public class DiskStoreImpl implements DiskStore {
       if (!this.compactorEnabled)
         return false;
       if (opLogs != null) {
-        for (int i = 0; i < opLogs.length; i++) {
-          opLogs[i].prepareForCompact();
+        for (final CompactableOplog opLog : opLogs) {
+          opLog.prepareForCompact();
         }
         this.scheduled = true;
         this.scheduledOplogs = opLogs;
@@ -2922,10 +2938,7 @@ public class DiskStoreImpl implements DiskStore {
         return true;
       }
       CancelCriterion stopper = getCache().getCancelCriterion();
-      if (stopper.isCancelInProgress()) {
-        return true;
-      }
-      return false;
+      return stopper.isCancelInProgress();
     }
 
     /**
@@ -2953,7 +2966,7 @@ public class DiskStoreImpl implements DiskStore {
           String tName = "OplogCompactor " + getName() + " for oplog " + oplogs[0].toString();
           Thread.currentThread().setName(tName);
 
-          StringBuffer buffer = new StringBuffer();
+          StringBuilder buffer = new StringBuilder();
           for (int j = 0; j < oplogs.length; ++j) {
             buffer.append(oplogs[j].toString());
             if (j + 1 < oplogs.length) {
@@ -3239,9 +3252,8 @@ public class DiskStoreImpl implements DiskStore {
 
     // NOTE - do NOT use DM.cacheTimeMillis here. See bug #49920
     long timestamp = System.currentTimeMillis();
-    PersistentMemberID id = new PersistentMemberID(getDiskStoreID(), memberId.getInetAddress(),
+    return new PersistentMemberID(getDiskStoreID(), memberId.getInetAddress(),
         firstDir.getAbsolutePath(), memberId.getName(), timestamp, (short) 0);
-    return id;
   }
 
   public PersistentID getPersistentID() {
@@ -3534,12 +3546,12 @@ public class DiskStoreImpl implements DiskStore {
     public String toString() {
       StringBuilder sb = new StringBuilder();
       sb.append("dr=").append(region.getDiskRegion().getId());
-      sb.append(" versionOnly=" + this.versionOnly);
+      sb.append(" versionOnly=").append(this.versionOnly);
       if (this.versionOnly) {
-        sb.append(" versionTag=" + this.tag);
+        sb.append(" versionTag=").append(this.tag);
       }
       if (de != null) {
-        sb.append(" key=" + de.getKey());
+        sb.append(" key=").append(de.getKey());
       } else {
         sb.append(" <END CLEAR>");
       }
@@ -3698,18 +3710,8 @@ public class DiskStoreImpl implements DiskStore {
           enums.add((EnumInfo) i);
         }
       }
-      Collections.sort(types, new Comparator<PdxType>() {
-        @Override
-        public int compare(PdxType o1, PdxType o2) {
-          return o1.getClassName().compareTo(o2.getClassName());
-        }
-      });
-      Collections.sort(enums, new Comparator<EnumInfo>() {
-        @Override
-        public int compare(EnumInfo o1, EnumInfo o2) {
-          return o1.compareTo(o2);
-        }
-      });
+      types.sort(Comparator.comparing(PdxType::getClassName));
+      enums.sort(EnumInfo::compareTo);
 
       printStream.println("PDX Types:");
       for (PdxType type : types) {
@@ -4132,7 +4134,7 @@ public class DiskStoreImpl implements DiskStore {
         }
 
         // Get an appropriate lock object for each set of oplogs.
-        Object childLock = childOplog == null ? new Object() : childOplog.lock;;
+        Object childLock = childOplog.lock;;
 
         // TODO - We really should move this lock into the disk store, but
         // until then we need to do this magic to make sure we're actually
@@ -4152,7 +4154,7 @@ public class DiskStoreImpl implements DiskStore {
           // Create the directories for this disk store
           for (int i = 0; i < directories.length; i++) {
             File dir = getBackupDir(targetDir, i);
-            if (!FileUtil.mkdirs(dir)) {
+            if (!dir.mkdirs()) {
               throw new IOException("Could not create directory " + dir);
             }
             restoreScript.addFile(directories[i].getDir(), dir);
@@ -4364,7 +4366,7 @@ public class DiskStoreImpl implements DiskStore {
     try {
       DiskStoreImpl dsi = createForOffline(dsName, dsDirs, false);
       dsi.dumpInfo(printStream, regName);
-      if (listPdxTypes != null && listPdxTypes.booleanValue()) {
+      if (listPdxTypes != null && listPdxTypes) {
         dsi.dumpPdxTypes(printStream);
       }
     } finally {
@@ -4554,7 +4556,7 @@ public class DiskStoreImpl implements DiskStore {
     boolean result = false;
     Boolean tmp = backgroundTaskThread.get();
     if (tmp != null) {
-      result = tmp.booleanValue();
+      result = tmp;
     }
     return result;
   }

http://git-wip-us.apache.org/repos/asf/geode/blob/29246ab7/geode-core/src/main/java/org/apache/geode/internal/cache/Oplog.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/internal/cache/Oplog.java b/geode-core/src/main/java/org/apache/geode/internal/cache/Oplog.java
index 270c833..f5aec51 100755
--- a/geode-core/src/main/java/org/apache/geode/internal/cache/Oplog.java
+++ b/geode-core/src/main/java/org/apache/geode/internal/cache/Oplog.java
@@ -14,14 +14,34 @@
  */
 package org.apache.geode.internal.cache;
 
+import it.unimi.dsi.fastutil.ints.Int2ObjectMap;
+import it.unimi.dsi.fastutil.ints.Int2ObjectOpenHashMap;
+import it.unimi.dsi.fastutil.longs.Long2ObjectMap;
+import it.unimi.dsi.fastutil.longs.Long2ObjectOpenHashMap;
+import it.unimi.dsi.fastutil.objects.ObjectIterator;
+import org.apache.commons.io.FileUtils;
 import org.apache.geode.CancelException;
 import org.apache.geode.DataSerializer;
 import org.apache.geode.SerializationException;
-import org.apache.geode.cache.*;
+import org.apache.geode.cache.CacheClosedException;
+import org.apache.geode.cache.CacheWriterException;
+import org.apache.geode.cache.DiskAccessException;
+import org.apache.geode.cache.EntryDestroyedException;
+import org.apache.geode.cache.EntryEvent;
+import org.apache.geode.cache.EntryNotFoundException;
+import org.apache.geode.cache.RegionDestroyedException;
+import org.apache.geode.cache.TimeoutException;
+import org.apache.geode.cache.UnsupportedVersionException;
 import org.apache.geode.distributed.OplogCancelledException;
 import org.apache.geode.distributed.internal.DM;
 import org.apache.geode.distributed.internal.DistributionConfig;
-import org.apache.geode.internal.*;
+import org.apache.geode.internal.Assert;
+import org.apache.geode.internal.ByteArrayDataInput;
+import org.apache.geode.internal.HeapDataOutputStream;
+import org.apache.geode.internal.InternalDataSerializer;
+import org.apache.geode.internal.InternalStatisticsDisabledException;
+import org.apache.geode.internal.Sendable;
+import org.apache.geode.internal.Version;
 import org.apache.geode.internal.cache.DiskEntry.Helper.Flushable;
 import org.apache.geode.internal.cache.DiskEntry.Helper.ValueWrapper;
 import org.apache.geode.internal.cache.DiskInitFile.DiskRegionFlag;
@@ -30,8 +50,19 @@ import org.apache.geode.internal.cache.DiskStoreImpl.OplogEntryIdSet;
 import org.apache.geode.internal.cache.DistributedRegion.DiskPosition;
 import org.apache.geode.internal.cache.lru.EnableLRU;
 import org.apache.geode.internal.cache.lru.NewLRUClockHand;
-import org.apache.geode.internal.cache.persistence.*;
-import org.apache.geode.internal.cache.versions.*;
+import org.apache.geode.internal.cache.persistence.BytesAndBits;
+import org.apache.geode.internal.cache.persistence.DiskRecoveryStore;
+import org.apache.geode.internal.cache.persistence.DiskRegionView;
+import org.apache.geode.internal.cache.persistence.DiskStoreID;
+import org.apache.geode.internal.cache.persistence.UninterruptibleFileChannel;
+import org.apache.geode.internal.cache.persistence.UninterruptibleRandomAccessFile;
+import org.apache.geode.internal.cache.versions.CompactVersionHolder;
+import org.apache.geode.internal.cache.versions.RegionVersionHolder;
+import org.apache.geode.internal.cache.versions.RegionVersionVector;
+import org.apache.geode.internal.cache.versions.VersionHolder;
+import org.apache.geode.internal.cache.versions.VersionSource;
+import org.apache.geode.internal.cache.versions.VersionStamp;
+import org.apache.geode.internal.cache.versions.VersionTag;
 import org.apache.geode.internal.i18n.LocalizedStrings;
 import org.apache.geode.internal.logging.LogService;
 import org.apache.geode.internal.logging.log4j.LocalizedMessage;
@@ -47,20 +78,35 @@ import org.apache.geode.internal.util.BlobHelper;
 import org.apache.geode.internal.util.IOUtils;
 import org.apache.geode.internal.util.TransformUtils;
 import org.apache.geode.pdx.internal.PdxWriterImpl;
-
-import it.unimi.dsi.fastutil.ints.Int2ObjectMap;
-import it.unimi.dsi.fastutil.ints.Int2ObjectOpenHashMap;
-import it.unimi.dsi.fastutil.longs.Long2ObjectMap;
-import it.unimi.dsi.fastutil.longs.Long2ObjectOpenHashMap;
-import it.unimi.dsi.fastutil.objects.ObjectIterator;
-
 import org.apache.logging.log4j.Logger;
 
-import java.io.*;
+import java.io.BufferedInputStream;
+import java.io.BufferedOutputStream;
+import java.io.DataInput;
+import java.io.DataInputStream;
+import java.io.DataOutput;
+import java.io.DataOutputStream;
+import java.io.EOFException;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InterruptedIOException;
+import java.io.SyncFailedException;
 import java.nio.ByteBuffer;
 import java.nio.channels.ClosedChannelException;
-import java.util.*;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
 import java.util.Map.Entry;
+import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
 import java.util.concurrent.atomic.AtomicBoolean;
@@ -1176,7 +1222,7 @@ public final class Oplog implements CompactableOplog, Flushable {
   * @return a map of baseline oplog files to copy. May be empty if the total current set for this
    *         oplog does not match the baseline.
    */
-  Map<File, File> mapBaseline(List<File> baselineOplogFiles) {
+  Map<File, File> mapBaseline(Collection<File> baselineOplogFiles) {
     // Map of baseline oplog file name to oplog file
     Map<String, File> baselineOplogMap =
         TransformUtils.transformAndMap(baselineOplogFiles, TransformUtils.fileNameTransformer);
@@ -5759,13 +5805,13 @@ public final class Oplog implements CompactableOplog, Flushable {
 
   public void copyTo(File targetDir) throws IOException {
     if (this.crf.f != null) { // fixes bug 43951
-      FileUtil.copy(this.crf.f, targetDir);
+      FileUtils.copyFileToDirectory(this.crf.f, targetDir);
     }
-    FileUtil.copy(this.drf.f, targetDir);
+    FileUtils.copyFileToDirectory(this.drf.f, targetDir);
 
     // this krf existence check fixes 45089
     if (getParent().getDiskInitFile().hasKrf(this.oplogId)) {
-      FileUtil.copy(this.getKrfFile(), targetDir);
+      FileUtils.copyFileToDirectory(this.getKrfFile(), targetDir);
     }
   }
 
@@ -5788,7 +5834,7 @@ public final class Oplog implements CompactableOplog, Flushable {
         return;
       if (!olf.f.exists())
         return;
-      assert olf.RAFClosed == true;
+      assert olf.RAFClosed;
       if (!olf.RAFClosed || olf.raf != null) {
         try {
           olf.raf.close();

http://git-wip-us.apache.org/repos/asf/geode/blob/29246ab7/geode-core/src/main/java/org/apache/geode/internal/cache/PersistentOplogSet.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/internal/cache/PersistentOplogSet.java b/geode-core/src/main/java/org/apache/geode/internal/cache/PersistentOplogSet.java
index a713941..036c740 100644
--- a/geode-core/src/main/java/org/apache/geode/internal/cache/PersistentOplogSet.java
+++ b/geode-core/src/main/java/org/apache/geode/internal/cache/PersistentOplogSet.java
@@ -15,6 +15,20 @@
 package org.apache.geode.internal.cache;
 
 import it.unimi.dsi.fastutil.longs.LongOpenHashSet;
+import org.apache.geode.cache.DiskAccessException;
+import org.apache.geode.internal.cache.DiskEntry.Helper.ValueWrapper;
+import org.apache.geode.internal.cache.DiskStoreImpl.OplogEntryIdSet;
+import org.apache.geode.internal.cache.persistence.DiskRecoveryStore;
+import org.apache.geode.internal.cache.persistence.DiskRegionView;
+import org.apache.geode.internal.cache.persistence.DiskStoreFilter;
+import org.apache.geode.internal.cache.persistence.OplogType;
+import org.apache.geode.internal.cache.versions.RegionVersionVector;
+import org.apache.geode.internal.i18n.LocalizedStrings;
+import org.apache.geode.internal.logging.LogService;
+import org.apache.geode.internal.logging.log4j.LocalizedMessage;
+import org.apache.geode.internal.logging.log4j.LogMarker;
+import org.apache.geode.internal.sequencelog.EntryLogger;
+import org.apache.logging.log4j.Logger;
 
 import java.io.File;
 import java.io.FilenameFilter;
@@ -32,23 +46,6 @@ import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicLong;
 
-import org.apache.logging.log4j.Logger;
-
-import org.apache.geode.cache.DiskAccessException;
-import org.apache.geode.internal.FileUtil;
-import org.apache.geode.internal.cache.DiskEntry.Helper.ValueWrapper;
-import org.apache.geode.internal.cache.DiskStoreImpl.OplogEntryIdSet;
-import org.apache.geode.internal.cache.persistence.DiskRecoveryStore;
-import org.apache.geode.internal.cache.persistence.DiskRegionView;
-import org.apache.geode.internal.cache.persistence.DiskStoreFilter;
-import org.apache.geode.internal.cache.persistence.OplogType;
-import org.apache.geode.internal.cache.versions.RegionVersionVector;
-import org.apache.geode.internal.i18n.LocalizedStrings;
-import org.apache.geode.internal.logging.LogService;
-import org.apache.geode.internal.logging.log4j.LocalizedMessage;
-import org.apache.geode.internal.logging.log4j.LogMarker;
-import org.apache.geode.internal.sequencelog.EntryLogger;
-
 public class PersistentOplogSet implements OplogSet {
   private static final Logger logger = LogService.getLogger();
 
@@ -212,10 +209,11 @@ public class PersistentOplogSet implements OplogSet {
     Map<File, DirectoryHolder> backupFiles = new HashMap<File, DirectoryHolder>();
     FilenameFilter backupFileFilter = getFileNameFilter(partialFileName);
     for (DirectoryHolder dh : parent.directories) {
-      File dir = dh.getDir();
-      File[] backupList = FileUtil.listFiles(dir, backupFileFilter);
-      for (File f : backupList) {
-        backupFiles.put(f, dh);
+      File[] backupList = dh.getDir().listFiles(backupFileFilter);
+      if (backupList != null) {
+        for (File f : backupList) {
+          backupFiles.put(f, dh);
+        }
       }
     }
 

http://git-wip-us.apache.org/repos/asf/geode/blob/29246ab7/geode-core/src/main/java/org/apache/geode/internal/cache/persistence/BackupManager.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/internal/cache/persistence/BackupManager.java b/geode-core/src/main/java/org/apache/geode/internal/cache/persistence/BackupManager.java
index e4e5467..d052551 100644
--- a/geode-core/src/main/java/org/apache/geode/internal/cache/persistence/BackupManager.java
+++ b/geode-core/src/main/java/org/apache/geode/internal/cache/persistence/BackupManager.java
@@ -14,6 +14,7 @@
  */
 package org.apache.geode.internal.cache.persistence;
 
+import org.apache.commons.io.FileUtils;
 import org.apache.geode.InternalGemFireError;
 import org.apache.geode.cache.persistence.PersistentID;
 import org.apache.geode.distributed.DistributedSystem;
@@ -21,7 +22,6 @@ import org.apache.geode.distributed.internal.DM;
 import org.apache.geode.distributed.internal.DistributionConfig;
 import org.apache.geode.distributed.internal.MembershipListener;
 import org.apache.geode.distributed.internal.membership.InternalDistributedMember;
-import org.apache.geode.internal.FileUtil;
 import org.apache.geode.internal.JarClassLoader;
 import org.apache.geode.internal.JarDeployer;
 import org.apache.geode.internal.cache.DiskStoreImpl;
@@ -30,10 +30,18 @@ import org.apache.geode.internal.i18n.LocalizedStrings;
 
 import java.io.File;
 import java.io.FileOutputStream;
+import java.io.FilenameFilter;
 import java.io.IOException;
 import java.net.URL;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Set;
 import java.util.concurrent.CountDownLatch;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
 
 /**
  * This class manages the state and logic to backup a single cache.
@@ -108,20 +116,19 @@ public class BackupManager implements MembershipListener {
      * Find the first matching DiskStoreId directory for this member.
      */
     for (DiskStoreImpl diskStore : cache.listDiskStoresIncludingRegionOwned()) {
-      baselineDir = FileUtil.find(baselineParentDir, ".*" + diskStore.getBackupDirName() + "$");
-      if (null != baselineDir) {
-        break;
-      }
-    }
+      File[] matchingFiles = baselineParentDir.listFiles(new FilenameFilter() {
+        Pattern pattern = Pattern.compile(".*" + diskStore.getBackupDirName() + "$");
 
-    /*
-     * We found it? Good. Set this member's baseline to the backed up disk store's member dir (two
-     * levels up).
-     */
-    if (null != baselineDir) {
-      baselineDir = baselineDir.getParentFile().getParentFile();
+        public boolean accept(File dir, String name) {
+          Matcher m = pattern.matcher(name);
+          return m.find();
+        }
+      });
+      // We found it? Good. Set this member's baseline to the backed up disk store's member dir (two
+      // levels up).
+      if (null != matchingFiles && matchingFiles.length > 0)
+        baselineDir = matchingFiles[0].getParentFile().getParentFile();
     }
-
     return baselineDir;
   }
 
@@ -233,19 +240,19 @@ public class BackupManager implements MembershipListener {
 
   private void backupConfigFiles(RestoreScript restoreScript, File backupDir) throws IOException {
     File configBackupDir = new File(backupDir, CONFIG);
-    FileUtil.mkdirs(configBackupDir);
+    configBackupDir.mkdirs();
     URL url = cache.getCacheXmlURL();
     if (url != null) {
       File cacheXMLBackup =
           new File(configBackupDir, DistributionConfig.DEFAULT_CACHE_XML_FILE.getName());
-      FileUtil.copy(url, cacheXMLBackup);
+      FileUtils.copyFile(new File(cache.getCacheXmlURL().getFile()), cacheXMLBackup);
     }
 
-    URL propertyURL = DistributedSystem.getPropertyFileURL();
+    URL propertyURL = DistributedSystem.getPropertiesFileURL();
     if (propertyURL != null) {
       File propertyBackup =
           new File(configBackupDir, DistributionConfig.GEMFIRE_PREFIX + "properties");
-      FileUtil.copy(propertyURL, propertyBackup);
+      FileUtils.copyFile(new File(DistributedSystem.getPropertiesFile()), propertyBackup);
     }
 
     // TODO sbawaska: should the gfsecurity.properties file be backed up?
@@ -261,7 +268,11 @@ public class BackupManager implements MembershipListener {
       if (original.exists()) {
         original = original.getAbsoluteFile();
         File dest = new File(userBackupDir, original.getName());
-        FileUtil.copy(original, dest);
+        if (original.isDirectory()) {
+          FileUtils.copyDirectory(original, dest);
+        } else {
+          FileUtils.copyFile(original, dest);
+        }
         restoreScript.addExistenceTest(original);
         restoreScript.addFile(original, dest);
       }
@@ -296,7 +307,11 @@ public class BackupManager implements MembershipListener {
         for (JarClassLoader loader : jarList) {
           File source = new File(loader.getFileCanonicalPath());
           File dest = new File(userBackupDir, source.getName());
-          FileUtil.copy(source, dest);
+          if (source.isDirectory()) {
+            FileUtils.copyDirectory(source, dest);
+          } else {
+            FileUtils.copyFile(source, dest);
+          }
           restoreScript.addFile(source, dest);
         }
       }
@@ -325,7 +340,7 @@ public class BackupManager implements MembershipListener {
       throw new IOException("Backup directory " + backupDir.getAbsolutePath() + " already exists.");
     }
 
-    if (!FileUtil.mkdirs(backupDir)) {
+    if (!backupDir.mkdirs()) {
       throw new IOException("Could not create directory: " + backupDir);
     }
 

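Two replacement patterns recur in the BackupManager hunks above: a regex-driven FilenameFilter standing in for the removed FileUtil.find, and an isDirectory dispatch standing in for the removed FileUtil.copy. A condensed sketch of both follows; class and method names are illustrative, and note that a File.listFiles-based lookup only inspects direct children (whether FileUtil.find also recursed into subdirectories is not visible in this diff):

import org.apache.commons.io.FileUtils;

import java.io.File;
import java.io.IOException;
import java.util.regex.Pattern;

public class BackupCopySketch {
  // Approximation of FileUtil.find(dir, regex): first direct child of dir
  // whose name matches the regular expression, or null if none.
  static File findFirstMatch(File dir, String regex) {
    Pattern pattern = Pattern.compile(regex);
    File[] matches = dir.listFiles((d, name) -> pattern.matcher(name).find());
    return (matches != null && matches.length > 0) ? matches[0] : null;
  }

  // Stand-in for FileUtil.copy(File, File): commons-io splits the operation
  // into copyDirectory and copyFile, so callers dispatch on the source type.
  static void copy(File source, File dest) throws IOException {
    if (source.isDirectory()) {
      FileUtils.copyDirectory(source, dest);
    } else {
      FileUtils.copyFile(source, dest);
    }
  }
}
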
http://git-wip-us.apache.org/repos/asf/geode/blob/29246ab7/geode-core/src/main/java/org/apache/geode/internal/cache/persistence/RestoreScript.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/internal/cache/persistence/RestoreScript.java b/geode-core/src/main/java/org/apache/geode/internal/cache/persistence/RestoreScript.java
index 86f880e..3076e04 100644
--- a/geode-core/src/main/java/org/apache/geode/internal/cache/persistence/RestoreScript.java
+++ b/geode-core/src/main/java/org/apache/geode/internal/cache/persistence/RestoreScript.java
@@ -23,8 +23,6 @@ import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.geode.internal.FileUtil;
-
 /**
  * This class is used to automatically generate a restore script for a backup. It keeps a list of
  * files that were backed up, and a list of files that we should test for to avoid overriding when
@@ -100,7 +98,7 @@ public class RestoreScript {
       for (Map.Entry<File, File> entry : backedUpFiles.entrySet()) {
         File backup = entry.getKey();
         boolean backupHasFiles = backup.isDirectory() && backup.list().length != 0;
-        backup = FileUtil.removeParent(outputDir, backup);
+        backup = outputDir.toPath().relativize(backup.toPath()).toFile();
         File original = entry.getValue();
         if (original.isDirectory()) {
           osGenerator.writeCopyDirectoryContents(writer, backup, original, backupHasFiles);
@@ -136,7 +134,7 @@ public class RestoreScript {
   private boolean isWindows() {
     String os = System.getProperty("os.name");
     if (os != null) {
-      if (os.indexOf("Windows") != -1) {
+      if (os.contains("Windows")) {
         return true;
       }
     }

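The FileUtil.removeParent replacement above leans on java.nio.file.Path.relativize. A small self-contained sketch, with illustrative names and sample paths:

import java.io.File;
import java.nio.file.Path;

public class RelativizeSketch {
  // Express child relative to parent via Path.relativize, replacing the
  // removed FileUtil.removeParent(parent, child).
  static File relativeTo(File parent, File child) {
    Path relative = parent.toPath().relativize(child.toPath());
    return relative.toFile();
  }

  public static void main(String[] args) {
    File out = new File("backup-2017-02-10");
    File backedUp = new File("backup-2017-02-10/diskstores/ds1/BACKUPds1.if");
    // Prints diskstores/ds1/BACKUPds1.if (with platform-specific separators).
    System.out.println(relativeTo(out, backedUp));
  }
}
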
http://git-wip-us.apache.org/repos/asf/geode/blob/29246ab7/geode-core/src/main/java/org/apache/geode/internal/io/MainWithChildrenRollingFileHandler.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/internal/io/MainWithChildrenRollingFileHandler.java b/geode-core/src/main/java/org/apache/geode/internal/io/MainWithChildrenRollingFileHandler.java
index 11d10af..f814aeb 100644
--- a/geode-core/src/main/java/org/apache/geode/internal/io/MainWithChildrenRollingFileHandler.java
+++ b/geode-core/src/main/java/org/apache/geode/internal/io/MainWithChildrenRollingFileHandler.java
@@ -15,11 +15,9 @@
 package org.apache.geode.internal.io;
 
 import org.apache.geode.i18n.LogWriterI18n;
-import org.apache.geode.internal.FileUtil;
 import org.apache.geode.internal.i18n.LocalizedStrings;
 
 import java.io.File;
-import java.io.FilenameFilter;
 import java.util.Arrays;
 import java.util.Comparator;
 import java.util.regex.Pattern;
@@ -42,7 +40,7 @@ public class MainWithChildrenRollingFileHandler implements RollingFileHandler {
     File dir = getParentFile(file.getAbsoluteFile());
     int endIdx1 = file.getName().indexOf('-');
     int endIdx2 = file.getName().lastIndexOf('.');
-    String baseName = file.getName();
+    String baseName;
     if (endIdx1 != -1) {
       baseName = file.getName().substring(0, endIdx1);
     } else {
@@ -222,26 +220,13 @@ public class MainWithChildrenRollingFileHandler implements RollingFileHandler {
   }
 
   private File[] findChildren(final File dir, final Pattern pattern) {
-    return FileUtil.listFiles(dir, new FilenameFilter() {
-      @Override
-      public boolean accept(File dir, String name) {
-        return pattern.matcher(name).matches();
-      }
-    });
+    return dir.listFiles((dir1, name) -> pattern.matcher(name).matches());
   }
 
   private File[] findChildrenExcept(final File dir, final Pattern pattern, final File exception) {
     final String exceptionName = (exception == null) ? null : exception.getName();
-    return FileUtil.listFiles(dir, new FilenameFilter() {
-      @Override
-      public boolean accept(File dir, String name) {
-        if (name.equals(exceptionName)) {
-          return false;
-        } else {
-          return pattern.matcher(name).matches();
-        }
-      }
-    });
+    return dir
+        .listFiles((dir1, name) -> !name.equals(exceptionName) && pattern.matcher(name).matches());
   }
 
 }

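FilenameFilter is a functional interface, which is what lets the anonymous classes above collapse into lambdas on File.listFiles. Restated standalone below (class name is illustrative); the one caveat is that File.listFiles can return null, so callers must tolerate a null result:

import java.io.File;
import java.util.regex.Pattern;

public class LambdaFilterSketch {
  // Children whose names match the pattern; may return null on I/O error.
  static File[] findChildren(File dir, Pattern pattern) {
    return dir.listFiles((d, name) -> pattern.matcher(name).matches());
  }

  // Same, excluding one file by name.
  static File[] findChildrenExcept(File dir, Pattern pattern, File exception) {
    String skip = (exception == null) ? null : exception.getName();
    return dir.listFiles((d, name) -> !name.equals(skip) && pattern.matcher(name).matches());
  }
}
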
http://git-wip-us.apache.org/repos/asf/geode/blob/29246ab7/geode-core/src/main/java/org/apache/geode/internal/logging/MergeLogFiles.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/internal/logging/MergeLogFiles.java b/geode-core/src/main/java/org/apache/geode/internal/logging/MergeLogFiles.java
index 7bb94ef..27f2116 100644
--- a/geode-core/src/main/java/org/apache/geode/internal/logging/MergeLogFiles.java
+++ b/geode-core/src/main/java/org/apache/geode/internal/logging/MergeLogFiles.java
@@ -14,6 +14,10 @@
  */
 package org.apache.geode.internal.logging;
 
+import org.apache.geode.SystemFailure;
+import org.apache.geode.internal.Assert;
+import org.apache.geode.internal.i18n.LocalizedStrings;
+
 import java.io.BufferedReader;
 import java.io.File;
 import java.io.FileInputStream;
@@ -37,11 +41,6 @@ import java.util.concurrent.LinkedBlockingQueue;
 import java.util.regex.Pattern;
 import java.util.zip.GZIPInputStream;
 
-import org.apache.geode.SystemFailure;
-import org.apache.geode.internal.Assert;
-import org.apache.geode.internal.FileUtil;
-import org.apache.geode.internal.i18n.LocalizedStrings;
-
 /**
  * This program merges entries from multiple GemFire log files (those written using a
  * {@link org.apache.geode.i18n.LogWriterI18n}) together, sorting them by their timestamp. Note that
@@ -188,17 +187,19 @@ public class MergeLogFiles {
    * @param dirName directory to search
    * @return all of the .log files found (Files)
    */
-  static ArrayList getLogFiles(String dirName) {
-    ArrayList result = new ArrayList();
+  static ArrayList<File> getLogFiles(String dirName) {
+    ArrayList<File> result = new ArrayList<>();
 
     File dir = new File(dirName);
-    File names[] = FileUtil.listFiles(dir);
-    for (int i = 0; i < names.length; i++) {
-      String n = names[i].getAbsolutePath();
-      if (n.endsWith(".log") || n.endsWith(".log.gz")) {
-        result.add(names[i]);
-      }
-    } // for
+    File names[] = dir.listFiles();
+    if (names != null) {
+      for (final File name : names) {
+        String n = name.getAbsolutePath();
+        if (n.endsWith(".log") || n.endsWith(".log.gz")) {
+          result.add(name);
+        }
+      } // for
+    }
     return result;
   }
 

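An alternative to the rewritten getLogFiles (not the committed version) that pushes the .log/.log.gz suffix test into the FilenameFilter itself while keeping the same null guard:

import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class LogFileScanSketch {
  // Collects *.log and *.log.gz files from a directory, guarding against the
  // null that File.listFiles returns on error or for a non-directory.
  static List<File> getLogFiles(String dirName) {
    List<File> result = new ArrayList<>();
    File[] matches = new File(dirName)
        .listFiles((dir, name) -> name.endsWith(".log") || name.endsWith(".log.gz"));
    if (matches != null) {
      result.addAll(Arrays.asList(matches));
    }
    return result;
  }
}
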
http://git-wip-us.apache.org/repos/asf/geode/blob/29246ab7/geode-core/src/test/java/org/apache/geode/cache/client/ClientCacheFactoryJUnitTest.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/org/apache/geode/cache/client/ClientCacheFactoryJUnitTest.java b/geode-core/src/test/java/org/apache/geode/cache/client/ClientCacheFactoryJUnitTest.java
index f881d38..c633a47 100644
--- a/geode-core/src/test/java/org/apache/geode/cache/client/ClientCacheFactoryJUnitTest.java
+++ b/geode-core/src/test/java/org/apache/geode/cache/client/ClientCacheFactoryJUnitTest.java
@@ -15,6 +15,15 @@
 
 package org.apache.geode.cache.client;
 
+import static org.apache.geode.distributed.ConfigurationProperties.CACHE_XML_FILE;
+import static org.apache.geode.distributed.ConfigurationProperties.LOCATORS;
+import static org.apache.geode.distributed.ConfigurationProperties.LOG_LEVEL;
+import static org.apache.geode.distributed.ConfigurationProperties.MCAST_PORT;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+import static org.junit.runners.MethodSorters.NAME_ASCENDING;
+
+import org.apache.commons.io.FileUtils;
 import org.apache.geode.DataSerializer;
 import org.apache.geode.cache.RegionService;
 import org.apache.geode.cache.client.internal.ProxyCache;
@@ -24,7 +33,6 @@ import org.apache.geode.distributed.DistributedSystem;
 import org.apache.geode.distributed.internal.InternalDistributedSystem;
 import org.apache.geode.distributed.internal.membership.InternalDistributedMember;
 import org.apache.geode.distributed.internal.membership.gms.GMSMember;
-import org.apache.geode.internal.FileUtil;
 import org.apache.geode.internal.HeapDataOutputStream;
 import org.apache.geode.internal.Version;
 import org.apache.geode.internal.VersionedDataInputStream;
@@ -50,11 +58,6 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Properties;
 
-import static org.apache.geode.distributed.ConfigurationProperties.*;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
-import static org.junit.runners.MethodSorters.NAME_ASCENDING;
-
 /**
  * Unit test for the ClientCacheFactory class
  * 
@@ -125,7 +128,7 @@ public class ClientCacheFactoryJUnitTest {
     this.tmpFile.deleteOnExit();
     URL url = ClientCacheFactoryJUnitTest.class
         .getResource("ClientCacheFactoryJUnitTest_single_pool.xml");;
-    FileUtil.copy(url, this.tmpFile);
+    FileUtils.copyFile(new File(url.getFile()), this.tmpFile);
     this.cc = new ClientCacheFactory().set(CACHE_XML_FILE, this.tmpFile.getAbsolutePath()).create();
     GemFireCacheImpl gfc = (GemFireCacheImpl) this.cc;
     assertEquals(true, gfc.isClient());

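One caveat on the new File(url.getFile()) idiom used above: it only works for file: URLs and leaves percent-encoding (e.g. %20) in the path, so it breaks for resources packed inside a jar or paths containing spaces. Commons-io also offers FileUtils.copyURLToFile, which streams straight from the URL; a hedged sketch in case that fits better (helper name is illustrative):

import org.apache.commons.io.FileUtils;

import java.io.File;
import java.io.IOException;
import java.net.URL;

public class CopyClasspathResourceSketch {
  // Copies a classpath resource to a destination file by streaming from the
  // URL, which also works when the resource lives inside a jar.
  static void copyResource(Class<?> owner, String resourceName, File dest) throws IOException {
    URL url = owner.getResource(resourceName);
    if (url == null) {
      throw new IOException("Resource not found: " + resourceName);
    }
    FileUtils.copyURLToFile(url, dest);
  }
}
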
http://git-wip-us.apache.org/repos/asf/geode/blob/29246ab7/geode-core/src/test/java/org/apache/geode/cache/query/QueryTestUtils.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/org/apache/geode/cache/query/QueryTestUtils.java b/geode-core/src/test/java/org/apache/geode/cache/query/QueryTestUtils.java
index 2d6921b..f53faed 100644
--- a/geode-core/src/test/java/org/apache/geode/cache/query/QueryTestUtils.java
+++ b/geode-core/src/test/java/org/apache/geode/cache/query/QueryTestUtils.java
@@ -14,15 +14,9 @@
  */
 package org.apache.geode.cache.query;
 
-import static org.apache.geode.distributed.ConfigurationProperties.*;
-
-import java.io.File;
-import java.io.IOException;
-import java.io.Serializable;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Properties;
+import static org.apache.geode.distributed.ConfigurationProperties.MCAST_PORT;
 
+import org.apache.commons.io.FileUtils;
 import org.apache.geode.LogWriter;
 import org.apache.geode.cache.AttributesFactory;
 import org.apache.geode.cache.Cache;
@@ -38,6 +32,13 @@ import org.apache.geode.cache30.CacheSerializableRunnable;
 import org.apache.geode.test.dunit.SerializableRunnable;
 import org.apache.geode.test.dunit.VM;
 
+import java.io.File;
+import java.io.IOException;
+import java.io.Serializable;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Properties;
+
 /**
  * Utility class for testing supported queries
  */
@@ -1763,12 +1764,11 @@ public class QueryTestUtils implements Serializable {
     Object[] result = new Object[qarr.length];
     String query = null;
     int j = 0;
-    for (int i = 0; i < qarr.length; i++) {
-      query = queries.get(qarr[i]);
-      if (query.indexOf("distinct") == -1)
-        query = query.replaceFirst("select", "select distinct");
-      else if (query.indexOf("DISTINCT") == -1)
+    for (final String aQarr : qarr) {
+      query = queries.get(aQarr);
+      if (!query.toLowerCase().contains("distinct")) {
         query = query.replaceFirst("select", "select distinct");
+      }
 
       // hydra.getLogWriter().info("\nExecuting query: " + query);
       try {
@@ -1799,7 +1799,7 @@ public class QueryTestUtils implements Serializable {
 
   public static void closeCacheInVM(VM vm) {
     vm.invoke(() -> {
-      getInstance().cache.close();
+      cache.close();
     });
   }
 
@@ -1822,7 +1822,7 @@ public class QueryTestUtils implements Serializable {
 
   public static File createTestRootDiskStore(String testName) throws IOException {
     File diskDir = new File(testName).getAbsoluteFile();
-    org.apache.geode.internal.FileUtil.delete(diskDir);
+    FileUtils.deleteDirectory(diskDir);
     diskDir.mkdir();
     diskDir.deleteOnExit();
     return diskDir;

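FileUtils.deleteDirectory, used above, returns silently when the directory is absent but throws IOException if anything inside cannot be removed. For test cleanup, FileUtils.deleteQuietly is the lenient variant; a sketch of an alternative helper under that assumption (names are illustrative, not from the commit):

import org.apache.commons.io.FileUtils;

import java.io.File;
import java.io.IOException;

public class TestDiskDirSketch {
  // Recreates a clean scratch directory for a test run.
  static File freshDir(String name) throws IOException {
    File dir = new File(name).getAbsoluteFile();
    FileUtils.deleteQuietly(dir);   // best-effort cleanup of leftovers, never throws
    FileUtils.forceMkdir(dir);      // fails loudly if the directory cannot be created
    dir.deleteOnExit();
    return dir;
  }
}
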
http://git-wip-us.apache.org/repos/asf/geode/blob/29246ab7/geode-core/src/test/java/org/apache/geode/cache/query/dunit/QueryIndexUsingXMLDUnitTest.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/org/apache/geode/cache/query/dunit/QueryIndexUsingXMLDUnitTest.java b/geode-core/src/test/java/org/apache/geode/cache/query/dunit/QueryIndexUsingXMLDUnitTest.java
index 66c4ecf..9bd20a6 100644
--- a/geode-core/src/test/java/org/apache/geode/cache/query/dunit/QueryIndexUsingXMLDUnitTest.java
+++ b/geode-core/src/test/java/org/apache/geode/cache/query/dunit/QueryIndexUsingXMLDUnitTest.java
@@ -14,18 +14,9 @@
  */
 package org.apache.geode.cache.query.dunit;
 
-import static org.junit.Assert.*;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Map;
-import java.util.Properties;
-
-import org.junit.Ignore;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import static org.junit.Assert.fail;
 
+import org.apache.commons.io.FileUtils;
 import org.apache.geode.LogWriter;
 import org.apache.geode.cache.Cache;
 import org.apache.geode.cache.CacheExistsException;
@@ -44,7 +35,6 @@ import org.apache.geode.cache.query.internal.index.PartitionedIndex;
 import org.apache.geode.cache30.CacheSerializableRunnable;
 import org.apache.geode.distributed.internal.DistributionConfig;
 import org.apache.geode.distributed.internal.InternalDistributedSystem;
-import org.apache.geode.internal.FileUtil;
 import org.apache.geode.internal.cache.GemFireCacheImpl;
 import org.apache.geode.internal.cache.LocalRegion;
 import org.apache.geode.internal.cache.PartitionedRegion;
@@ -62,6 +52,15 @@ import org.apache.geode.test.dunit.WaitCriterion;
 import org.apache.geode.test.dunit.cache.internal.JUnit4CacheTestCase;
 import org.apache.geode.test.junit.categories.DistributedTest;
 import org.apache.geode.util.test.TestUtil;
+import org.junit.Ignore;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Map;
+import java.util.Properties;
 
 @Category(DistributedTest.class)
 public class QueryIndexUsingXMLDUnitTest extends JUnit4CacheTestCase {
@@ -70,7 +69,7 @@ public class QueryIndexUsingXMLDUnitTest extends JUnit4CacheTestCase {
 
   static private final int WAIT_DEFAULT = (60 * 1000);
 
-  public static final long MAX_TIME = Integer.getInteger(WAIT_PROPERTY, WAIT_DEFAULT).intValue();
+  public static final long MAX_TIME = Integer.getInteger(WAIT_PROPERTY, WAIT_DEFAULT);
 
   final String name = "PartionedPortfolios";
   final String repRegName = "Portfolios";
@@ -120,7 +119,9 @@ public class QueryIndexUsingXMLDUnitTest extends JUnit4CacheTestCase {
     // avoid creating a new cache just to get the diskstore name
     Invoke.invokeInEveryVM(resetTestHook());
     disconnectFromDS();
-    FileUtil.delete(new File(GemFireCacheImpl.DEFAULT_DS_NAME).getAbsoluteFile());
+    File deleteMe = new File(GemFireCacheImpl.DEFAULT_DS_NAME).getAbsoluteFile();
+    if (deleteMe.exists())
+      FileUtils.forceDelete(deleteMe);
   }
 
   /**
@@ -882,7 +883,7 @@ public class QueryIndexUsingXMLDUnitTest extends JUnit4CacheTestCase {
         // remove the disk store.
         File diskDir = new File(diskStoreName).getAbsoluteFile();
         try {
-          org.apache.geode.internal.FileUtil.delete(diskDir);
+          FileUtils.deleteDirectory(diskDir);
         } catch (Exception ex) {
           fail("Failed to delete the disDir");
         }

http://git-wip-us.apache.org/repos/asf/geode/blob/29246ab7/geode-core/src/test/java/org/apache/geode/cache/query/functional/IndexCreationJUnitTest.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/org/apache/geode/cache/query/functional/IndexCreationJUnitTest.java b/geode-core/src/test/java/org/apache/geode/cache/query/functional/IndexCreationJUnitTest.java
index f126146..620ab17 100755
--- a/geode-core/src/test/java/org/apache/geode/cache/query/functional/IndexCreationJUnitTest.java
+++ b/geode-core/src/test/java/org/apache/geode/cache/query/functional/IndexCreationJUnitTest.java
@@ -24,23 +24,17 @@
  */
 package org.apache.geode.cache.query.functional;
 
-import static org.apache.geode.distributed.ConfigurationProperties.*;
-import static org.junit.Assert.*;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.Properties;
-import java.util.Set;
-
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Ignore;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-
+import static org.apache.geode.distributed.ConfigurationProperties.CACHE_XML_FILE;
+import static org.apache.geode.distributed.ConfigurationProperties.ENABLE_TIME_STATISTICS;
+import static org.apache.geode.distributed.ConfigurationProperties.MCAST_PORT;
+import static org.apache.geode.distributed.ConfigurationProperties.NAME;
+import static org.apache.geode.distributed.ConfigurationProperties.STATISTIC_SAMPLING_ENABLED;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import org.apache.commons.io.FileUtils;
 import org.apache.geode.cache.AttributesFactory;
 import org.apache.geode.cache.Cache;
 import org.apache.geode.cache.CacheFactory;
@@ -55,7 +49,6 @@ import org.apache.geode.cache.query.Query;
 import org.apache.geode.cache.query.QueryInvalidException;
 import org.apache.geode.cache.query.QueryService;
 import org.apache.geode.cache.query.SelectResults;
-import org.apache.geode.cache.query.Utils;
 import org.apache.geode.cache.query.data.ComparableWrapper;
 import org.apache.geode.cache.query.data.Portfolio;
 import org.apache.geode.cache.query.internal.DefaultQueryService;
@@ -71,9 +64,21 @@ import org.apache.geode.cache.query.types.ObjectType;
 import org.apache.geode.cache.query.types.StructType;
 import org.apache.geode.distributed.DistributedSystem;
 import org.apache.geode.distributed.internal.InternalDistributedSystem;
-import org.apache.geode.internal.FileUtil;
 import org.apache.geode.internal.cache.LocalRegion;
 import org.apache.geode.test.junit.categories.IntegrationTest;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.Properties;
+import java.util.Set;
 
 @Category(IntegrationTest.class)
 public class IndexCreationJUnitTest {
@@ -815,7 +820,7 @@ public class IndexCreationJUnitTest {
           .execute();
       assertEquals("OQL index results did not match", 1, results.size());
       ds.disconnect();
-      FileUtil.delete(file);
+      FileUtils.deleteDirectory(file);
     }
   }
 
@@ -845,7 +850,7 @@ public class IndexCreationJUnitTest {
         .newQuery("<trace>SELECT * FROM " + localRegion.getFullPath() + " Where ID > 0").execute();
     assertEquals("OQL index results did not match", 99, results.size());
     ds.disconnect();
-    FileUtil.delete(file);
+    FileUtils.deleteDirectory(file);
   }
 
   @Test
@@ -876,7 +881,7 @@ public class IndexCreationJUnitTest {
         .execute();
     assertEquals("OQL index results did not match", 50, results.size());
     ds.disconnect();
-    FileUtil.delete(file);
+    FileUtils.deleteDirectory(file);
   }
 
   @Test
@@ -954,7 +959,7 @@ public class IndexCreationJUnitTest {
           .execute();
       assertEquals("OQL index results did not match", 1, results.size());
       ds.disconnect();
-      FileUtil.delete(file);
+      FileUtils.deleteDirectory(file);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/geode/blob/29246ab7/geode-core/src/test/java/org/apache/geode/distributed/AbstractLauncherIntegrationTestCase.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/org/apache/geode/distributed/AbstractLauncherIntegrationTestCase.java b/geode-core/src/test/java/org/apache/geode/distributed/AbstractLauncherIntegrationTestCase.java
index 0115193..09fa09e 100755
--- a/geode-core/src/test/java/org/apache/geode/distributed/AbstractLauncherIntegrationTestCase.java
+++ b/geode-core/src/test/java/org/apache/geode/distributed/AbstractLauncherIntegrationTestCase.java
@@ -14,9 +14,11 @@
  */
 package org.apache.geode.distributed;
 
+import static org.apache.geode.distributed.ConfigurationProperties.MCAST_PORT;
+import static org.junit.Assert.assertTrue;
+
 import org.apache.geode.distributed.internal.DistributionConfig;
 import org.apache.geode.distributed.internal.InternalDistributedSystem;
-import org.apache.geode.internal.FileUtil;
 import org.apache.geode.internal.lang.StringUtils;
 import org.apache.geode.internal.logging.LogService;
 import org.apache.geode.internal.process.PidUnavailableException;
@@ -31,16 +33,18 @@ import org.junit.Rule;
 import org.junit.contrib.java.lang.system.RestoreSystemProperties;
 import org.junit.rules.TestName;
 
-import java.io.*;
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileReader;
+import java.io.FileWriter;
+import java.io.IOException;
 import java.net.ServerSocket;
+import java.nio.file.Files;
 import java.util.List;
 import java.util.Properties;
 import java.util.concurrent.Callable;
 import java.util.concurrent.atomic.AtomicBoolean;
 
-import static org.apache.geode.distributed.ConfigurationProperties.MCAST_PORT;
-import static org.junit.Assert.assertTrue;
-
 /**
  * @since GemFire 8.0
  */
@@ -105,7 +109,7 @@ public abstract class AbstractLauncherIntegrationTestCase {
           return true;
         }
         try {
-          FileUtil.delete(file);
+          Files.delete(file.toPath());
         } catch (IOException e) {
         }
         return !file.exists();

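Files.delete, used above, throws NoSuchFileException when the file is already gone, and the surrounding retry loop swallows that along with every other IOException. Files.deleteIfExists distinguishes the two; a small sketch of that variant (illustrative only):

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;

public class DeleteWithRetrySketch {
  // Files.deleteIfExists does not treat a concurrent deletion as a failure,
  // which fits a "poll until the file disappears" loop.
  static boolean tryDelete(File file) {
    try {
      Files.deleteIfExists(file.toPath());
    } catch (IOException ignored) {
      // e.g. still locked by another process; the caller polls again
    }
    return !file.exists();
  }
}
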
http://git-wip-us.apache.org/repos/asf/geode/blob/29246ab7/geode-core/src/test/java/org/apache/geode/internal/FileUtilJUnitTest.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/org/apache/geode/internal/FileUtilJUnitTest.java b/geode-core/src/test/java/org/apache/geode/internal/FileUtilJUnitTest.java
deleted file mode 100644
index 942059e..0000000
--- a/geode-core/src/test/java/org/apache/geode/internal/FileUtilJUnitTest.java
+++ /dev/null
@@ -1,101 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
- * agreements. See the NOTICE file distributed with this work for additional information regarding
- * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License. You may obtain a
- * copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.geode.internal;
-
-import static org.junit.Assert.*;
-
-import java.io.DataInput;
-import java.io.DataInputStream;
-import java.io.DataOutput;
-import java.io.DataOutputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
-import java.io.IOException;
-
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-
-import org.apache.geode.test.junit.categories.IntegrationTest;
-
-@Category(IntegrationTest.class)
-public class FileUtilJUnitTest {
-
-  @Test
-  public void testCopyFile() throws IOException {
-    File source = File.createTempFile("FileUtilJUnitTest", null);
-    File dest = File.createTempFile("FileUtilJUnitTest", null);
-    try {
-      FileOutputStream fos = new FileOutputStream(source);
-      DataOutput daos = new DataOutputStream(fos);
-      try {
-        for (long i = 0; i < FileUtil.MAX_TRANSFER_SIZE * 2.5 / 8; i++) {
-          daos.writeLong(i);
-        }
-      } finally {
-        fos.close();
-      }
-      FileUtil.copy(source, dest);
-
-      FileInputStream fis = new FileInputStream(dest);
-      DataInput dis = new DataInputStream(fis);
-      try {
-        for (long i = 0; i < FileUtil.MAX_TRANSFER_SIZE * 2.5 / 8; i++) {
-          assertEquals(i, dis.readLong());
-        }
-        assertEquals(-1, fis.read());
-      } finally {
-        fis.close();
-      }
-    } finally {
-      source.delete();
-      dest.delete();
-    }
-  }
-
-  @Test
-  public void testStripOffExtension() {
-    String fileName = "filename";
-    assertEquals("filename", FileUtil.stripOffExtension(fileName));
-    fileName = "filename.txt";
-    assertEquals("filename", FileUtil.stripOffExtension(fileName));
-    fileName = "filename.txt.txt";
-    assertEquals("filename.txt", FileUtil.stripOffExtension(fileName));
-    fileName = "filename.txt.log";
-    assertEquals("filename.txt", FileUtil.stripOffExtension(fileName));
-    fileName = "/dir/dir/dir/dir/filename.txt.log";
-    assertEquals("/dir/dir/dir/dir/filename.txt", FileUtil.stripOffExtension(fileName));
-  }
-
-  @Test
-  public void testDeleteFile() throws IOException {
-    File file = File.createTempFile("FileUtilJUnitTest", null);
-    assertTrue(file.exists());
-    FileUtil.delete(file);
-    assertFalse(file.exists());
-  }
-
-  @Test
-  public void testDeleteDir() throws IOException {
-    File dir = new File("testDirName");
-    dir.mkdir();
-    File file = File.createTempFile("testFile", null, dir);
-    assertTrue(dir.exists());
-    assertTrue(file.exists());
-    FileUtil.delete(dir);
-    assertFalse(file.exists());
-    assertFalse(dir.exists());
-  }
-}

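The deleted test exercised FileUtil.copy, FileUtil.stripOffExtension, and FileUtil.delete. For readers tracking the migration, a rough commons-io mapping inferred from those tests (an editor's sketch, not part of the commit):

import org.apache.commons.io.FileUtils;
import org.apache.commons.io.FilenameUtils;

import java.io.File;
import java.io.IOException;

public class FileUtilReplacementsSketch {
  static void examples(File source, File dest, File dir) throws IOException {
    // FileUtil.copy(File, File) -> FileUtils.copyFile / FileUtils.copyDirectory
    FileUtils.copyFile(source, dest);

    // FileUtil.stripOffExtension("filename.txt.log") -> "filename.txt"
    String base = FilenameUtils.removeExtension("filename.txt.log");
    System.out.println(base);

    // FileUtil.delete(File) worked on files and directories; forceDelete does
    // both but throws if the target is missing, deleteQuietly never throws.
    FileUtils.deleteQuietly(dir);
  }
}
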
http://git-wip-us.apache.org/repos/asf/geode/blob/29246ab7/geode-core/src/test/java/org/apache/geode/internal/JarDeployerDUnitTest.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/org/apache/geode/internal/JarDeployerDUnitTest.java b/geode-core/src/test/java/org/apache/geode/internal/JarDeployerDUnitTest.java
index 22f66a3..a365899 100644
--- a/geode-core/src/test/java/org/apache/geode/internal/JarDeployerDUnitTest.java
+++ b/geode-core/src/test/java/org/apache/geode/internal/JarDeployerDUnitTest.java
@@ -42,10 +42,12 @@ import java.io.InputStream;
 import java.io.OutputStream;
 import java.io.RandomAccessFile;
 import java.nio.channels.FileLock;
+import java.nio.file.Files;
 import java.util.List;
 import java.util.Properties;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.atomic.AtomicReference;
+import java.util.regex.Pattern;
 
 /**
  * Unit tests for the JarDeployer class
@@ -515,9 +517,7 @@ public class JarDeployerDUnitTest extends JUnit4CacheTestCase {
   }
 
   FileLock acquireSharedLock(final File file) throws IOException {
-    @SuppressWarnings("resource")
-    FileLock fileLock = new FileInputStream(file).getChannel().lock(0, 1, true);
-    return fileLock;
+    return new FileInputStream(file).getChannel().lock(0, 1, true);
   }
 
   void releaseLock(final FileLock fileLock, final File lockFile) throws IOException {
@@ -544,32 +544,28 @@ public class JarDeployerDUnitTest extends JUnit4CacheTestCase {
     }
 
     // Open the file then loop comparing each byte
-    InputStream inStream = new FileInputStream(file);
     int index = 0;
-    try {
+    try (InputStream inStream = new FileInputStream(file)) {
       for (; index < bytes.length; index++) {
         if (((byte) inStream.read()) != bytes[index])
           break;
       }
-    } finally {
-      inStream.close();
     }
 
     // If we didn't get to the end then something was different
-    if (index < bytes.length)
-      return false;
-
-    return true;
+    return index >= bytes.length;
   }
 
   private void deleteSavedJarFiles() throws IOException {
-    FileUtil.deleteMatching(new File("."),
-        "^" + JarDeployer.JAR_PREFIX + "JarDeployerDUnit.*#\\d++$");
-    FileUtil.delete(new File("JarDeployerDUnit"));
+    Pattern pattern = Pattern.compile("^" + JarDeployer.JAR_PREFIX + "JarDeployerDUnit.*#\\d++$");
+    File[] files = new File(".").listFiles((dir1, name) -> pattern.matcher(name).matches());
+    if (files != null) {
+      for (File file : files) {
+        Files.delete(file.toPath());
+      }
+    }
   }
 
-
-
   void writeJarBytesToFile(File jarFile, byte[] jarBytes) throws IOException {
     final OutputStream outStream = new FileOutputStream(jarFile);
     outStream.write(jarBytes);

