hive-commits mailing list archives

From xu...@apache.org
Subject svn commit: r1596977 - in /hive/trunk: common/src/java/org/apache/hadoop/hive/common/FileUtils.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestFolderPermissions.java ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
Date Thu, 22 May 2014 22:18:12 GMT
Author: xuefu
Date: Thu May 22 22:18:11 2014
New Revision: 1596977

URL: http://svn.apache.org/r1596977
Log:
HIVE-7092: Insert overwrite should not delete the original directory (Szehon via Xuefu)

Modified:
    hive/trunk/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
    hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestFolderPermissions.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
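
In short, what this commit changes: during an INSERT OVERWRITE, Hive previously removed the whole target directory via FsShell, and the directory was later recreated without the permissions the user had set on it (which is what the new test below checks). With this patch only the files under the directory are moved to the trash, and the directory itself is left in place. The sketch below illustrates that difference at the plain Hadoop FileSystem level; the path and the simplified delete calls are illustrative only and are not taken from the patch.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class OverwriteSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Path dir = new Path("/tmp/overwrite-demo");  // illustrative path, not from the patch
        FileSystem fs = dir.getFileSystem(conf);

        // Old behaviour, simplified: drop the directory and recreate it.
        // The recreated directory comes back with default permissions.
        fs.delete(dir, true);
        fs.mkdirs(dir);

        // New behaviour, simplified: delete only the children and keep the
        // directory, so its permissions and ownership stay untouched.
        for (FileStatus child : fs.listStatus(dir)) {
          fs.delete(child.getPath(), true);
        }
      }
    }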

Modified: hive/trunk/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/java/org/apache/hadoop/hive/common/FileUtils.java?rev=1596977&r1=1596976&r2=1596977&view=diff
==============================================================================
--- hive/trunk/common/src/java/org/apache/hadoop/hive/common/FileUtils.java (original)
+++ hive/trunk/common/src/java/org/apache/hadoop/hive/common/FileUtils.java Thu May 22 22:18:11 2014
@@ -39,6 +39,8 @@ import org.apache.hadoop.fs.PathFilter;
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.shims.HadoopShims;
+import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.Shell;
 
@@ -48,6 +50,19 @@ import org.apache.hadoop.util.Shell;
  */
 public final class FileUtils {
   private static final Log LOG = LogFactory.getLog(FileUtils.class.getName());
+
+  /**
+   * Accept all paths.
+   */
+  private static class AcceptAllPathFilter implements PathFilter {
+    @Override
+    public boolean accept(Path path) {
+      return true;
+    }
+  }
+
+  private static final PathFilter allPathFilter = new AcceptAllPathFilter();
+
   /**
   * Variant of Path.makeQualified that qualifies the input path against the default file system
    * indicated by the configuration
@@ -524,4 +539,53 @@ public final class FileUtils {
     }
     return copied;
   }
+
+  /**
+   * Deletes all files under a directory, sending them to the trash.  Leaves the directory as is.
+   * @param fs FileSystem to use
+   * @param f path of directory
+   * @param conf hive configuration
+   * @return true if deletion successful
+   * @throws FileNotFoundException
+   * @throws IOException
+   */
+  public static boolean trashFilesUnderDir(FileSystem fs, Path f, Configuration conf) throws FileNotFoundException, IOException {
+    FileStatus[] statuses = fs.listStatus(f, allPathFilter);
+    boolean result = true;
+    for (FileStatus status : statuses) {
+      result = result & moveToTrash(fs, status.getPath(), conf);
+    }
+    return result;
+  }
+
+  /**
+   * Move a particular file or directory to the trash.
+   * @param fs FileSystem to use
+   * @param f path of file or directory to move to trash.
+   * @param conf
+   * @return true if move successful
+   * @throws IOException
+   */
+  public static boolean moveToTrash(FileSystem fs, Path f, Configuration conf) throws IOException {
+    LOG.info("deleting  " + f);
+    HadoopShims hadoopShim = ShimLoader.getHadoopShims();
+
+    boolean skipTrash = HiveConf.getBoolVar(conf,
+        HiveConf.ConfVars.HIVE_WAREHOUSE_DATA_SKIPTRASH);
+
+    if (skipTrash) {
+      LOG.info("Not moving "+ f +" to trash due to configuration " +
+        HiveConf.ConfVars.HIVE_WAREHOUSE_DATA_SKIPTRASH + " is set to true.");
+    } else if (hadoopShim.moveToAppropriateTrash(fs, f, conf)) {
+      LOG.info("Moved to trash: " + f);
+      return true;
+    }
+
+    boolean result = fs.delete(f, true);
+    if (!result) {
+      LOG.error("Failed to delete " + f);
+    }
+    return result;
+  }
+
 }
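
A hedged usage sketch of the two helpers added above. Only FileUtils.trashFilesUnderDir and FileUtils.moveToTrash come from the patch; the demo class, path, and printout are made up for illustration.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hive.common.FileUtils;

    public class TrashUnderDirDemo {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Path tableDir = new Path("/user/hive/warehouse/demo_table");  // hypothetical location
        FileSystem fs = tableDir.getFileSystem(conf);

        // Moves every child of tableDir to the trash; a child is deleted outright
        // instead when HIVE_WAREHOUSE_DATA_SKIPTRASH is enabled or the trash move
        // fails. tableDir itself is left in place either way.
        boolean allRemoved = FileUtils.trashFilesUnderDir(fs, tableDir, conf);
        System.out.println("all children removed: " + allRemoved);
      }
    }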

Modified: hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestFolderPermissions.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestFolderPermissions.java?rev=1596977&r1=1596976&r2=1596977&view=diff
==============================================================================
--- hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestFolderPermissions.java (original)
+++ hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestFolderPermissions.java Thu May 22 22:18:11 2014
@@ -224,6 +224,48 @@ public class TestFolderPermissions {
   }
 
   @Test
+  public void testInsertOverwrite() throws Exception {
+    //case 1 is non-partitioned table.
+    String tableName = "insertoverwrite";
+
+    CommandProcessorResponse ret = driver.run("CREATE TABLE " + tableName + " (key string, value string)");
+    Assert.assertEquals(0,ret.getResponseCode());
+
+    String tableLoc = testDir + "/" + tableName;
+    assertExistence(testDir + "/" + tableName);
+    setPermissions(testDir + "/" + tableName, FsPermission.createImmutable((short) 0777));
+
+    ret = driver.run("insert overwrite table " + tableName + " select key,value from mysrc");
+    Assert.assertEquals(0,ret.getResponseCode());
+
+    Assert.assertTrue(listChildrenPerms(tableLoc).size() > 0);
+    for (FsPermission perm : listChildrenPerms(tableLoc)) {
+      Assert.assertEquals("rwxrwxrwx", perm.toString());
+    }
+
+    //case 2 is partitioned table.
+    tableName = "insertoverwritepartition";
+
+    ret = driver.run("CREATE TABLE " + tableName + " (key string, value string) partitioned
by (part1 int, part2 int)");
+    Assert.assertEquals(0,ret.getResponseCode());
+
+    ret = driver.run("insert overwrite table " + tableName + " partition(part1='1',part2='1')
select key,value from mysrc");
+    Assert.assertEquals(0,ret.getResponseCode());
+
+    String partLoc = testDir + "/" + tableName + "/part1=1/part2=1";
+    assertExistence(partLoc);
+    setPermissions(partLoc, FsPermission.createImmutable((short) 0777));
+
+    ret = driver.run("insert overwrite table " + tableName + " partition(part1='1',part2='1')
select key,value from mysrc");
+    Assert.assertEquals(0,ret.getResponseCode());
+
+    Assert.assertTrue(listChildrenPerms(tableLoc).size() > 0);
+    for (FsPermission perm : listChildrenPerms(tableLoc)) {
+      Assert.assertEquals("rwxrwxrwx", perm.toString());
+    }
+  }
+
+  @Test
   public void testEximPermissionInheritance() throws Exception {
 
     //export the table to external file.
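
The new test relies on helpers defined elsewhere in TestFolderPermissions (assertExistence, setPermissions, listChildrenPerms). A rough stand-alone equivalent of its final permission check, written against the plain Hadoop FileSystem API with an illustrative path, might look like this:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.fs.permission.FsPermission;

    public class PermissionCheckSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Path tableLoc = new Path("/tmp/warehouse/insertoverwrite");  // illustrative path
        FileSystem fs = tableLoc.getFileSystem(conf);

        // After the overwrite the directory should still carry the 0777 permission
        // it was given, and the test expects the files under it to show rwxrwxrwx too.
        FsPermission dirPerm = fs.getFileStatus(tableLoc).getPermission();
        System.out.println("table dir: " + dirPerm);
        for (FileStatus child : fs.listStatus(tableLoc)) {
          System.out.println(child.getPath().getName() + ": " + child.getPermission());
        }
      }
    }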

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java?rev=1596977&r1=1596976&r2=1596977&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java Thu May 22 22:18:11 2014
@@ -2392,16 +2392,11 @@ private void constructOneLBLocationMap(F
         try {
           FileSystem fs2 = oldPath.getFileSystem(conf);
           if (fs2.exists(oldPath)) {
-            // use FsShell to move data to .Trash first rather than delete permanently
-            FsShell fshell = new FsShell();
-            fshell.setConf(conf);
-            String[] rmr = isHadoop1() ? new String[]{"-rmr", oldPath.toString()} :
-                new String[]{"-rm", "-r", oldPath.toString()};
-            fshell.run(rmr);
+            FileUtils.trashFilesUnderDir(fs2, oldPath, conf);
           }
         } catch (Exception e) {
           //swallow the exception
-          LOG.warn("Directory " + oldPath.toString() + " canot be removed.");
+          LOG.warn("Directory " + oldPath.toString() + " canot be removed:" + StringUtils.stringifyException(e));
         }
       }
 


