hive-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From sze...@apache.org
Subject svn commit: r1628814 - in /hive/trunk: ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ ql/src/test/org/apache/hadoop/hive/ql/metadata/ shims/0.20/src/main/java/org/apache/hadoop/hive/shims/ shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/ shims/...
Date Wed, 01 Oct 2014 19:18:02 GMT
Author: szehon
Date: Wed Oct  1 19:18:01 2014
New Revision: 1628814

URL: http://svn.apache.org/r1628814
Log:
HIVE-8265: Build failure on hadoop-1 (Navis and Szehon, reviewed by Vikram Dixit)

Modified:
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/CustomPartitionVertex.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java
    hive/trunk/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
    hive/trunk/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
    hive/trunk/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
    hive/trunk/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/CustomPartitionVertex.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/CustomPartitionVertex.java?rev=1628814&r1=1628813&r2=1628814&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/CustomPartitionVertex.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/CustomPartitionVertex.java Wed Oct  1 19:18:01 2014
@@ -33,8 +33,6 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.ql.plan.TezWork.VertexType;
 import org.apache.hadoop.hive.shims.ShimLoader;
-import org.apache.hadoop.io.DataInputBuffer;
-import org.apache.hadoop.io.DataInputByteBuffer;
 import org.apache.hadoop.io.DataOutputBuffer;
 import org.apache.hadoop.io.serializer.SerializationFactory;
 import org.apache.hadoop.mapred.FileSplit;

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java?rev=1628814&r1=1628813&r2=1628814&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java Wed Oct  1 19:18:01 2014
@@ -18,8 +18,6 @@
 
 package org.apache.hadoop.hive.ql.metadata;
 
-import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_TRASH_CHECKPOINT_INTERVAL_KEY;
-import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_TRASH_INTERVAL_KEY;
 import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_NAME;
 
 import java.util.ArrayList;
@@ -28,13 +26,13 @@ import java.util.HashMap;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
+import java.util.regex.Pattern;
 
 import junit.framework.TestCase;
 
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.TrashPolicy;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
@@ -51,6 +49,7 @@ import org.apache.hadoop.hive.serde.serd
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 import org.apache.hadoop.hive.serde2.thrift.ThriftDeserializer;
 import org.apache.hadoop.hive.serde2.thrift.test.Complex;
+import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.mapred.SequenceFileInputFormat;
 import org.apache.hadoop.mapred.SequenceFileOutputFormat;
 import org.apache.hadoop.mapred.TextInputFormat;
@@ -70,8 +69,8 @@ public class TestHive extends TestCase {
     super.setUp();
     hiveConf = new HiveConf(this.getClass());
     // enable trash so it can be tested
-    hiveConf.setFloat(FS_TRASH_CHECKPOINT_INTERVAL_KEY, 30);
-    hiveConf.setFloat(FS_TRASH_INTERVAL_KEY, 30);
+    hiveConf.setFloat("fs.trash.checkpoint.interval", 30);  // FS_TRASH_CHECKPOINT_INTERVAL_KEY (hadoop-2)
+    hiveConf.setFloat("fs.trash.interval", 30);             // FS_TRASH_INTERVAL_KEY (hadoop-2)
     SessionState.start(hiveConf);
     try {
       hm = Hive.get(hiveConf);
@@ -89,8 +88,8 @@ public class TestHive extends TestCase {
     try {
       super.tearDown();
       // disable trash
-      hiveConf.setFloat(FS_TRASH_CHECKPOINT_INTERVAL_KEY, 30);
-      hiveConf.setFloat(FS_TRASH_INTERVAL_KEY, 30);
+      hiveConf.setFloat("fs.trash.checkpoint.interval", 30);  // FS_TRASH_CHECKPOINT_INTERVAL_KEY (hadoop-2)
+      hiveConf.setFloat("fs.trash.interval", 30);             // FS_TRASH_INTERVAL_KEY (hadoop-2)
       Hive.closeCurrent();
     } catch (Exception e) {
       System.err.println(StringUtils.stringifyException(e));
@@ -355,6 +354,9 @@ public class TestHive extends TestCase {
   }
 
   public void testDropTableTrash() throws Throwable {
+    if (!ShimLoader.getHadoopShims().supportTrashFeature()) {
+      return; // it's hadoop-1
+    }
     try {
       String dbName = "db_for_testdroptable";
       hm.dropDatabase(dbName, true, true, true);
@@ -379,12 +381,9 @@ public class TestHive extends TestCase {
       FileSystem fs = path1.getFileSystem(hiveConf);
       assertTrue(fs.exists(path1));
       // drop table and check that trash works
-      TrashPolicy tp = TrashPolicy.getInstance(hiveConf, fs, fs.getHomeDirectory());
-      assertNotNull("TrashPolicy instance should not be null", tp);
-      assertTrue("TrashPolicy is not enabled for filesystem: " + fs.getUri(), tp.isEnabled());
-      Path trashDir = tp.getCurrentTrashDir();
+      Path trashDir = ShimLoader.getHadoopShims().getCurrentTrashPath(hiveConf, fs);
       assertNotNull("trash directory should not be null", trashDir);
-      Path trash1 = Path.mergePaths(trashDir, path1);
+      Path trash1 = mergePaths(trashDir, path1);
       Path pathglob = trash1.suffix("*");;
       FileStatus before[] = fs.globStatus(pathglob);
       hm.dropTable(dbName, ts.get(0));
@@ -399,7 +398,7 @@ public class TestHive extends TestCase {
       assertEquals(ts.get(1), table2.getTableName());
       Path path2 = table2.getPath();
       assertTrue(fs.exists(path2));
-      Path trash2 = Path.mergePaths(trashDir, path2);
+      Path trash2 = mergePaths(trashDir, path2);
       System.out.println("trashDir2 is " + trash2);
       pathglob = trash2.suffix("*");
       before = fs.globStatus(pathglob);
@@ -621,4 +620,39 @@ public class TestHive extends TestCase {
     newHiveObj = Hive.get(newHconf);
     assertTrue(prevHiveObj != newHiveObj);
   }
+
+  // shamelessly copied from Path in hadoop-2
+  private static final String SEPARATOR = "/";
+  private static final char SEPARATOR_CHAR = '/';
+
+  private static final String CUR_DIR = ".";
+
+  private static final boolean WINDOWS
+      = System.getProperty("os.name").startsWith("Windows");
+
+  private static final Pattern hasDriveLetterSpecifier =
+      Pattern.compile("^/?[a-zA-Z]:");
+
+  private static Path mergePaths(Path path1, Path path2) {
+    String path2Str = path2.toUri().getPath();
+    path2Str = path2Str.substring(startPositionWithoutWindowsDrive(path2Str));
+    // Add path components explicitly, because simply concatenating two path
+    // string is not safe, for example:
+    // "/" + "/foo" yields "//foo", which will be parsed as authority in Path
+    return new Path(path1.toUri().getScheme(),
+        path1.toUri().getAuthority(),
+        path1.toUri().getPath() + path2Str);
+  }
+
+  private static int startPositionWithoutWindowsDrive(String path) {
+    if (hasWindowsDrive(path)) {
+      return path.charAt(0) ==  SEPARATOR_CHAR ? 3 : 2;
+    } else {
+      return 0;
+    }
+  }
+
+  private static boolean hasWindowsDrive(String path) {
+    return (WINDOWS && hasDriveLetterSpecifier.matcher(path).find());
+  }
 }

Modified: hive/trunk/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java?rev=1628814&r1=1628813&r2=1628814&view=diff
==============================================================================
--- hive/trunk/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java (original)
+++ hive/trunk/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java Wed Oct  1 19:18:01 2014
@@ -918,4 +918,14 @@ public class Hadoop20Shims implements Ha
   public boolean hasStickyBit(FsPermission permission) {
     return false;   // not supported
   }
+
+  @Override
+  public boolean supportTrashFeature() {
+    return false;
+  }
+
+  @Override
+  public Path getCurrentTrashPath(Configuration conf, FileSystem fs) {
+    return null;
+  }
 }

Modified: hive/trunk/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java?rev=1628814&r1=1628813&r2=1628814&view=diff
==============================================================================
--- hive/trunk/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java (original)
+++ hive/trunk/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java Wed Oct  1 19:18:01 2014
@@ -534,6 +534,16 @@ public class Hadoop20SShims extends Hado
 
   @Override
   public boolean hasStickyBit(FsPermission permission) {
-    return false;   // not supported
+    return false;
+  }
+
+  @Override
+  public boolean supportTrashFeature() {
+    return false;
+  }
+
+  @Override
+  public Path getCurrentTrashPath(Configuration conf, FileSystem fs) {
+    return null;
   }
 }

Modified: hive/trunk/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java?rev=1628814&r1=1628813&r2=1628814&view=diff
==============================================================================
--- hive/trunk/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java (original)
+++ hive/trunk/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java Wed Oct  1 19:18:01 2014
@@ -46,6 +46,7 @@ import org.apache.hadoop.fs.PathFilter;
 import org.apache.hadoop.fs.ProxyFileSystem;
 import org.apache.hadoop.fs.RemoteIterator;
 import org.apache.hadoop.fs.Trash;
+import org.apache.hadoop.fs.TrashPolicy;
 import org.apache.hadoop.fs.permission.AclEntry;
 import org.apache.hadoop.fs.permission.AclEntryScope;
 import org.apache.hadoop.fs.permission.AclEntryType;
@@ -835,4 +836,15 @@ public class Hadoop23Shims extends Hadoo
   public boolean hasStickyBit(FsPermission permission) {
     return permission.getStickyBit();
   }
+
+  @Override
+  public boolean supportTrashFeature() {
+    return true;
+  }
+
+  @Override
+  public Path getCurrentTrashPath(Configuration conf, FileSystem fs) {
+    TrashPolicy tp = TrashPolicy.getInstance(conf, fs, fs.getHomeDirectory());
+    return tp.getCurrentTrashDir();
+  }
 }

Modified: hive/trunk/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java?rev=1628814&r1=1628813&r2=1628814&view=diff
==============================================================================
--- hive/trunk/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java (original)
+++ hive/trunk/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java Wed Oct  1 19:18:01 2014
@@ -721,4 +721,14 @@ public interface HadoopShims {
    * @return sticky bit
    */
   boolean hasStickyBit(FsPermission permission);
+
+  /**
+   * @return True if the current hadoop supports trash feature.
+   */
+  boolean supportTrashFeature();
+
+  /**
+   * @return Path to HDFS trash, if current hadoop supports trash feature.  Null otherwise.
+   */
+  Path getCurrentTrashPath(Configuration conf, FileSystem fs);
 }



Mime
View raw message