hive-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From ser...@apache.org
Subject [1/2] hive git commit: HIVE-12648 : LLAP IO was disabled in CliDriver by accident (and tests are broken) (Sergey Shelukhin, reviewed by Prasanth Jayachandran)
Date Sat, 12 Dec 2015 00:10:30 GMT
Repository: hive
Updated Branches:
  refs/heads/branch-2.0 728c18e92 -> c692e2e8a
  refs/heads/master 747384b13 -> b187d42b2


HIVE-12648 : LLAP IO was disabled in CliDriver by accident (and tests are broken) (Sergey
Shelukhin, reviewed by Prasanth Jayachandran)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/c692e2e8
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/c692e2e8
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/c692e2e8

Branch: refs/heads/branch-2.0
Commit: c692e2e8a4f67ff357db27be139b8fb32e6801bb
Parents: 728c18e
Author: Sergey Shelukhin <sershe@apache.org>
Authored: Fri Dec 11 16:09:00 2015 -0800
Committer: Sergey Shelukhin <sershe@apache.org>
Committed: Fri Dec 11 16:09:00 2015 -0800

----------------------------------------------------------------------
 .../hadoop/hive/ql/TestLocationQueries.java     |  2 +-
 .../hadoop/hive/accumulo/AccumuloQTestUtil.java |  2 +-
 .../hadoop/hive/hbase/HBaseQTestUtil.java       |  2 +-
 .../org/apache/hadoop/hive/ql/QTestUtil.java    | 28 ++++---------
 .../llap/io/encoded/OrcEncodedDataReader.java   | 43 +++++++++++---------
 .../org/apache/hadoop/hive/ql/io/HdfsUtils.java |  8 +++-
 .../hadoop/hive/ql/io/HiveInputFormat.java      |  2 +-
 ql/src/test/templates/TestCliDriver.vm          |  2 +-
 ql/src/test/templates/TestCompareCliDriver.vm   |  2 +-
 ql/src/test/templates/TestNegativeCliDriver.vm  |  4 +-
 ql/src/test/templates/TestParseNegative.vm      |  4 +-
 11 files changed, 49 insertions(+), 50 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/c692e2e8/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestLocationQueries.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestLocationQueries.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestLocationQueries.java
index 6ee98cb..0688846 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestLocationQueries.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestLocationQueries.java
@@ -88,7 +88,7 @@ public class TestLocationQueries extends BaseTestQueries {
         String hadoopVer, String locationSubdir)
       throws Exception
     {
-      super(outDir, logDir, miniMr, null, hadoopVer, "", "");
+      super(outDir, logDir, miniMr, null, hadoopVer, "", "", false, false);
       this.locationSubdir = locationSubdir;
     }
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/c692e2e8/itests/util/src/main/java/org/apache/hadoop/hive/accumulo/AccumuloQTestUtil.java
----------------------------------------------------------------------
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/accumulo/AccumuloQTestUtil.java b/itests/util/src/main/java/org/apache/hadoop/hive/accumulo/AccumuloQTestUtil.java
index b83543a..88bc0bc 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/accumulo/AccumuloQTestUtil.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/accumulo/AccumuloQTestUtil.java
@@ -26,7 +26,7 @@ public class AccumuloQTestUtil extends QTestUtil {
   public AccumuloQTestUtil(String outDir, String logDir, MiniClusterType miniMr,
      AccumuloTestSetup setup, String initScript, String cleanupScript) throws Exception {
 
-    super(outDir, logDir, miniMr, null, initScript, cleanupScript);
+    super(outDir, logDir, miniMr, null, "0.20", initScript, cleanupScript, false, false);
     setup.setupWithHiveConf(conf);
     super.init();
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/c692e2e8/itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java
----------------------------------------------------------------------
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java b/itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java
index 59fab2c..3ff5742 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java
@@ -44,7 +44,7 @@ public class HBaseQTestUtil extends QTestUtil {
     String initScript, String cleanupScript)
     throws Exception {
 
-    super(outDir, logDir, miniMr, null, initScript, cleanupScript);
+    super(outDir, logDir, miniMr, null, "0.20", initScript, cleanupScript, false, false);
     setup.preTest(conf);
     this.conn = setup.getConnection();
     super.init();

http://git-wip-us.apache.org/repos/asf/hive/blob/c692e2e8/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
----------------------------------------------------------------------
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
index f805087..928a071 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -73,6 +73,7 @@ import org.apache.hadoop.hive.common.io.SortAndDigestPrintStream;
 import org.apache.hadoop.hive.common.io.SortPrintStream;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.llap.io.api.LlapProxy;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.api.Index;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
@@ -254,11 +255,6 @@ public class QTestUtil {
     }
   }
 
-  public QTestUtil(String outDir, String logDir, String initScript, String cleanupScript) throws
-      Exception {
-    this(outDir, logDir, MiniClusterType.none, null, "0.20", initScript, cleanupScript);
-  }
-
   public String getOutputDirectory() {
     return outDir;
   }
@@ -341,12 +337,6 @@ public class QTestUtil {
     }
   }
 
-  public QTestUtil(String outDir, String logDir, MiniClusterType clusterType, String hadoopVer,
-                   String initScript, String cleanupScript)
-    throws Exception {
-    this(outDir, logDir, clusterType, null, hadoopVer, initScript, cleanupScript);
-  }
-
   private String getKeyProviderURI() {
     // Use the target directory if it is not specified
     String HIVE_ROOT = QTestUtil.ensurePathEndsInSlash(System.getProperty("hive.root"));
@@ -373,13 +363,8 @@ public class QTestUtil {
   }
 
   public QTestUtil(String outDir, String logDir, MiniClusterType clusterType,
-      String confDir, String hadoopVer, String initScript, String cleanupScript)
-    throws Exception {
-    this(outDir, logDir, clusterType, confDir, hadoopVer, initScript, cleanupScript, false);
-  }
-
-  public QTestUtil(String outDir, String logDir, MiniClusterType clusterType,
-      String confDir, String hadoopVer, String initScript, String cleanupScript, boolean useHBaseMetastore)
+      String confDir, String hadoopVer, String initScript, String cleanupScript,
+      boolean useHBaseMetastore, boolean withLlapIo)
     throws Exception {
     this.outDir = outDir;
     this.logDir = logDir;
@@ -452,6 +437,11 @@ public class QTestUtil {
     }
 
     initConf();
+    if (withLlapIo && clusterType == MiniClusterType.none) {
+      LOG.info("initializing llap IO");
+      LlapProxy.initializeLlapIo(conf);
+    }
+
 
     // Use the current directory if it is not specified
     String dataDir = conf.get("test.data.files");
@@ -1772,7 +1762,7 @@ public class QTestUtil {
     QTestUtil[] qt = new QTestUtil[qfiles.length];
     for (int i = 0; i < qfiles.length; i++) {
       qt[i] = new QTestUtil(resDir, logDir, MiniClusterType.none, null, "0.20",
-          defaultInitScript, defaultCleanupScript);
+          defaultInitScript, defaultCleanupScript, false, false);
       qt[i].addFile(qfiles[i]);
       qt[i].clearTestSideEffects();
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/c692e2e8/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java
index 729f1bd..5957d57 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java
@@ -338,7 +338,7 @@ public class OrcEncodedDataReader extends CallableWithNdc<Void>
     // TODO: I/O threadpool could be here - one thread per stripe; for now, linear.
     boolean hasFileId = this.fileId != null;
     long fileId = hasFileId ? this.fileId : 0;
-    OrcBatchKey stripeKey = new OrcBatchKey(fileId, -1, 0);
+    OrcBatchKey stripeKey = hasFileId ? new OrcBatchKey(fileId, -1, 0) : null;
     for (int stripeIxMod = 0; stripeIxMod < readState.length; ++stripeIxMod) {
       if (processStop()) {
         cleanupReaders();
@@ -382,8 +382,8 @@ public class OrcEncodedDataReader extends CallableWithNdc<Void>
         if (stripeMetadatas != null) {
           stripeMetadata = stripeMetadatas.get(stripeIxMod);
         } else {
-          stripeKey.stripeIx = stripeIx;
           if (hasFileId) {
+            stripeKey.stripeIx = stripeIx;
             stripeMetadata = metadataCache.getStripeMetadata(stripeKey);
           }
           isFoundInCache = (stripeMetadata != null);
@@ -396,12 +396,13 @@ public class OrcEncodedDataReader extends CallableWithNdc<Void>
             counters.incrTimeCounter(Counter.HDFS_TIME_US, startTimeHdfs);
             if (hasFileId) {
               stripeMetadata = metadataCache.putStripeMetadata(stripeMetadata);
+              if (DebugUtils.isTraceOrcEnabled()) {
+                LlapIoImpl.LOG.info("Caching stripe " + stripeKey.stripeIx
+                    + " metadata with includes: " + DebugUtils.toString(stripeIncludes));
+              }
+              stripeKey = new OrcBatchKey(fileId, -1, 0);
             }
-            if (DebugUtils.isTraceOrcEnabled()) {
-              LlapIoImpl.LOG.info("Caching stripe " + stripeKey.stripeIx
-                  + " metadata with includes: " + DebugUtils.toString(stripeIncludes));
-            }
-            stripeKey = new OrcBatchKey(fileId, -1, 0);
+
           }
           consumer.setStripeMetadata(stripeMetadata);
         }
@@ -600,7 +601,7 @@ public class OrcEncodedDataReader extends CallableWithNdc<Void>
     if (orcReader != null) return;
     Path path = split.getPath();
     if (fileId != null && HiveConf.getBoolVar(conf, ConfVars.LLAP_IO_USE_FILEID_PATH)) {
-      path = HdfsUtils.getFileIdPath(fs, split.getPath(), fileId);
+      path = HdfsUtils.getFileIdPath(fs, path, fileId);
     }
     if (DebugUtils.isTraceOrcEnabled()) {
       LOG.info("Creating reader for " + path + " (" + split.getPath() + ")");
@@ -626,7 +627,7 @@ public class OrcEncodedDataReader extends CallableWithNdc<Void>
     }
     ensureOrcReader();
     // We assume this call doesn't touch HDFS because everything is already read; don't add time.
-    metadata = new OrcFileMetadata(fileId == null ? fileId : 0, orcReader);
+    metadata = new OrcFileMetadata(fileId != null ? fileId : 0, orcReader);
     return (fileId == null) ? metadata : metadataCache.putFileMetadata(metadata);
   }
 
@@ -638,27 +639,31 @@ public class OrcEncodedDataReader extends CallableWithNdc<Void>
     ArrayList<OrcStripeMetadata> result = new ArrayList<OrcStripeMetadata>(readState.length);
     boolean hasFileId = this.fileId != null;
     long fileId = hasFileId ? this.fileId : 0;
-    OrcBatchKey stripeKey = new OrcBatchKey(fileId, 0, 0);
+    OrcBatchKey stripeKey = hasFileId ? new OrcBatchKey(fileId, 0, 0) : null;
     for (int stripeIxMod = 0; stripeIxMod < readState.length; ++stripeIxMod) {
-      stripeKey.stripeIx = stripeIxMod + stripeIxFrom;
-      OrcStripeMetadata value = hasFileId ? metadataCache.getStripeMetadata(stripeKey) : null;
+      OrcStripeMetadata value = null;
+      int stripeIx = stripeIxMod + stripeIxFrom;
+      if (hasFileId) {
+        stripeKey.stripeIx = stripeIx;
+        value = metadataCache.getStripeMetadata(stripeKey);
+      }
       if (value == null || !value.hasAllIndexes(globalInc)) {
         counters.incrCounter(Counter.METADATA_CACHE_MISS);
         ensureMetadataReader();
-        StripeInformation si = fileMetadata.getStripes().get(stripeKey.stripeIx);
+        StripeInformation si = fileMetadata.getStripes().get(stripeIx);
         if (value == null) {
           long startTime = counters.startTimeCounter();
           value = new OrcStripeMetadata(stripeKey, metadataReader, si, globalInc, sargColumns);
           counters.incrTimeCounter(Counter.HDFS_TIME_US, startTime);
           if (hasFileId) {
             value = metadataCache.putStripeMetadata(value);
+            if (DebugUtils.isTraceOrcEnabled()) {
+              LlapIoImpl.LOG.info("Caching stripe " + stripeKey.stripeIx
+                  + " metadata with includes: " + DebugUtils.toString(globalInc));
+            }
+            // Create new key object to reuse for gets; we've used the old one to put in cache.
+            stripeKey = new OrcBatchKey(fileId, 0, 0);
           }
-          if (DebugUtils.isTraceOrcEnabled()) {
-            LlapIoImpl.LOG.info("Caching stripe " + stripeKey.stripeIx
-                + " metadata with includes: " + DebugUtils.toString(globalInc));
-          }
-          // Create new key object to reuse for gets; we've used the old one to put in cache.
-          stripeKey = new OrcBatchKey(fileId, 0, 0);
         }
         // We might have got an old value from cache; recheck it has indexes.
         if (!value.hasAllIndexes(globalInc)) {

http://git-wip-us.apache.org/repos/asf/hive/blob/c692e2e8/ql/src/java/org/apache/hadoop/hive/ql/io/HdfsUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/HdfsUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/io/HdfsUtils.java
index 58bf9b6..af64fc8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/HdfsUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/HdfsUtils.java
@@ -39,7 +39,11 @@ public class HdfsUtils {
     if (fileSystem instanceof DistributedFileSystem) {
       return SHIMS.getFileId(fileSystem, pathStr);
     }
-    if (!allowSynthetic) return null;
+    if (!allowSynthetic) {
+      LOG.warn("Cannot get unique file ID from "
+        + fileSystem.getClass().getSimpleName() + "; returning null");
+      return null;
+    }
     // If we are not on DFS, we just hash the file name + size and hope for the best.
     // TODO: we assume it only happens in tests. Fix?
     int nameHash = pathStr.hashCode();
@@ -50,7 +54,7 @@ public class HdfsUtils {
         combinedHash = modTimeHash ^ fileSizeHash;
     long id = (((long)nameHash & 0xffffffffL) << 32) | ((long)combinedHash & 0xffffffffL);
     LOG.warn("Cannot get unique file ID from "
-        + fileSystem.getClass().getSimpleName() + "; using " + id + "(" + pathStr
+        + fileSystem.getClass().getSimpleName() + "; using " + id + " (" + pathStr
         + "," + nameHash + "," + fileSize + ")");
     return id;
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/c692e2e8/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
index bdf5dc2..2607d9c 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
@@ -216,7 +216,7 @@ public class HiveInputFormat<K extends WritableComparable, V extends Writable>
     @SuppressWarnings("unchecked")
     LlapIo<VectorizedRowBatch> llapIo = LlapProxy.getIo();
     if (llapIo == null) {
-      LOG.info("Not using LLAP because IO is not initialized");
+      LOG.info("Not using LLAP IO because it is not initialized");
       return inputFormat;
     }
     return castInputFormat(llapIo.getInputFormat(inputFormat));

http://git-wip-us.apache.org/repos/asf/hive/blob/c692e2e8/ql/src/test/templates/TestCliDriver.vm
----------------------------------------------------------------------
diff --git a/ql/src/test/templates/TestCliDriver.vm b/ql/src/test/templates/TestCliDriver.vm
index 01745da..974d2c2 100644
--- a/ql/src/test/templates/TestCliDriver.vm
+++ b/ql/src/test/templates/TestCliDriver.vm
@@ -52,7 +52,7 @@ public class $className extends TestCase {
         hiveConfDir = HIVE_ROOT + hiveConfDir;
       }
       qt = new QTestUtil((HIVE_ROOT + "$resultsDir"), (HIVE_ROOT + "$logDir"), miniMR,
-      hiveConfDir, hadoopVer, initScript, cleanupScript, useHBaseMetastore);
+      hiveConfDir, hadoopVer, initScript, cleanupScript, useHBaseMetastore, true);
 
       // do a one time initialization
       qt.cleanUp();

http://git-wip-us.apache.org/repos/asf/hive/blob/c692e2e8/ql/src/test/templates/TestCompareCliDriver.vm
----------------------------------------------------------------------
diff --git a/ql/src/test/templates/TestCompareCliDriver.vm b/ql/src/test/templates/TestCompareCliDriver.vm
index 7f849e0..c2ba5bc 100644
--- a/ql/src/test/templates/TestCompareCliDriver.vm
+++ b/ql/src/test/templates/TestCompareCliDriver.vm
@@ -52,7 +52,7 @@ public class $className extends TestCase {
         hiveConfDir = HIVE_ROOT + hiveConfDir;
       }
       qt = new QTestUtil((HIVE_ROOT + "$resultsDir"), (HIVE_ROOT + "$logDir"), miniMR,
-      hiveConfDir, hadoopVer, initScript, cleanupScript);
+      hiveConfDir, hadoopVer, initScript, cleanupScript, false, false);
 
       // do a one time initialization
       qt.cleanUp();

http://git-wip-us.apache.org/repos/asf/hive/blob/c692e2e8/ql/src/test/templates/TestNegativeCliDriver.vm
----------------------------------------------------------------------
diff --git a/ql/src/test/templates/TestNegativeCliDriver.vm b/ql/src/test/templates/TestNegativeCliDriver.vm
index 5f8ee8e..6d8e16c 100644
--- a/ql/src/test/templates/TestNegativeCliDriver.vm
+++ b/ql/src/test/templates/TestNegativeCliDriver.vm
@@ -46,8 +46,8 @@ public class $className extends TestCase {
 
     try {
       String hadoopVer = "$hadoopVersion";
-      qt = new QTestUtil((HIVE_ROOT + "$resultsDir"), (HIVE_ROOT + "$logDir"), miniMR, hadoopVer,
-       initScript, cleanupScript);
+      qt = new QTestUtil((HIVE_ROOT + "$resultsDir"), (HIVE_ROOT + "$logDir"), miniMR, null, hadoopVer,
+       initScript, cleanupScript, false, false);
       // do a one time initialization
       qt.cleanUp();
       qt.createSources();

http://git-wip-us.apache.org/repos/asf/hive/blob/c692e2e8/ql/src/test/templates/TestParseNegative.vm
----------------------------------------------------------------------
diff --git a/ql/src/test/templates/TestParseNegative.vm b/ql/src/test/templates/TestParseNegative.vm
index c5e7bdf..119f749 100755
--- a/ql/src/test/templates/TestParseNegative.vm
+++ b/ql/src/test/templates/TestParseNegative.vm
@@ -47,8 +47,8 @@ public class $className extends TestCase {
 
     try {
       String hadoopVer = "$hadoopVersion";
-      qt = new QTestUtil((HIVE_ROOT + "$resultsDir"), (HIVE_ROOT + "$logDir"), miniMR, hadoopVer,
-       initScript, cleanupScript);
+      qt = new QTestUtil((HIVE_ROOT + "$resultsDir"), (HIVE_ROOT + "$logDir"), miniMR, null, hadoopVer,
+       initScript, cleanupScript, false, false);
     } catch (Exception e) {
       System.err.println("Exception: " + e.getMessage());
       e.printStackTrace();


Mime
View raw message