hive-commits mailing list archives

From na...@apache.org
Subject svn commit: r1645550 - in /hive/trunk: common/src/java/org/apache/hadoop/hive/common/ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/ ql/src/java/org/apache/hadoop/hive/ql/ ql/src/java/org/apache/hadoop/hive/ql/hooks/ ql/src/java/org/apache/h...
Date Mon, 15 Dec 2014 02:25:07 GMT
Author: navis
Date: Mon Dec 15 02:25:06 2014
New Revision: 1645550

URL: http://svn.apache.org/r1645550
Log:
HIVE-8357 : Path type entities should use qualified path rather than string (Navis, reviewed by Thejas Nair)
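The gist: directory entities (LOCAL_DIR/DFS_DIR) now carry a Hadoop Path qualified against its FileSystem instead of a raw string. A minimal sketch of what qualification does, not part of the patch (class name and paths are illustrative):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;

    public class QualifyDemo {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();  // fs.defaultFS is file:/// by default
        Path raw = new Path("/tmp/udf1.jar");
        // makeQualified fills in the missing scheme/authority from the path's
        // FileSystem, turning a bare local path into file:///tmp/udf1.jar
        // (or hdfs://<namenode>/tmp/udf1.jar against an HDFS default filesystem).
        Path qualified = raw.getFileSystem(conf).makeQualified(raw);
        System.out.println(qualified.toUri());
      }
    }

This is also why the expected locations in TestCreateUdfEntities below change from file:/tmp/udf1.jar to the fully qualified file:///tmp/udf1.jar.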

Modified:
    hive/trunk/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
    hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestCreateUdfEntities.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
    hive/trunk/ql/src/test/results/clientnegative/udf_local_resource.q.out
    hive/trunk/ql/src/test/results/clientnegative/udf_nonexistent_resource.q.out
    hive/trunk/ql/src/test/results/clientpositive/add_part_multiple.q.out
    hive/trunk/ql/src/test/results/clientpositive/alter2.q.out
    hive/trunk/ql/src/test/results/clientpositive/alter5.q.out
    hive/trunk/ql/src/test/results/clientpositive/exim_17_part_managed.q.out

Modified: hive/trunk/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/java/org/apache/hadoop/hive/common/FileUtils.java?rev=1645550&r1=1645549&r2=1645550&view=diff
==============================================================================
--- hive/trunk/common/src/java/org/apache/hadoop/hive/common/FileUtils.java (original)
+++ hive/trunk/common/src/java/org/apache/hadoop/hive/common/FileUtils.java Mon Dec 15 02:25:06 2014
@@ -448,7 +448,7 @@ public final class FileUtils {
    */
   public static boolean isLocalFile(HiveConf conf, String fileName) {
     try {
-      // do best effor to determine if this is a local file
+      // do best effort to determine if this is a local file
       return isLocalFile(conf, new URI(fileName));
     } catch (URISyntaxException e) {
       LOG.warn("Unable to create URI from " + fileName, e);
@@ -464,7 +464,7 @@ public final class FileUtils {
    */
   public static boolean isLocalFile(HiveConf conf, URI fileUri) {
     try {
-      // do best effor to determine if this is a local file
+      // do best effort to determine if this is a local file
       FileSystem fsForFile = FileSystem.get(fileUri, conf);
       return LocalFileSystem.class.isInstance(fsForFile);
     } catch (IOException e) {

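Both overloads above decide locality by resolving the URI's FileSystem. A usage fragment (a sketch; the jar path is illustrative):

    HiveConf conf = new HiveConf();
    // true when the URI resolves to a LocalFileSystem instance (file:///tmp/udf1.jar);
    // an hdfs:// URI resolves to a different FileSystem class and yields false.
    boolean local = FileUtils.isLocalFile(conf, "file:///tmp/udf1.jar");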
Modified: hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestCreateUdfEntities.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestCreateUdfEntities.java?rev=1645550&r1=1645549&r2=1645550&view=diff
==============================================================================
--- hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestCreateUdfEntities.java (original)
+++ hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestCreateUdfEntities.java Mon Dec 15 02:25:06 2014
@@ -2,14 +2,9 @@ package org.apache.hadoop.hive.ql;
 
 import static org.junit.Assert.*;
 
-import java.net.URI;
-import java.util.Set;
-
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.hooks.Entity;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
-import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.junit.After;
 import org.junit.Before;
@@ -50,7 +45,7 @@ public class TestCreateUdfEntities {
     assertEquals(funcName, outputEntities[1].getFunctionName());
 
     assertEquals(Entity.Type.LOCAL_DIR, outputEntities[2].getType());
-    assertEquals("file:/tmp/udf1.jar", outputEntities[2].getLocation().toString());
+    assertEquals("file:///tmp/udf1.jar", outputEntities[2].getLocation().toString());
   }
 
   @Test
@@ -68,7 +63,7 @@ public class TestCreateUdfEntities {
     assertEquals(funcName, outputEntities[1].getFunctionName());
 
     assertEquals(Entity.Type.DFS_DIR, outputEntities[2].getType());
-    assertEquals("hdfs:/tmp/udf1.jar", outputEntities[2].getLocation().toString());
+    assertEquals("hdfs:///tmp/udf1.jar", outputEntities[2].getLocation().toString());
   }
 
 }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java?rev=1645550&r1=1645549&r2=1645550&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java Mon Dec 15 02:25:06 2014
@@ -114,7 +114,6 @@ import org.apache.hadoop.hive.ql.securit
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
 import org.apache.hadoop.hive.serde2.ByteStream;
-import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.hive.shims.Utils;
 import org.apache.hadoop.mapred.ClusterStatus;
 import org.apache.hadoop.mapred.JobClient;
@@ -152,7 +151,7 @@ public class Driver implements CommandPr
 
   private String userName;
 
-  private boolean checkConcurrency() throws SemanticException {
+  private boolean checkConcurrency() {
     boolean supportConcurrency = conf.getBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY);
     if (!supportConcurrency) {
       LOG.info("Concurrency mode is disabled, not creating a lock manager");
@@ -535,8 +534,8 @@ public class Driver implements CommandPr
    * Do authorization using post semantic analysis information in the semantic analyzer
    * The original command is also passed so that authorization interface can provide
    * more useful information in logs.
-   * @param sem
-   * @param command
+   * @param sem SemanticAnalyzer used to parse input query
+   * @param command input query
    * @throws HiveException
    * @throws AuthorizationException
    */
@@ -583,7 +582,7 @@ public class Driver implements CommandPr
     }
     if (outputs != null && outputs.size() > 0) {
       for (WriteEntity write : outputs) {
-        if (write.isDummy()) {
+        if (write.isDummy() || write.isPathType()) {
           continue;
         }
         if (write.getType() == Entity.Type.DATABASE) {
@@ -616,7 +615,7 @@ public class Driver implements CommandPr
       //determine if partition level privileges should be checked for input tables
       Map<String, Boolean> tableUsePartLevelAuth = new HashMap<String, Boolean>();
       for (ReadEntity read : inputs) {
-        if (read.isDummy() || read.getType() == Entity.Type.DATABASE) {
+        if (read.isDummy() || read.isPathType() || read.getType() == Entity.Type.DATABASE) {
           continue;
         }
         Table tbl = read.getTable();
@@ -643,7 +642,7 @@ public class Driver implements CommandPr
       // cache the results for table authorization
       Set<String> tableAuthChecked = new HashSet<String>();
       for (ReadEntity read : inputs) {
-        if (read.isDummy()) {
+        if (read.isDummy() || read.isPathType()) {
           continue;
         }
         if (read.getType() == Entity.Type.DATABASE) {
@@ -804,7 +803,7 @@ public class Driver implements CommandPr
         break;
       case DFS_DIR:
       case LOCAL_DIR:
-        objName = privObject.getD();
+        objName = privObject.getD().toString();
         break;
       case FUNCTION:
         if(privObject.getDatabase() != null) {
@@ -1151,20 +1150,6 @@ public class Driver implements CommandPr
     perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.DRIVER_RUN);
     perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.TIME_TO_SUBMIT);
 
-    boolean requireLock = false;
-    boolean ckLock = false;
-    SessionState ss = SessionState.get();
-    try {
-      ckLock = checkConcurrency();
-    } catch (SemanticException e) {
-      errorMessage = "FAILED: Error in semantic analysis: " + e.getMessage();
-      SQLState = ErrorMsg.findSQLState(e.getMessage());
-      downstreamError = e;
-      console.printError(errorMessage, "\n"
-          + org.apache.hadoop.util.StringUtils.stringifyException(e));
-      return createProcessorResponse(10);
-    }
-
     int ret;
     if (!alreadyCompiled) {
       ret = compileInternal(command);
@@ -1176,34 +1161,9 @@ public class Driver implements CommandPr
     // the reason that we set the txn manager for the cxt here is because each
     // query has its own ctx object. The txn mgr is shared across the
     // same instance of Driver, which can run multiple queries.
-    ctx.setHiveTxnManager(ss.getTxnMgr());
+    ctx.setHiveTxnManager(SessionState.get().getTxnMgr());
 
-    if (ckLock) {
-      boolean lockOnlyMapred = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_LOCK_MAPRED_ONLY);
-      if(lockOnlyMapred) {
-        Queue<Task<? extends Serializable>> taskQueue = new LinkedList<Task<? extends Serializable>>();
-        taskQueue.addAll(plan.getRootTasks());
-        while (taskQueue.peek() != null) {
-          Task<? extends Serializable> tsk = taskQueue.remove();
-          requireLock = requireLock || tsk.requireLock();
-          if(requireLock) {
-            break;
-          }
-          if (tsk instanceof ConditionalTask) {
-            taskQueue.addAll(((ConditionalTask)tsk).getListTasks());
-          }
-          if(tsk.getChildTasks()!= null) {
-            taskQueue.addAll(tsk.getChildTasks());
-          }
-          // does not add back up task here, because back up task should be the same
-          // type of the original task.
-        }
-      } else {
-        requireLock = true;
-      }
-    }
-
-    if (requireLock) {
+    if (requiresLock()) {
       ret = acquireLocksAndOpenTxn();
       if (ret != 0) {
         try {
@@ -1258,6 +1218,32 @@ public class Driver implements CommandPr
     return createProcessorResponse(ret);
   }
 
+  private boolean requiresLock() {
+    if (!checkConcurrency()) {
+      return false;
+    }
+    if (!HiveConf.getBoolVar(conf, ConfVars.HIVE_LOCK_MAPRED_ONLY)) {
+      return true;
+    }
+    Queue<Task<? extends Serializable>> taskQueue = new LinkedList<Task<? extends Serializable>>();
+    taskQueue.addAll(plan.getRootTasks());
+    while (taskQueue.peek() != null) {
+      Task<? extends Serializable> tsk = taskQueue.remove();
+      if (tsk.requireLock()) {
+        return true;
+      }
+      if (tsk instanceof ConditionalTask) {
+        taskQueue.addAll(((ConditionalTask)tsk).getListTasks());
+      }
+      if (tsk.getChildTasks()!= null) {
+        taskQueue.addAll(tsk.getChildTasks());
+      }
+      // does not add back up task here, because back up task should be the same
+      // type of the original task.
+    }
+    return false;
+  }
+
   private CommandProcessorResponse createProcessorResponse(int ret) {
     return new CommandProcessorResponse(ret, errorMessage, SQLState, downstreamError);
   }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java?rev=1645550&r1=1645549&r2=1645550&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java Mon Dec 15 02:25:06 2014
@@ -67,7 +67,7 @@ public class Entity implements Serializa
   /**
    * The directory if this is a directory
    */
-  private String d;
+  private Path d;
 
   /**
    * An object that is represented as a String
@@ -135,11 +135,11 @@ public class Entity implements Serializa
     this.p = p;
   }
 
-  public String getD() {
+  public Path getD() {
     return d;
   }
 
-  public void setD(String d) {
+  public void setD(Path d) {
     this.d = d;
   }
 
@@ -218,7 +218,7 @@ public class Entity implements Serializa
     this.complete = complete;
   }
 
-  public Entity(String d, boolean islocal, boolean complete) {
+  public Entity(Path d, boolean islocal, boolean complete) {
     this.d = d;
     p = null;
     t = null;
@@ -267,6 +267,10 @@ public class Entity implements Serializa
     return typ;
   }
 
+  public boolean isPathType() {
+    return typ == Type.DFS_DIR || typ == Type.LOCAL_DIR;
+  }
+
   /**
    * Get the location of the entity.
    */
@@ -287,7 +291,7 @@ public class Entity implements Serializa
     }
 
     if (typ == Type.DFS_DIR || typ == Type.LOCAL_DIR) {
-      return new URI(d);
+      return d.toUri();
     }
 
     return null;
@@ -341,7 +345,7 @@ public class Entity implements Serializa
       }
       return stringObject;
     default:
-      return d;
+      return d.toString();
     }
   }
 

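The new isPathType() helper gives call sites such as Driver.doAuthorization() a single test for directory entities. An illustrative fragment (assuming, as in the existing constructors, that islocal=false maps to DFS_DIR):

    // Directory entities are LOCAL_DIR or DFS_DIR depending on the islocal flag,
    // so isPathType() is true and the table/partition authorization loops skip them.
    ReadEntity dir = new ReadEntity(new Path("hdfs:///tmp/staging"), false);
    assert dir.isPathType();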
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java?rev=1645550&r1=1645549&r2=1645550&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java Mon Dec 15 02:25:06 2014
@@ -128,7 +128,7 @@ public class ReadEntity extends Entity i
    *          Flag to decide whether this directory is local or in dfs.
    */
   public ReadEntity(Path d, boolean islocal) {
-    super(d.toString(), islocal, true);
+    super(d, islocal, true);
   }
 
   public Set<ReadEntity> getParents() {

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java?rev=1645550&r1=1645549&r2=1645550&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java Mon Dec 15 02:25:06 2014
@@ -133,7 +133,7 @@ public class WriteEntity extends Entity
    *          True if this is a temporary location such as scratch dir
    */
   public WriteEntity(Path d, boolean islocal, boolean isTemp) {
-    super(d.toString(), islocal, true);
+    super(d, islocal, true);
     this.isTempURI = isTemp;
     this.writeType = WriteType.PATH_WRITE;
   }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java?rev=1645550&r1=1645549&r2=1645550&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java Mon Dec 15 02:25:06 2014
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.ql.parse;
 
+import java.io.IOException;
 import java.io.Serializable;
 import java.io.UnsupportedEncodingException;
 import java.sql.Date;
@@ -38,6 +39,8 @@ import org.antlr.runtime.tree.Tree;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.FileUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.HiveMetaStore;
 import org.apache.hadoop.hive.metastore.api.Database;
@@ -1276,6 +1279,40 @@ public abstract class BaseSemanticAnalyz
     return HiveMetaStore.PARTITION_DATE_FORMAT.get().format(value);
   }
 
+  protected WriteEntity toWriteEntity(String location) throws SemanticException {
+    return toWriteEntity(new Path(location));
+  }
+
+  protected WriteEntity toWriteEntity(Path location) throws SemanticException {
+    try {
+      Path path = tryQualifyPath(location);
+      return new WriteEntity(path, FileUtils.isLocalFile(conf, path.toUri()));
+    } catch (Exception e) {
+      throw new SemanticException(e);
+    }
+  }
+
+  protected ReadEntity toReadEntity(String location) throws SemanticException {
+    return toReadEntity(new Path(location));
+  }
+
+  protected ReadEntity toReadEntity(Path location) throws SemanticException {
+    try {
+      Path path = tryQualifyPath(location);
+      return new ReadEntity(path, FileUtils.isLocalFile(conf, path.toUri()));
+    } catch (Exception e) {
+      throw new SemanticException(e);
+    }
+  }
+
+  private Path tryQualifyPath(Path path) throws IOException {
+    try {
+      return path.getFileSystem(conf).makeQualified(path);
+    } catch (IOException e) {
+      return path;  // some tests expected to pass invalid schema
+    }
+  }
+
   protected Database getDatabase(String dbName) throws SemanticException {
     return getDatabase(dbName, true);
   }

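With these helpers, analyzer subclasses register path inputs/outputs uniformly, as the later hunks do. A sketch of a call site (the location variables are illustrative):

    // The location is qualified against its FileSystem when possible; tryQualifyPath
    // deliberately falls back to the raw Path on IOException so tests that pass an
    // invalid scheme keep working. Locality comes from FileUtils.isLocalFile.
    inputs.add(toReadEntity(location));
    outputs.add(toWriteEntity(newLocation));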
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java?rev=1645550&r1=1645549&r2=1645550&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java Mon Dec 15 02:25:06 2014
@@ -45,7 +45,6 @@ import org.antlr.runtime.tree.Tree;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.common.FileUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
@@ -2740,17 +2739,7 @@ public class DDLSemanticAnalyzer extends
           throw new SemanticException("LOCATION clause illegal for view partition");
         }
         currentLocation = unescapeSQLString(child.getChild(0).getText());
-        boolean isLocal = false;
-        try {
-          // do best effort to determine if this is a local file
-          String scheme = new URI(currentLocation).getScheme();
-          if (scheme != null) {
-            isLocal = FileUtils.isLocalFile(conf, currentLocation);
-          }
-        } catch (URISyntaxException e) {
-          LOG.warn("Unable to create URI from " + currentLocation, e);
-        }
-        inputs.add(new ReadEntity(new Path(currentLocation), isLocal));
+        inputs.add(toReadEntity(currentLocation));
         break;
       default:
         throw new SemanticException("Unknown child: " + child);
@@ -3382,8 +3371,8 @@ public class DDLSemanticAnalyzer extends
         alterTblDesc), conf));
   }
 
-  private void addLocationToOutputs(String newLocation) {
-    outputs.add(new WriteEntity(new Path(newLocation), FileUtils.isLocalFile(conf, newLocation)));
+  private void addLocationToOutputs(String newLocation) throws SemanticException {
+    outputs.add(toWriteEntity(newLocation));
   }
 
   /**

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java?rev=1645550&r1=1645549&r2=1645550&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java Mon Dec 15 02:25:06 2014
@@ -28,13 +28,11 @@ import org.antlr.runtime.tree.Tree;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.common.FileUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
-import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.plan.CopyWork;
 
@@ -121,7 +119,6 @@ public class ExportSemanticAnalyzer exte
       rootTasks.add(rTask);
       inputs.add(new ReadEntity(ts.tableHandle));
     }
-    boolean isLocal = FileUtils.isLocalFile(conf, toURI);
-    outputs.add(new WriteEntity(parentPath, isLocal));
+    outputs.add(toWriteEntity(parentPath));
   }
 }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java?rev=1645550&r1=1645549&r2=1645550&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java Mon Dec 15 02:25:06 2014
@@ -22,8 +22,6 @@ import java.util.List;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.common.FileUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.api.Database;
@@ -189,8 +187,7 @@ public class FunctionSemanticAnalyzer ex
     if (resources != null) {
       for (ResourceUri resource : resources) {
         String uriPath = resource.getUri();
-        outputs.add(new WriteEntity(new Path(uriPath),
-            FileUtils.isLocalFile(conf, uriPath)));
+        outputs.add(toWriteEntity(uriPath));
       }
     }
   }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java?rev=1645550&r1=1645549&r2=1645550&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java Mon Dec 15 02:25:06 2014
@@ -35,7 +35,6 @@ import org.apache.commons.lang.ObjectUti
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.common.FileUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.TableType;
 import org.apache.hadoop.hive.metastore.Warehouse;
@@ -47,7 +46,6 @@ import org.apache.hadoop.hive.ql.ErrorMs
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.exec.Utilities;
-import org.apache.hadoop.hive.ql.hooks.ReadEntity;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -95,8 +93,7 @@ public class ImportSemanticAnalyzer exte
       List<AddPartitionDesc> partitionDescs = new ArrayList<AddPartitionDesc>();
       Path fromPath = new Path(fromURI.getScheme(), fromURI.getAuthority(),
           fromURI.getPath());
-      boolean isLocal = FileUtils.isLocalFile(conf, fromURI);
-      inputs.add(new ReadEntity(fromPath, isLocal));
+      inputs.add(toReadEntity(fromPath));
       try {
         Path metadataPath = new Path(fromPath, METADATA_NAME);
         Map.Entry<org.apache.hadoop.hive.metastore.api.Table,
@@ -166,6 +163,7 @@ public class ImportSemanticAnalyzer exte
         case HiveParser.TOK_TABLELOCATION:
           String location = unescapeSQLString(child.getChild(0).getText());
           location = EximUtil.relativeToAbsolutePath(conf, location);
+          inputs.add(toReadEntity(location));
           tblDesc.setLocation(location);
           break;
         case HiveParser.TOK_TAB:

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java?rev=1645550&r1=1645549&r2=1645550&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java Mon Dec 15 02:25:06 2014
@@ -19,7 +19,6 @@
 package org.apache.hadoop.hive.ql.parse;
 
 import org.antlr.runtime.tree.Tree;
-import org.apache.commons.httpclient.URIException;
 import org.apache.commons.httpclient.util.URIUtil;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.fs.FileStatus;
@@ -32,7 +31,6 @@ import org.apache.hadoop.hive.ql.ErrorMs
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.exec.Utilities;
-import org.apache.hadoop.hive.ql.hooks.ReadEntity;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -55,9 +53,6 @@ import java.util.Map;
  */
 public class LoadSemanticAnalyzer extends BaseSemanticAnalyzer {
 
-  private boolean isLocal;
-  private boolean isOverWrite;
-
   public LoadSemanticAnalyzer(HiveConf conf) throws SemanticException {
     super(conf);
   }
@@ -85,7 +80,7 @@ public class LoadSemanticAnalyzer extend
     return (srcs);
   }
 
-  private URI initializeFromURI(String fromPath) throws IOException,
+  private URI initializeFromURI(String fromPath, boolean isLocal) throws IOException,
       URISyntaxException {
     URI fromURI = new Path(fromPath).toUri();
 
@@ -172,8 +167,8 @@ public class LoadSemanticAnalyzer extend
 
   @Override
   public void analyzeInternal(ASTNode ast) throws SemanticException {
-    isLocal = false;
-    isOverWrite = false;
+    boolean isLocal = false;
+    boolean isOverWrite = false;
     Tree fromTree = ast.getChild(0);
     Tree tableTree = ast.getChild(1);
 
@@ -194,7 +189,7 @@ public class LoadSemanticAnalyzer extend
     URI fromURI;
     try {
       String fromPath = stripQuotes(fromTree.getText());
-      fromURI = initializeFromURI(fromPath);
+      fromURI = initializeFromURI(fromPath, isLocal);
     } catch (IOException e) {
       throw new SemanticException(ErrorMsg.INVALID_PATH.getMsg(fromTree, e
           .getMessage()), e);
@@ -233,7 +228,7 @@ public class LoadSemanticAnalyzer extend
 
     // make sure the arguments make sense
     applyConstraints(fromURI, toURI, fromTree, isLocal);
-    inputs.add(new ReadEntity(new Path(fromURI), isLocal));
+    inputs.add(toReadEntity(new Path(fromURI)));
     Task<? extends Serializable> rTask = null;
 
     // create final load/move work

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=1645550&r1=1645549&r2=1645550&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Mon Dec 15 02:25:06 2014
@@ -10950,7 +10950,7 @@ public class SemanticAnalyzer extends Ba
       case HiveParser.TOK_TABLELOCATION:
         location = unescapeSQLString(child.getChild(0).getText());
         location = EximUtil.relativeToAbsolutePath(conf, location);
-        inputs.add(new ReadEntity(new Path(location), FileUtils.isLocalFile(conf, location)));
+        inputs.add(toReadEntity(location));
         break;
       case HiveParser.TOK_TABLEPROPERTIES:
         tblProps = DDLSemanticAnalyzer.getProps((ASTNode) child.getChild(0));

Modified: hive/trunk/ql/src/test/results/clientnegative/udf_local_resource.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/udf_local_resource.q.out?rev=1645550&r1=1645549&r2=1645550&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/udf_local_resource.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/udf_local_resource.q.out Mon Dec 15 02:25:06 2014
@@ -1,6 +1,6 @@
 PREHOOK: query: create function lookup as 'org.apache.hadoop.hive.ql.udf.UDFFileLookup' using file '../../data/files/sales.txt'
 PREHOOK: type: CREATEFUNCTION
-#### A masked pattern was here ####
 PREHOOK: Output: database:default
 PREHOOK: Output: default.lookup
+#### A masked pattern was here ####
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.FunctionTask. Hive warehouse is non-local, but ../../data/files/sales.txt specifies file on local filesystem. Resources on non-local warehouse should specify a non-local scheme/path

Modified: hive/trunk/ql/src/test/results/clientnegative/udf_nonexistent_resource.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/udf_nonexistent_resource.q.out?rev=1645550&r1=1645549&r2=1645550&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/udf_nonexistent_resource.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/udf_nonexistent_resource.q.out Mon Dec 15 02:25:06 2014
@@ -2,6 +2,6 @@ PREHOOK: query: create function lookup a
 PREHOOK: type: CREATEFUNCTION
 PREHOOK: Output: database:default
 PREHOOK: Output: default.lookup
-PREHOOK: Output: nonexistent_file.txt
+#### A masked pattern was here ####
 nonexistent_file.txt does not exist
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.FunctionTask. nonexistent_file.txt does not exist

Modified: hive/trunk/ql/src/test/results/clientpositive/add_part_multiple.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/add_part_multiple.q.out?rev=1645550&r1=1645549&r2=1645550&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/add_part_multiple.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/add_part_multiple.q.out Mon Dec 15 02:25:06 2014
@@ -39,9 +39,7 @@ PARTITION (ds='2010-02-01') location 'B'
 PARTITION (ds='2010-03-01')
 PARTITION (ds='2010-04-01') location 'C'
 PREHOOK: type: ALTERTABLE_ADDPARTS
-PREHOOK: Input: A
-PREHOOK: Input: B
-PREHOOK: Input: C
+#### A masked pattern was here ####
 PREHOOK: Output: default@add_part_test
 POSTHOOK: query: ALTER TABLE add_part_test ADD IF NOT EXISTS
 PARTITION (ds='2010-01-01') location 'A'
@@ -49,9 +47,7 @@ PARTITION (ds='2010-02-01') location 'B'
 PARTITION (ds='2010-03-01')
 PARTITION (ds='2010-04-01') location 'C'
 POSTHOOK: type: ALTERTABLE_ADDPARTS
-POSTHOOK: Input: A
-POSTHOOK: Input: B
-POSTHOOK: Input: C
+#### A masked pattern was here ####
 POSTHOOK: Output: default@add_part_test
 POSTHOOK: Output: default@add_part_test@ds=2010-01-01
 POSTHOOK: Output: default@add_part_test@ds=2010-02-01

Modified: hive/trunk/ql/src/test/results/clientpositive/alter2.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/alter2.q.out?rev=1645550&r1=1645549&r2=1645550&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/alter2.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/alter2.q.out Mon Dec 15 02:25:06 2014
@@ -30,11 +30,11 @@ POSTHOOK: type: SHOWPARTITIONS
 POSTHOOK: Input: default@alter2
 PREHOOK: query: alter table alter2 add partition (insertdate='2008-01-01') location '2008/01/01'
 PREHOOK: type: ALTERTABLE_ADDPARTS
-PREHOOK: Input: 2008/01/01
+#### A masked pattern was here ####
 PREHOOK: Output: default@alter2
 POSTHOOK: query: alter table alter2 add partition (insertdate='2008-01-01') location '2008/01/01'
 POSTHOOK: type: ALTERTABLE_ADDPARTS
-POSTHOOK: Input: 2008/01/01
+#### A masked pattern was here ####
 POSTHOOK: Output: default@alter2
 POSTHOOK: Output: default@alter2@insertdate=2008-01-01
 PREHOOK: query: describe extended alter2
@@ -62,11 +62,11 @@ POSTHOOK: Input: default@alter2
 insertdate=2008-01-01
 PREHOOK: query: alter table alter2 add partition (insertdate='2008-01-02') location '2008/01/02'
 PREHOOK: type: ALTERTABLE_ADDPARTS
-PREHOOK: Input: 2008/01/02
+#### A masked pattern was here ####
 PREHOOK: Output: default@alter2
 POSTHOOK: query: alter table alter2 add partition (insertdate='2008-01-02') location '2008/01/02'
 POSTHOOK: type: ALTERTABLE_ADDPARTS
-POSTHOOK: Input: 2008/01/02
+#### A masked pattern was here ####
 POSTHOOK: Output: default@alter2
 POSTHOOK: Output: default@alter2@insertdate=2008-01-02
 PREHOOK: query: describe extended alter2
@@ -133,11 +133,11 @@ POSTHOOK: type: SHOWPARTITIONS
 POSTHOOK: Input: default@alter2
 PREHOOK: query: alter table alter2 add partition (insertdate='2008-01-01') location '2008/01/01'
 PREHOOK: type: ALTERTABLE_ADDPARTS
-PREHOOK: Input: 2008/01/01
+#### A masked pattern was here ####
 PREHOOK: Output: default@alter2
 POSTHOOK: query: alter table alter2 add partition (insertdate='2008-01-01') location '2008/01/01'
 POSTHOOK: type: ALTERTABLE_ADDPARTS
-POSTHOOK: Input: 2008/01/01
+#### A masked pattern was here ####
 POSTHOOK: Output: default@alter2
 POSTHOOK: Output: default@alter2@insertdate=2008-01-01
 PREHOOK: query: describe extended alter2
@@ -165,11 +165,11 @@ POSTHOOK: Input: default@alter2
 insertdate=2008-01-01
 PREHOOK: query: alter table alter2 add partition (insertdate='2008-01-02') location '2008/01/02'
 PREHOOK: type: ALTERTABLE_ADDPARTS
-PREHOOK: Input: 2008/01/02
+#### A masked pattern was here ####
 PREHOOK: Output: default@alter2
 POSTHOOK: query: alter table alter2 add partition (insertdate='2008-01-02') location '2008/01/02'
 POSTHOOK: type: ALTERTABLE_ADDPARTS
-POSTHOOK: Input: 2008/01/02
+#### A masked pattern was here ####
 POSTHOOK: Output: default@alter2
 POSTHOOK: Output: default@alter2@insertdate=2008-01-02
 PREHOOK: query: describe extended alter2
@@ -277,11 +277,11 @@ POSTHOOK: type: SHOWPARTITIONS
 POSTHOOK: Input: alter2_db@alter2
 #### A masked pattern was here ####
 PREHOOK: type: ALTERTABLE_ADDPARTS
-PREHOOK: Input: 2008/01/01
+#### A masked pattern was here ####
 PREHOOK: Output: alter2_db@alter2
 #### A masked pattern was here ####
 POSTHOOK: type: ALTERTABLE_ADDPARTS
-POSTHOOK: Input: 2008/01/01
+#### A masked pattern was here ####
 POSTHOOK: Output: alter2_db@alter2
 POSTHOOK: Output: alter2_db@alter2@insertdate=2008-01-01
 PREHOOK: query: DESCRIBE EXTENDED alter2
@@ -309,11 +309,11 @@ POSTHOOK: Input: alter2_db@alter2
 insertdate=2008-01-01
 #### A masked pattern was here ####
 PREHOOK: type: ALTERTABLE_ADDPARTS
-PREHOOK: Input: 2008/01/02
+#### A masked pattern was here ####
 PREHOOK: Output: alter2_db@alter2
 #### A masked pattern was here ####
 POSTHOOK: type: ALTERTABLE_ADDPARTS
-POSTHOOK: Input: 2008/01/02
+#### A masked pattern was here ####
 POSTHOOK: Output: alter2_db@alter2
 POSTHOOK: Output: alter2_db@alter2@insertdate=2008-01-02
 PREHOOK: query: DESCRIBE EXTENDED alter2
@@ -380,11 +380,11 @@ POSTHOOK: type: SHOWPARTITIONS
 POSTHOOK: Input: alter2_db@alter2
 #### A masked pattern was here ####
 PREHOOK: type: ALTERTABLE_ADDPARTS
-PREHOOK: Input: 2008/01/01
+#### A masked pattern was here ####
 PREHOOK: Output: alter2_db@alter2
 #### A masked pattern was here ####
 POSTHOOK: type: ALTERTABLE_ADDPARTS
-POSTHOOK: Input: 2008/01/01
+#### A masked pattern was here ####
 POSTHOOK: Output: alter2_db@alter2
 POSTHOOK: Output: alter2_db@alter2@insertdate=2008-01-01
 PREHOOK: query: DESCRIBE EXTENDED alter2
@@ -412,11 +412,11 @@ POSTHOOK: Input: alter2_db@alter2
 insertdate=2008-01-01
 #### A masked pattern was here ####
 PREHOOK: type: ALTERTABLE_ADDPARTS
-PREHOOK: Input: 2008/01/02
+#### A masked pattern was here ####
 PREHOOK: Output: alter2_db@alter2
 #### A masked pattern was here ####
 POSTHOOK: type: ALTERTABLE_ADDPARTS
-POSTHOOK: Input: 2008/01/02
+#### A masked pattern was here ####
 POSTHOOK: Output: alter2_db@alter2
 POSTHOOK: Output: alter2_db@alter2@insertdate=2008-01-02
 PREHOOK: query: DESCRIBE EXTENDED alter2

Modified: hive/trunk/ql/src/test/results/clientpositive/alter5.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/alter5.q.out?rev=1645550&r1=1645549&r2=1645550&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/alter5.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/alter5.q.out Mon Dec 15 02:25:06 2014
@@ -36,7 +36,7 @@ PREHOOK: query: --
 --
 alter table alter5 add partition (dt='a') location 'parta'
 PREHOOK: type: ALTERTABLE_ADDPARTS
-PREHOOK: Input: parta
+#### A masked pattern was here ####
 PREHOOK: Output: default@alter5
 POSTHOOK: query: --
 -- Here's the interesting bit for HIVE-2117 - partition subdir should be
@@ -44,7 +44,7 @@ POSTHOOK: query: --
 --
 alter table alter5 add partition (dt='a') location 'parta'
 POSTHOOK: type: ALTERTABLE_ADDPARTS
-POSTHOOK: Input: parta
+#### A masked pattern was here ####
 POSTHOOK: Output: default@alter5
 POSTHOOK: Output: default@alter5@dt=a
 PREHOOK: query: describe extended alter5 partition (dt='a')
@@ -185,11 +185,11 @@ POSTHOOK: Output: alter5_db@alter5
 POSTHOOK: Output: database:alter5_db
 PREHOOK: query: alter table alter5 add partition (dt='a') location 'parta'
 PREHOOK: type: ALTERTABLE_ADDPARTS
-PREHOOK: Input: parta
+#### A masked pattern was here ####
 PREHOOK: Output: alter5_db@alter5
 POSTHOOK: query: alter table alter5 add partition (dt='a') location 'parta'
 POSTHOOK: type: ALTERTABLE_ADDPARTS
-POSTHOOK: Input: parta
+#### A masked pattern was here ####
 POSTHOOK: Output: alter5_db@alter5
 POSTHOOK: Output: alter5_db@alter5@dt=a
 PREHOOK: query: describe extended alter5 partition (dt='a')

Modified: hive/trunk/ql/src/test/results/clientpositive/exim_17_part_managed.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/exim_17_part_managed.q.out?rev=1645550&r1=1645549&r2=1645550&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/exim_17_part_managed.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/exim_17_part_managed.q.out Mon Dec 15 02:25:06 2014
@@ -126,12 +126,12 @@ POSTHOOK: Output: importer@exim_employee
 PREHOOK: query: alter table exim_employee add partition	(emp_country="us", emp_state="ap")
 	location 'ql/test/data/tablestore2/exim_employee'
 PREHOOK: type: ALTERTABLE_ADDPARTS
-PREHOOK: Input: ql/test/data/tablestore2/exim_employee
+#### A masked pattern was here ####
 PREHOOK: Output: importer@exim_employee
 POSTHOOK: query: alter table exim_employee add partition	(emp_country="us", emp_state="ap")
 	location 'ql/test/data/tablestore2/exim_employee'
 POSTHOOK: type: ALTERTABLE_ADDPARTS
-POSTHOOK: Input: ql/test/data/tablestore2/exim_employee
+#### A masked pattern was here ####
 POSTHOOK: Output: importer@exim_employee
 POSTHOOK: Output: importer@exim_employee@emp_country=us/emp_state=ap
 PREHOOK: query: show table extended like exim_employee


