hadoop-hive-commits mailing list archives

From: na...@apache.org
Subject: svn commit: r900002 - in /hadoop/hive/branches/branch-0.5: ./ data/conf/ ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/test/org/apache/hadoop/hive/ql/
Date: Sat, 16 Jan 2010 17:27:19 GMT
Author: namit
Date: Sat Jan 16 17:27:18 2010
New Revision: 900002

URL: http://svn.apache.org/viewvc?rev=900002&view=rev
Log:
HIVE-1046. Pass build.dir.hive and other properties to subant.
(Zheng Shao via namit)


Modified:
    hadoop/hive/branches/branch-0.5/CHANGES.txt
    hadoop/hive/branches/branch-0.5/build-common.xml
    hadoop/hive/branches/branch-0.5/build.xml
    hadoop/hive/branches/branch-0.5/data/conf/hive-log4j.properties
    hadoop/hive/branches/branch-0.5/data/conf/hive-site.xml
    hadoop/hive/branches/branch-0.5/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java
    hadoop/hive/branches/branch-0.5/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java

Modified: hadoop/hive/branches/branch-0.5/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.5/CHANGES.txt?rev=900002&r1=900001&r2=900002&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.5/CHANGES.txt (original)
+++ hadoop/hive/branches/branch-0.5/CHANGES.txt Sat Jan 16 17:27:18 2010
@@ -448,6 +448,9 @@
     HIVE-1045. (bigint % int) should return bigint instead of double.
     (Paul Yang via zshao)
 
+    HIVE-1046. Pass build.dir.hive and other properties to subant.
+    (Zheng Shao via namit)
+
 Release 0.4.0 -  Unreleased
 
   INCOMPATIBLE CHANGES

Modified: hadoop/hive/branches/branch-0.5/build-common.xml
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.5/build-common.xml?rev=900002&r1=900001&r2=900002&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.5/build-common.xml (original)
+++ hadoop/hive/branches/branch-0.5/build-common.xml Sat Jan 16 17:27:18 2010
@@ -305,6 +305,10 @@
       <sysproperty key="test.log.dir" value="${test.log.dir}"/>
       <sysproperty key="hadoop.log.dir" value="${test.log.dir}"/>
       <sysproperty key="test.silent" value="${test.silent}"/>
+      <sysproperty key="test.tmp.dir" value="${build.dir}/tmp"/>
+      <sysproperty key="test.warehouse.dir" value="${build.dir}/test/data/warehouse"/>
+      <sysproperty key="build.dir" value="${build.dir}"/>
+      <sysproperty key="build.dir.hive" value="${build.dir.hive}"/>
 
       <classpath refid="${test.classpath.id}"/>
       <formatter type="${test.junit.output.format}" usefile="${test.junit.output.usefile}" />

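The four new <sysproperty> entries make the forked JUnit JVM see these locations through System.getProperty, which is what the QTestUtil change at the end of this commit relies on. A minimal sketch of reading them from test code, assuming a hypothetical helper class and fallback paths that are not part of this commit:

    // Illustrative only: read the directories that build-common.xml now passes
    // to the test JVM via <sysproperty>. The fallback values are hypothetical.
    public final class TestDirs {
        public static String tmpDir() {
            return System.getProperty("test.tmp.dir", "/tmp/hive-test");
        }

        public static String warehouseDir() {
            return System.getProperty("test.warehouse.dir", tmpDir() + "/warehouse");
        }
    }
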
Modified: hadoop/hive/branches/branch-0.5/build.xml
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.5/build.xml?rev=900002&r1=900001&r2=900002&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.5/build.xml (original)
+++ hadoop/hive/branches/branch-0.5/build.xml Sat Jan 16 17:27:18 2010
@@ -68,6 +68,7 @@
     <sequential>
       <subant target="@{target}">
 	<property name="thrift.home" value="${thrift.home}"/>
+        <property name="build.dir.hive" location="${build.dir.hive}"/>
         <!-- TODO filelist dir="." files="metastore/build.xml,serde/build.xml,service/build.xml,odbc/build.xml"/-->
         <filelist dir="." files="odbc/build.xml"/>
       </subant>
@@ -78,6 +79,7 @@
     <attribute name="target"/>
     <sequential>
       <subant target="@{target}">
+        <property name="build.dir.hive" location="${build.dir.hive}"/>
         <filelist dir="." files="ant/build.xml,shims/build.xml,common/build.xml,serde/build.xml,metastore/build.xml,ql/build.xml,cli/build.xml,contrib/build.xml,service/build.xml,jdbc/build.xml,hwi/build.xml,ant/build.xml"/>
       </subant>
     </sequential>
@@ -87,6 +89,7 @@
     <attribute name="target"/>
     <sequential>
       <subant target="@{target}">
+        <property name="build.dir.hive" location="${build.dir.hive}"/>
         <filelist dir="." files="shims/build.xml,common/build.xml,serde/build.xml,metastore/build.xml,ql/build.xml,cli/build.xml,contrib/build.xml,service/build.xml,jdbc/build.xml,hwi/build.xml"/>
       </subant>
     </sequential>
@@ -181,6 +184,7 @@
           depends="jar"
           description="Generate test data">
     <subant target="gen-testdata">
+      <property name="build.dir.hive" location="${build.dir.hive}"/>
       <fileset dir="." includes="serde/build.xml"/>
     </subant>
   </target>

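<subant> does not pass the caller's properties down by default (its inheritall attribute defaults to false), so build.dir.hive has to be handed to each child build explicitly; using location= rather than value= additionally resolves a relative setting against the calling project's basedir, so the children receive an absolute path. A rough Java illustration of that resolution step, with hypothetical names, not Ant internals:

    import java.nio.file.Path;
    import java.nio.file.Paths;

    // Rough sketch of what the location= attribute does with a relative value:
    // resolve it against the project basedir and pass an absolute path downstream.
    public final class LocationDemo {
        static String resolveLocation(String basedir, String value) {
            Path p = Paths.get(value);
            return (p.isAbsolute() ? p : Paths.get(basedir).resolve(p))
                    .normalize().toAbsolutePath().toString();
        }

        public static void main(String[] args) {
            // e.g. build.dir.hive=build, relative to the Hive checkout
            System.out.println(resolveLocation("/work/hive", "build"));
        }
    }
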
Modified: hadoop/hive/branches/branch-0.5/data/conf/hive-log4j.properties
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.5/data/conf/hive-log4j.properties?rev=900002&r1=900001&r2=900002&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.5/data/conf/hive-log4j.properties (original)
+++ hadoop/hive/branches/branch-0.5/data/conf/hive-log4j.properties Sat Jan 16 17:27:18 2010
@@ -1,6 +1,6 @@
 # Define some default values that can be overridden by system properties
 hive.root.logger=DEBUG,DRFA
-hive.log.dir=${user.dir}/../build/ql/tmp/
+hive.log.dir=${build.dir.hive}/ql/tmp/
 hive.log.file=hive.log
 
 # Define the root logger to the system property "hadoop.root.logger".

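log4j 1.x expands ${...} references in a properties file from JVM system properties, so hive.log.dir now follows whatever -Dbuild.dir.hive the build passes to the test JVM. A small sketch under that assumption, with a placeholder path:

    import org.apache.log4j.Logger;
    import org.apache.log4j.PropertyConfigurator;

    // Sketch: build.dir.hive must be set as a system property before the
    // properties file is parsed so the ${build.dir.hive} reference can resolve.
    public final class LogSetupDemo {
        public static void main(String[] args) {
            System.setProperty("build.dir.hive", "/work/hive/build");   // placeholder path
            PropertyConfigurator.configure("data/conf/hive-log4j.properties");
            Logger.getLogger(LogSetupDemo.class).debug("hive.log.dir now points under build.dir.hive");
        }
    }
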
Modified: hadoop/hive/branches/branch-0.5/data/conf/hive-site.xml
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.5/data/conf/hive-site.xml?rev=900002&r1=900001&r2=900002&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.5/data/conf/hive-site.xml (original)
+++ hadoop/hive/branches/branch-0.5/data/conf/hive-site.xml Sat Jan 16 17:27:18 2010
@@ -12,18 +12,19 @@
 <!-- Hive Execution Parameters -->
 <property>
   <name>hadoop.tmp.dir</name>
-  <value>${user.dir}/../build/test/hadoop-${user.name}</value>
+  <value>${build.dir.hive}/test/hadoop-${user.name}</value>
   <description>A base for other temporary directories.</description>
 </property>
 
 <property>
   <name>hive.exec.scratchdir</name>
-  <value>${user.dir}/../build/ql/tmp</value>
+  <value>${build.dir}/scratchdir</value>
   <description>Scratch space for Hive jobs</description>
 </property>
 
 <property>
   <name>javax.jdo.option.ConnectionURL</name>
+  <!-- note: variable substitution not working here because it's loaded by jdo, not Hive -->
   <value>jdbc:derby:;databaseName=../build/test/junit_metastore_db;create=true</value>
 </property>
 
@@ -51,13 +52,13 @@
 <property>
   <!--  this should eventually be deprecated since the metastore should supply this -->
   <name>hive.metastore.warehouse.dir</name>
-  <value>file://${user.dir}/../build/ql/test/data/warehouse/</value>
+  <value>file://${build.dir}/test/data/warehouse/</value>
   <description></description>
 </property>
 
 <property>
   <name>hive.metastore.metadb.dir</name>
-  <value>file://${user.dir}/../build/ql/test/data/metadb/</value>
+  <value>file://${build.dir}/test/data/metadb/</value>
   <description>
   Required by metastore server or if the uris argument below is not supplied
   </description>
@@ -77,13 +78,13 @@
 
 <property>
   <name>test.log.dir</name>
-  <value>${user.dir}/../build/ql/test/logs</value>
+  <value>${build.dir}/test/logs</value>
   <description></description>
 </property>
 
 <property>
   <name>test.src.dir</name>
-  <value>file://${user.dir}/../ql/src/test</value>
+  <value>file://${build.dir}/src/test</value>
   <description></description>
 </property>
 
@@ -102,7 +103,7 @@
 
 <property>
   <name>hive.jar.path</name>
-  <value>${user.dir}/../build/ql/hive_exec.jar</value>
+  <value>${build.dir.hive}/ql/hive_exec.jar</value>
   <description></description>
 </property>
 
@@ -114,7 +115,7 @@
 
 <property>
   <name>hive.querylog.location</name>
-  <value>${user.dir}/../build/ql/tmp</value>
+  <value>${build.dir}/tmp</value>
   <description>Location of the structured hive logs</description>
 </property>
 

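Hadoop's Configuration expands ${...} in the values it returns, consulting JVM system properties first, which is why the ${build.dir} and ${build.dir.hive} references above resolve for everything read through HiveConf; the JDO ConnectionURL is consumed by the JDO layer without that expansion, hence the comment and the remaining relative path. A small sketch of the expansion, with placeholder values:

    import org.apache.hadoop.conf.Configuration;

    // Sketch: Configuration.get() expands ${...} using system properties
    // (and other config entries), so a -Dbuild.dir.hive=... flows into the value.
    public final class ConfExpansionDemo {
        public static void main(String[] args) {
            System.setProperty("build.dir.hive", "/work/hive/build");   // placeholder
            Configuration conf = new Configuration(false);
            conf.set("hadoop.tmp.dir", "${build.dir.hive}/test/hadoop-${user.name}");
            // prints e.g. /work/hive/build/test/hadoop-<your user name>
            System.out.println(conf.get("hadoop.tmp.dir"));
        }
    }
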
Modified: hadoop/hive/branches/branch-0.5/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.5/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java?rev=900002&r1=900001&r2=900002&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.5/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java (original)
+++ hadoop/hive/branches/branch-0.5/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java Sat Jan 16 17:27:18 2010
@@ -21,7 +21,9 @@
 import java.io.File;
 import java.io.FileOutputStream;
 import java.io.Serializable;
+import java.util.HashMap;
 import java.util.Map;
+import java.util.Properties;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -41,6 +43,10 @@
     
   private static final long serialVersionUID = 1L;
 
+  final static String hadoopMemKey = "HADOOP_HEAPSIZE";
+  final static String hadoopOptsKey = "HADOOP_OPTS";
+  final static String HIVE_SYS_PROP[] = {"build.dir", "build.dir.hive"}; 
+  
   public MapRedTask() {
     super();
   }
@@ -106,27 +112,50 @@
       LOG.info("Executing: " + cmdLine);
       Process executor = null;
 
-      // The user can specify the hadoop memory
-      int hadoopMem = conf.getIntVar(HiveConf.ConfVars.HIVEHADOOPMAXMEM);
-
-      if (hadoopMem == 0) 
-        executor = Runtime.getRuntime().exec(cmdLine);
-      // user specified the memory - only applicable for local mode
-      else {
-        Map<String, String> variables = System.getenv();
-        String[] env = new String[variables.size() + 1];
-        int pos = 0;
+      // Inherit Java system variables
+      String hadoopOpts;
+      {
+        StringBuilder sb = new StringBuilder();
+        Properties p = System.getProperties();
+        for (int k = 0; k < HIVE_SYS_PROP.length; k++) {
+          if (p.containsKey(HIVE_SYS_PROP[k])) {
+            sb.append(" -D" + HIVE_SYS_PROP[k] + "=" + p.getProperty(HIVE_SYS_PROP[k]));
+          }
+        }
+        hadoopOpts = sb.toString();
+      }
+      
+      // Inherit the environment variables
+      String[] env;
+      {
+        Map<String, String> variables = new HashMap(System.getenv());
+        // The user can specify the hadoop memory
+        int hadoopMem = conf.getIntVar(HiveConf.ConfVars.HIVEHADOOPMAXMEM);
         
-        for (Map.Entry<String, String> entry : variables.entrySet())  
-        {  
+        if (hadoopMem == 0) {
+          variables.remove(hadoopMemKey);
+        } else {
+          // user specified the memory - only applicable for local mode
+          variables.put(hadoopMemKey, String.valueOf(hadoopMem));
+        }
+        
+        if (variables.containsKey(hadoopOptsKey)) {
+          variables.put(hadoopOptsKey, variables.get(hadoopOptsKey) + hadoopOpts);
+        } else {
+          variables.put(hadoopOptsKey, hadoopOpts);
+        }
+        
+        env = new String[variables.size()];
+        int pos = 0;
+        for (Map.Entry<String, String> entry : variables.entrySet()) {  
           String name = entry.getKey();  
           String value = entry.getValue();  
           env[pos++] = name + "=" + value;  
         }  
-        
-        env[pos] = new String("HADOOP_HEAPSIZE=" + hadoopMem);
-        executor = Runtime.getRuntime().exec(cmdLine, env);
       }
+      
+      // Run ExecDriver in another JVM
+      executor = Runtime.getRuntime().exec(cmdLine, env);
 
       StreamPrinter outPrinter = new StreamPrinter(executor.getInputStream(), null, System.out);
       StreamPrinter errPrinter = new StreamPrinter(executor.getErrorStream(), null, System.err);

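The rewritten block copies System.getenv() into a mutable map, sets or removes HADOOP_HEAPSIZE, appends the -Dbuild.dir* flags to HADOOP_OPTS, and then flattens the map back into the String[] form that Runtime.exec expects. For comparison only, a ProcessBuilder-based sketch of the same merge (not what this commit uses):

    import java.io.IOException;
    import java.util.List;
    import java.util.Map;

    // Sketch only: ProcessBuilder exposes a mutable copy of the parent environment,
    // so the HADOOP_HEAPSIZE/HADOOP_OPTS merge needs no manual String[] flattening
    // before the child JVM is forked.
    public final class ForkSketch {
        static Process fork(List<String> cmdLine, String extraHadoopOpts, int heapMb)
                throws IOException {
            ProcessBuilder pb = new ProcessBuilder(cmdLine);
            Map<String, String> env = pb.environment();   // starts as a copy of System.getenv()
            if (heapMb > 0) {
                env.put("HADOOP_HEAPSIZE", String.valueOf(heapMb));
            } else {
                env.remove("HADOOP_HEAPSIZE");
            }
            String prev = env.get("HADOOP_OPTS");
            env.put("HADOOP_OPTS", prev == null ? extraHadoopOpts : prev + extraHadoopOpts);
            return pb.start();
        }
    }
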
Modified: hadoop/hive/branches/branch-0.5/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.5/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java?rev=900002&r1=900001&r2=900002&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.5/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java (original)
+++ hadoop/hive/branches/branch-0.5/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java Sat Jan 16 17:27:18 2010
@@ -42,6 +42,7 @@
 import org.apache.hadoop.hive.cli.CliDriver;
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.exec.Task;
@@ -71,8 +72,9 @@
 public class QTestUtil {
 
   private String testWarehouse;
-  private String tmpdir =  System.getProperty("user.dir")+"/../build/ql/tmp";
+  private String tmpdir =  System.getProperty("test.tmp.dir");
   private Path tmppath = new Path(tmpdir);
+
   private String testFiles;
   private String outDir;
   private String logDir;
@@ -703,26 +705,28 @@
   public int checkCliDriverResults(String tname) throws Exception {
     String [] cmdArray;
 
-    cmdArray = new String[14];
-    cmdArray[0] = "diff";
-    cmdArray[1] = "-a";
-    cmdArray[2] = "-I";
-    cmdArray[3] = "\\(file:\\)\\|\\(/tmp/.*\\)";
-    cmdArray[4] = "-I";
-    cmdArray[5] = "lastUpdateTime";
-    cmdArray[6] = "-I";
-    cmdArray[7] = "lastAccessTime";
-    cmdArray[8] = "-I";
-    cmdArray[9] = "owner";
-    cmdArray[10] = "-I";
-    cmdArray[11] = "transient_lastDdlTime";
-    cmdArray[12] = (new File(logDir, tname + ".out")).getPath();
-    cmdArray[13] = (new File(outDir, tname + ".out")).getPath();
-    System.out.println(cmdArray[0] + " " + cmdArray[1] + " " + cmdArray[2] + " " +
-                       cmdArray[3] + " " + cmdArray[4] + " " + cmdArray[5] + " " +
-                       cmdArray[6] + " " + cmdArray[7] + " " + cmdArray[8] + " " +
-                       cmdArray[9] + " " + cmdArray[10] + " " + cmdArray[11] + " " +
-                       cmdArray[12] + " " + cmdArray[13]);
+    cmdArray = new String[] {
+        "diff",
+        "-a",
+        "-I",
+        "file:",
+        "-I",
+        "/tmp/",
+        "-I",
+        "invalidscheme:",
+        "-I",
+        "lastUpdateTime",
+        "-I",
+        "lastAccessTime",
+        "-I",
+        "owner",
+        "-I",
+        "transient_lastDdlTime",
+        (new File(logDir, tname + ".out")).getPath(),
+        (new File(outDir, tname + ".out")).getPath()
+    };
+
+    System.out.println(org.apache.commons.lang.StringUtils.join(cmdArray, ' '));
 
     Process executor = Runtime.getRuntime().exec(cmdArray);
 

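Passing the arguments as an array means the -I patterns reach diff without any shell quoting, and the comparison result is just diff's exit status: 0 when the generated and expected .out files match. A minimal sketch of that tail end, assuming a trimmed ignore list and a hypothetical method name:

    import java.io.File;

    // Sketch only: run the diff command and return its exit status.
    // 0 means the query output matched the expected .out file (modulo -I patterns);
    // the real QTestUtil drains stdout/stderr with StreamPrinter threads first.
    public final class DiffCheckDemo {
        static int compare(String logDir, String outDir, String tname) throws Exception {
            String[] cmdArray = {
                "diff", "-a",
                "-I", "transient_lastDdlTime",   // one entry of the real ignore list
                new File(logDir, tname + ".out").getPath(),
                new File(outDir, tname + ".out").getPath()
            };
            Process executor = Runtime.getRuntime().exec(cmdArray);
            return executor.waitFor();           // 0 => files match, 1 => they differ
        }
    }
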

