pig-commits mailing list archives

From: da...@apache.org
Subject: svn commit: r1166264 - in /pig/branches/branch-0.9: CHANGES.txt bin/pig build.xml
Date: Wed, 07 Sep 2011 16:58:25 GMT
Author: daijy
Date: Wed Sep  7 16:58:25 2011
New Revision: 1166264

URL: http://svn.apache.org/viewvc?rev=1166264&view=rev
Log:
PIG-2239: Pig should use 'bin/hadoop jar pig-withouthadoop.jar' in bin/pig instead of forming java command itself
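
In short, when a local Hadoop installation is found, bin/pig now delegates launching to the hadoop script instead of assembling a java command line itself; Pig's JVM settings travel through Hadoop's environment variables. A minimal sketch of the two invocation shapes, condensed from the bin/pig diff below (the jar path and script name are illustrative placeholders, not values the script computes):

    # old: bin/pig built the JVM invocation directly
    "$JAVA" $JAVA_HEAP_MAX $PIG_OPTS -classpath "$CLASSPATH" org.apache.pig.Main myscript.pig

    # new: bin/pig hands the Pig jar to the hadoop launcher
    HADOOP_CLASSPATH="$CLASSPATH:$HADOOP_CLASSPATH"       # Pig jar rides on Hadoop's classpath
    HADOOP_OPTS="$JAVA_HEAP_MAX $PIG_OPTS $HADOOP_OPTS"   # heap size and pig.* properties ride on Hadoop's JVM options
    "$HADOOP_HOME/bin/hadoop" jar pig-withouthadoop.jar myscript.pig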

Modified:
    pig/branches/branch-0.9/CHANGES.txt
    pig/branches/branch-0.9/bin/pig
    pig/branches/branch-0.9/build.xml

Modified: pig/branches/branch-0.9/CHANGES.txt
URL: http://svn.apache.org/viewvc/pig/branches/branch-0.9/CHANGES.txt?rev=1166264&r1=1166263&r2=1166264&view=diff
==============================================================================
--- pig/branches/branch-0.9/CHANGES.txt (original)
+++ pig/branches/branch-0.9/CHANGES.txt Wed Sep  7 16:58:25 2011
@@ -22,6 +22,8 @@ Release 0.9.1 - Unreleased
 
 IMPROVEMENTS
 
+PIG-2239: Pig should use "bin/hadoop jar pig-withouthadoop.jar" in bin/pig instead of forming java command itself (daijy)
+
 PIG-2213: Pig 0.9.1 Documentation (chandec via daijy)
 
 PIG-2221: Couldnt find documentation for ColumnMapKeyPrune optimization rule (chandec via daijy)

Modified: pig/branches/branch-0.9/bin/pig
URL: http://svn.apache.org/viewvc/pig/branches/branch-0.9/bin/pig?rev=1166264&r1=1166263&r2=1166264&view=diff
==============================================================================
--- pig/branches/branch-0.9/bin/pig (original)
+++ pig/branches/branch-0.9/bin/pig Wed Sep  7 16:58:25 2011
@@ -34,10 +34,6 @@
 #
 #     PIG_CONF_DIR    Alternate conf dir. Default is ${PIG_HOME}/conf.
 #
-#     PIG_ROOT_LOGGER The root appender. Default is INFO,console
-#
-#     PIG_HADOOP_VERSION Version of hadoop to run with.    Default is 20 (0.20).
-#
 #     HBASE_CONF_DIR - Optionally, the HBase configuration to run against
 #                      when using HBaseStorage
 
@@ -47,12 +43,13 @@ CYGWIN*) cygwin=true;;
 esac
 debug=false
 
+remaining=()
 # filter command line parameter
-for f in $@; do
+for f in "$@"; do
      if [[ $f = "-secretDebugCmd" ]]; then
         debug=true
      else
-        remaining="${remaining} $f"
+        remaining+=("$f")
      fi
 done
 
@@ -67,20 +64,18 @@ this="$bin/$script"
 # the root of the Pig installation
 export PIG_HOME=`dirname "$this"`/..
 
-#check to see if the conf dir is given as an optional argument
-if [ $# -gt 1 ]
-then
-    if [ "--config" = "$1" ]
-    then
-        shift
-        confdir=$1
-        shift
-        PIG_CONF_DIR=$confdir
+if [ ${#PIG_CONF_DIR} -eq 0 ]; then
+    if [ -d ${PIG_HOME}/conf ]; then
+        PIG_CONF_DIR=${PIG_HOME}/conf
     fi
 fi
 
-# Allow alternate conf dir location.
-PIG_CONF_DIR="${PIG_CONF_DIR:-$PIG_HOME/etc/pig}"
+if [ ${#PIG_CONF_DIR} -eq 0 ]; then
+    if [ -d /etc/pig ]; then
+        # if installed with rpm/deb package
+        PIG_CONF_DIR="/etc/pig}"
+    fi
+fi
 
 if [ -f "${PIG_CONF_DIR}/pig-env.sh" ]; then
     . "${PIG_CONF_DIR}/pig-env.sh"
@@ -117,34 +112,9 @@ fi
 # so that filenames w/ spaces are handled correctly in loops below
 IFS=
 
-shopt -s nullglob
-# for releases tarball, add core pig to CLASSPATH
-for f in $PIG_HOME/pig-*-core.jar; do
-    PIG_JAR=$f;
-done
-
-# for development/source release, add pig-withouthadoop.jar
-if [ ${#PIG_JAR} -eq 0 ]; then
-    for f in $PIG_HOME/pig-*withouthadoop.jar; do
-        PIG_JAR=$f;
-    done
-fi
-
-# for deb/rpm package, add pig jar in /usr/share/pig
-if [ ${#PIG_JAR} -eq 0 ]; then
-    for f in $PIG_HOME/share/pig/pig-*withouthadoop.jar; do
-        PIG_JAR=$f;
-    done
-fi
-shopt -u nullglob
-
-if [ ${#PIG_JAR} -ne 0 ]; then
-    CLASSPATH=${CLASSPATH}:$PIG_JAR
-else
-    echo "Cannot locate pig jar. do 'ant jar-withouthadoop', and try again"
-    exit 1
-fi
 
+shopt -s nullglob
+shopt -s extglob
 # For Hadoop 0.23.0+
 #
 #if [ -d "${PIG_HOME}/share/hadoop/common" ]; then
@@ -165,20 +135,10 @@ fi
 #    done
 #fi
 
-# Set the version for Hadoop, default to 20
-PIG_HADOOP_VERSION="${PIG_HADOOP_VERSION:-20}"
-# add libs to CLASSPATH.    There can be more than one version of the hadoop
-# libraries in the lib dir, so don't blindly add them all.    Only add the one
-# that matche PIG_HADOOP_VERSION.
 if [[ ${#HADOOP_HOME} -ne 0 && -d ${HADOOP_HOME} ]]; then
-    HADOOP_LIB_DIR=$HADOOP_HOME
     if [ -f $HADOOP_HOME/bin/hadoop ]; then
         HADOOP_BIN=$HADOOP_HOME/bin/hadoop
     fi
-elif [ -d "$PIG_HOME/build/ivy/lib/Pig" ]; then
-    HADOOP_LIB_DIR=$PIG_HOME/build/ivy/lib/Pig
-else
-    HADOOP_LIB_DIR=$PIG_HOME/lib
 fi
 
 if [ ${#HADOOP_BIN} -eq 0 ]; then
@@ -187,27 +147,6 @@ if [ ${#HADOOP_BIN} -eq 0 ]; then
     fi
 fi
 
-HADOOP_CLASSPATH=`$HADOOP_BIN classpath 2>/dev/null`
-if [ ${#HADOOP_CLASSPATH} -eq 0 ]; then
-    for f in $HADOOP_LIB_DIR/*.jar; do
-        filename=`basename $f`
-        IS_HADOOP=`echo $filename | grep hadoop`
-        if [ ${#IS_HADOOP} -ne 0 ]; then
-            HADOOP_CLASSPATH=${HADOOP_CLASSPATH}:$f;
-        else
-            IS_RIGHT_VER=`echo $f | grep hadoop-0\.${PIG_HADOOP_VERSION}\.*.jar | grep core`
-            if [ ${#IS_RIGHT_VER} -ne 0 ]; then
-                HADOOP_CLASSPATH=${HADOOP_CLASSPATH}:$f;
-            fi
-        fi
-    done
-    if [ -d ${HADOOP_HOME} ]; then
-        HADOOP_CLASSPATH=${HADOOP_HOME}/conf:${HADOOP_CLASSPATH}
-    fi
-fi
-
-CLASSPATH=${CLASSPATH}:$HADOOP_CLASSPATH
-
 # if using HBase, likely want to include HBase config
 HBASE_CONF_DIR=${HBASE_CONF_DIR:-/etc/hbase}
 if [ -n "$HBASE_CONF_DIR" ] && [ -d "$HBASE_CONF_DIR" ]; then
@@ -250,18 +189,77 @@ fi
 # restore ordinary behaviour
 unset IFS
 
-CLASS=org.apache.pig.Main
-
 PIG_OPTS="$PIG_OPTS -Dpig.log.dir=$PIG_LOG_DIR"
 PIG_OPTS="$PIG_OPTS -Dpig.log.file=$PIG_LOGFILE"
 PIG_OPTS="$PIG_OPTS -Dpig.home.dir=$PIG_HOME"
-PIG_OPTS="$PIG_OPTS -Dpig.root.logger=${PIG_ROOT_LOGGER:-INFO,console,DRFA}"
 
 # run it
-if [ "$debug" == "true" ]; then
-    echo "dry run:"
-    echo "$JAVA" $JAVA_HEAP_MAX $PIG_OPTS -classpath "$CLASSPATH" $CLASS ${remaining}
-    echo
+if [ ${#HADOOP_BIN} -ne 0 ]; then
+    if [ "$debug" == "true" ]; then
+        echo "Find hadoop at $HADOOP_BIN"
+    fi
+    for f in $PIG_HOME/pig-*withouthadoop.jar; do
+        PIG_JAR=$f;
+    done
+
+    # for deb/rpm package, add pig jar in /usr/share/pig
+    if [ ${#PIG_JAR} -eq 0 ]; then
+        for f in $PIG_HOME/share/pig/pig-*withouthadoop.jar; do
+            PIG_JAR=$f;
+        done
+    fi
+
+    if [ ${#PIG_JAR} -ne 0 ]; then
+        CLASSPATH=${CLASSPATH}:$PIG_JAR
+    else
+        echo "Cannot locate pig-withouthadoop.jar. do 'ant jar-withouthadoop', and try again"
+        exit 1
+    fi
+
+    HADOOP_CLASSPATH=$CLASSPATH:$HADOOP_CLASSPATH
+    HADOOP_OPTS="$JAVA_HEAP_MAX $PIG_OPTS $HADOOP_OPTS"
+    COMMAND_LINE="$HADOOP_BIN jar $PIG_JAR ${remaining[@]}"
+    if [ "$debug" == "true" ]; then
+        echo "dry run:"
+        echo "HADOOP_CLASSPATH: $HADOOP_CLASSPATH"
+        echo "HADOOP_OPTS: $HADOOP_OPTS"
+        echo "$COMMAND_LINE"
+        echo
+    else
+        exec $COMMAND_LINE
+    fi
 else
-    exec "$JAVA" $JAVA_HEAP_MAX $PIG_OPTS -classpath "$CLASSPATH" $CLASS ${remaining}
+    # fall back to use fat pig.jar
+    if [ "$debug" == "true" ]; then
+        echo "Cannot find local hadoop installation, using bundled hadoop 20.2"
+    fi
+    for f in $PIG_HOME/pig-!(*withouthadoop).jar; do
+        PIG_JAR=$f;
+    done
+
+    if [ ${#PIG_JAR} -ne 0 ]; then
+        CLASSPATH=${CLASSPATH}:$PIG_JAR
+    else
+        echo "Cannot locate pig-all.jar. do 'ant jar', and try again"
+        exit 1
+    fi
+
+    if [ -d "$PIG_HOME/build/ivy/lib/Pig" ]; then
+        HADOOP_LIB_DIR=$PIG_HOME/build/ivy/lib/Pig
+    else
+        HADOOP_LIB_DIR=$PIG_HOME/lib
+    fi
+    for f in $HADOOP_LIB_DIR/*.jar; do
+        CLASSPATH=${CLASSPATH}:$f;
+    done
+    CLASS=org.apache.pig.Main
+    if [ "$debug" == "true" ]; then
+        echo "dry run:"
+        echo "$JAVA" $JAVA_HEAP_MAX $PIG_OPTS -classpath "$CLASSPATH" $CLASS "${remaining[@]}"
+        echo
+    else
+        exec "$JAVA" $JAVA_HEAP_MAX $PIG_OPTS -classpath "$CLASSPATH" $CLASS "${remaining[@]}"
+    fi
 fi
+shopt -u nullglob
+shopt -u extglob
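
A quick way to see which branch the rewritten script takes, without actually launching anything, is its existing -secretDebugCmd flag: the argument filter loop strips the flag from the forwarded parameters and enables the dry-run echoes instead of exec'ing the command. A hypothetical session (paths and values are placeholders; the output lines mirror the echo statements above):

    $ bin/pig -secretDebugCmd myscript.pig
    Find hadoop at /usr/bin/hadoop
    dry run:
    HADOOP_CLASSPATH: ...:/opt/pig/pig-withouthadoop.jar
    HADOOP_OPTS: -Xmx1000m -Dpig.log.dir=... -Dpig.log.file=... -Dpig.home.dir=...
    /usr/bin/hadoop jar /opt/pig/pig-withouthadoop.jar myscript.pig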

Modified: pig/branches/branch-0.9/build.xml
URL: http://svn.apache.org/viewvc/pig/branches/branch-0.9/build.xml?rev=1166264&r1=1166263&r2=1166264&view=diff
==============================================================================
--- pig/branches/branch-0.9/build.xml (original)
+++ pig/branches/branch-0.9/build.xml Wed Sep  7 16:58:25 2011
@@ -15,7 +15,7 @@
    limitations under the License.
 -->
 
-<project name="Pig" default="jar" 
+<project name="Pig" default="jar-withouthadoop" 
 	xmlns:artifact="urn:maven-artifact-ant"
 	xmlns:ivy="antlib:org.apache.ivy.ant">
     <!-- Load all the default properties, and any the user wants    -->
@@ -73,7 +73,7 @@
     <property name="output.jarfile.javadoc" value="${build.dir}/${final.name}-javadoc.jar"
/>
     <!-- Maintain old pig.jar in top level directory. -->
     <property name="output.jarfile.backcompat.withouthadoop" value="${basedir}/${name}-withouthadoop.jar"
/>
-    <property name="output.jarfile.backcompat" value="${basedir}/${name}.jar" />
+    <property name="output.jarfile.backcompat" value="${basedir}/${name}-all.jar" />
 
     <!-- test properties -->
     <property name="test.src.dir" value="${basedir}/test" />
@@ -277,6 +277,8 @@
         <delete file="${jdiff.xml.dir}\${name}_${version}.xml" />
         <delete dir="${build.dir}" />
         <delete dir="${src.gen.dot.parser.dir}" />
+        <delete file="${output.jarfile.backcompat}" />
+        <delete file="${output.jarfile.backcompat.withouthadoop}" />
     </target>
     
     <target name="eclipse-files" depends="compile, ivy-buildJar"
@@ -513,7 +515,7 @@
     <!-- ================================================================== -->
     <!-- TODO we should also exclude test here...                           -->
     <!-- ================================================================== -->
-    <target name="jar" depends="compile" description="Create pig jar">
+    <target name="jar" depends="compile" description="Create pig-all.jar">
         <antcall target="jarWithSvn" inheritRefs="true" inheritall="true"/>
         <antcall target="jarWithOutSvn" inheritRefs="true" inheritall="true"/>
     </target>
@@ -644,16 +646,10 @@
             <zipfileset src="${lib.dir}/${automaton.jarfile}" />
             <zipfileset src="${ivy.lib.dir}/antlr-runtime-${antlr.version}.jar" />
             <zipfileset src="${ivy.lib.dir}/stringtemplate-${antlr.version}.jar" />
-            <zipfileset src="${ivy.lib.dir}/junit-${junit.version}.jar" />
-            <zipfileset src="${ivy.lib.dir}/jsch-${jsch.version}.jar" />
             <zipfileset src="${ivy.lib.dir}/jline-${jline.version}.jar" />
             <zipfileset src="${ivy.lib.dir}/jackson-mapper-asl-${jackson.version}.jar"
/>
             <zipfileset src="${ivy.lib.dir}/jackson-core-asl-${jackson.version}.jar" />
-            <zipfileset src="${ivy.lib.dir}/joda-time-${joda-time.version}.jar" />
             <zipfileset src="${ivy.lib.dir}/${guava.jar}" />
-            <zipgroupfileset dir="${ivy.lib.dir}" includes="commons*.jar"/>
-            <zipgroupfileset dir="${ivy.lib.dir}" includes="log4j*.jar"/>
-            <zipgroupfileset dir="${ivy.lib.dir}" includes="jsp-api*.jar"/>
             <fileset file="${basedir}/conf/pig-default.properties" />
         </jar>
         <copy file="${output.jarfile.withouthadoop}" tofile="${output.jarfile.backcompat.withouthadoop}"/>
@@ -773,8 +769,6 @@
         <mkdir dir="${dist.dir}/share/doc/pig/jdiff"/>
         <mkdir dir="${dist.dir}/share/doc/pig/license" />
 
-        <copy file="${ivy.lib.dir}/hadoop-core-${hadoop-core.version}.jar" todir="${dist.dir}/lib"/>
-
         <copy todir="${dist.dir}/lib/jdiff" includeEmptyDirs="false">
             <fileset dir="${lib.dir}/jdiff"/>
         </copy>
@@ -825,7 +819,7 @@
 
     </target>
 
-    <target name="package" depends="docs, api-report" description="Create a Pig release">
+    <target name="package" depends="docs, api-report, jar" description="Create a Pig release">
             <mkdir dir="${dist.dir}" />
         <mkdir dir="${dist.dir}/lib" />
         <mkdir dir="${dist.dir}/conf" />
@@ -835,14 +829,14 @@
         <mkdir dir="${dist.dir}/docs/jdiff"/>
         <mkdir dir="${dist.dir}/license" />
 
-        <copy file="${ivy.lib.dir}/hadoop-core-${hadoop-core.version}.jar" todir="${dist.dir}/lib"/>
-
         <copy todir="${dist.dir}/lib" includeEmptyDirs="false">
             <fileset dir="${lib.dir}"/>
         </copy>
 
         <copy file="${output.jarfile.backcompat.withouthadoop}" tofile="${dist.dir}/${final.name}-withouthadoop.jar"
/>
 
+        <copy file="${output.jarfile.backcompat}" tofile="${dist.dir}/${final.name}-all.jar"
/>
+
         <copy todir="${dist.dir}/" file="ivy.xml" />
 
         <copy todir="${dist.dir}/ivy">


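For source checkouts, the build.xml changes line up with the jar names the new bin/pig expects: the default Ant target now builds the thin jar, while the fat jar keeps the pig-all name that the fallback error message refers to. A short sketch of the corresponding build commands (version-suffixed file names omitted; jar names follow the output.jarfile.backcompat* properties):

    $ ant                      # default target is now jar-withouthadoop
    $ ant jar-withouthadoop    # builds pig-withouthadoop.jar, used when bin/pig finds a local 'bin/hadoop'
    $ ant jar                  # builds the bundled-hadoop pig-all.jar, used by the fallback path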
