pig-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From da...@apache.org
Subject svn commit: r1604060 - in /pig/branches/branch-0.13: CHANGES.txt bin/pig bin/pig.py build.xml
Date Fri, 20 Jun 2014 00:56:48 GMT
Author: daijy
Date: Fri Jun 20 00:56:48 2014
New Revision: 1604060

URL: http://svn.apache.org/r1604060
Log:
PIG-3892: Pig distribution for hadoop 2

Modified:
    pig/branches/branch-0.13/CHANGES.txt
    pig/branches/branch-0.13/bin/pig
    pig/branches/branch-0.13/bin/pig.py
    pig/branches/branch-0.13/build.xml

Modified: pig/branches/branch-0.13/CHANGES.txt
URL: http://svn.apache.org/viewvc/pig/branches/branch-0.13/CHANGES.txt?rev=1604060&r1=1604059&r2=1604060&view=diff
==============================================================================
--- pig/branches/branch-0.13/CHANGES.txt (original)
+++ pig/branches/branch-0.13/CHANGES.txt Fri Jun 20 00:56:48 2014
@@ -34,6 +34,8 @@ PIG-2207: Support custom counters for ag
 
 IMPROVEMENTS
 
+PIG-3892: Pig distribution for hadoop 2 (daijy)
+
 PIG-3999: Document PIG-3388 (lbendig via cheolsoo)
 
 PIG-3954: Document use of user level jar cache (aniket486)

Modified: pig/branches/branch-0.13/bin/pig
URL: http://svn.apache.org/viewvc/pig/branches/branch-0.13/bin/pig?rev=1604060&r1=1604059&r2=1604060&view=diff
==============================================================================
--- pig/branches/branch-0.13/bin/pig (original)
+++ pig/branches/branch-0.13/bin/pig Fri Jun 20 00:56:48 2014
@@ -282,6 +282,25 @@ if [ -z "$HADOOP_BIN" ]; then
     fi
 fi
 
+# find out the HADOOP_HOME in order to find hadoop jar
+# we use the name of hadoop jar to decide if user is using
+# hadoop 1 or hadoop 2
+if [[ -z "$HADOOP_HOME" && -n "$HADOOP_PREFIX" ]]; then
+    HADOOP_HOME=$HADOOP_PREFIX
+fi
+
+if [[ -z "$HADOOP_HOME" && -n "$HADOOP_BIN" ]]; then
+    HADOOP_HOME=`dirname $HADOOP_BIN`/..
+fi
+
+HADOOP_CORE_JAR=`echo ${HADOOP_HOME}/hadoop-core*.jar`
+
+if [ -z "$HADOOP_CORE_JAR" ]; then
+    HADOOP_VERSION=2
+else
+    HADOOP_VERSION=1
+fi
+
 # if using HBase, likely want to include HBase jars and config
 HBH=${HBASE_HOME:-"${PIG_HOME}/share/hbase"}
 if [ -d "${HBH}" ]; then
@@ -345,21 +364,25 @@ if [ -n "$HADOOP_BIN" ]; then
         echo "Find hadoop at $HADOOP_BIN"
     fi
 
-    if [ -f $PIG_HOME/pig-withouthadoop.jar ]; then
-        PIG_JAR=$PIG_HOME/pig-withouthadoop.jar
+    if [ -f $PIG_HOME/pig-withouthadoop-h${HADOOP_VERSION}.jar ]; then
+        PIG_JAR=$PIG_HOME/pig-withouthadoop-h${HADOOP_VERSION}.jar
     else
-        PIG_JAR=`echo $PIG_HOME/pig-?.*withouthadoop.jar`
+        PIG_JAR=`echo $PIG_HOME/pig-?.*withouthadoop-h${HADOOP_VERSION}.jar`
     fi
 
     # for deb/rpm package, add pig jar in /usr/share/pig
     if [ -z "$PIG_JAR" ]; then
-        PIG_JAR=`echo $PIG_HOME/share/pig/pig-*withouthadoop.jar`
+        PIG_JAR=`echo $PIG_HOME/share/pig/pig-*withouthadoop-h${HADOOP_VERSION}.jar`
     fi
 
     if [ -n "$PIG_JAR" ]; then
         CLASSPATH=${CLASSPATH}:$PIG_JAR
     else
-        echo "Cannot locate pig-withouthadoop.jar. do 'ant jar-withouthadoop', and try again"
+        if [ "$HADOOP_VERSION" == "1" ]; then
+            echo "Cannot locate pig-withouthadoop-h${HADOOP_VERSION}.jar. do 'ant jar-withouthadoop', and try again"
+        else
+            echo "Cannot locate pig-withouthadoop-h${HADOOP_VERSION}.jar. do 'ant -Dhadoopversion=23 jar-withouthadoop', and try again"
+        fi
         exit 1
     fi
 
@@ -378,10 +401,10 @@ if [ -n "$HADOOP_BIN" ]; then
     fi
 else
     # fall back to use fat pig.jar
-    if [ -f $PIG_HOME/pig.jar ]; then
-        PIG_JAR=$PIG_HOME/pig.jar
+    if [ -f $PIG_HOME/pig-h1.jar ]; then
+        PIG_JAR=$PIG_HOME/pig-h1.jar
     else
-        PIG_JAR=`echo $PIG_HOME/pig-?.!(*withouthadoop).jar`
+        PIG_JAR=`echo $PIG_HOME/pig-?.!(*withouthadoop)-h1.jar`
     fi
 
     if [ -n "$PIG_JAR" ]; then

Modified: pig/branches/branch-0.13/bin/pig.py
URL: http://svn.apache.org/viewvc/pig/branches/branch-0.13/bin/pig.py?rev=1604060&r1=1604059&r2=1604060&view=diff
==============================================================================
--- pig/branches/branch-0.13/bin/pig.py (original)
+++ pig/branches/branch-0.13/bin/pig.py Fri Jun 20 00:56:48 2014
@@ -307,7 +307,6 @@ pigJar = ""
 hadoopBin = ""
 
 print "HADOOP_HOME: %s" % os.path.expandvars(os.environ['HADOOP_HOME'])
-print "HADOOP_PREFIX: %s" % os.path.expandvars(os.environ['HADOOP_PREFIX'])
 
 if (os.environ.get('HADOOP_PREFIX') is not None):
   print "Found a hadoop prefix"
@@ -326,28 +325,46 @@ if hadoopBin == "":
   if os.path.exists(os.path.join(os.path.sep + "usr", "bin", "hadoop")):
     hadoopBin = os.path.join(os.path.sep + "usr", "bin", "hadoop")
 
+# find out the HADOOP_HOME in order to find hadoop jar
+# we use the name of hadoop jar to decide if user is using
+# hadoop 1 or hadoop 2
+if (hadoopHomePath is None and hadoopPrefixPath is not None):
+  hadoopHomePath = hadoopPrefixPath
+
+if (os.environ.get('HADOOP_HOME') is None and hadoopBin != ""):
+  hadoopHomePath = os.path.join(hadoopBin, "..")
+
+hadoopCoreJars = glob.glob(os.path.join(hadoopHomePath, "hadoop-core*.jar"))
+if len(hadoopCoreJars) == 0:
+  hadoopVersion = 2
+else:
+  hadoopVersion = 1
+
 if hadoopBin != "":
   if debug == True:
     print "Find hadoop at %s" % hadoopBin
 
-  if os.path.exists(os.path.join(os.environ['PIG_HOME'], "pig-withouthadoop.jar")):
-    pigJar = os.path.join(os.environ['PIG_HOME'], "pig-withouthadoop.jar")
+  if os.path.exists(os.path.join(os.environ['PIG_HOME'], "pig-withouthadoop-h$hadoopVersion.jar")):
+    pigJar = os.path.join(os.environ['PIG_HOME'], "pig-withouthadoop-h$hadoopVersion.jar")
 
   else:
-    pigJars = glob.glob(os.path.join(os.environ['PIG_HOME'], "pig-?.*withouthadoop.jar"))
+    pigJars = glob.glob(os.path.join(os.environ['PIG_HOME'], "pig-*withouthadoop-h" + str(hadoopVersion) + ".jar"))
     if len(pigJars) == 1:
       pigJar = pigJars[0]
 
     elif len(pigJars) > 1:
       print "Ambiguity with pig jars found the following jars"
       print pigJars
-      sys.exit("Please remove irrelavant jars fromt %s" % os.path.join(os.environ['PIG_HOME'], "pig-?.*withouthadoop.jar"))
+      sys.exit("Please remove irrelavant jars fromt %s" % os.path.join(os.environ['PIG_HOME'], "pig-*withouthadoop.jar"))
     else:
       pigJars = glob.glob(os.path.join(os.environ['PIG_HOME'], "share", "pig", "pig-*withouthadoop.jar"))
       if len(pigJars) == 1:
         pigJar = pigJars[0]
       else:
-        sys.exit("Cannot locate pig-withouthadoop.jar do 'ant jar-withouthadoop', and try again")
+        if hadoopVersion == 1:
+          sys.exit("Cannot locate pig-withouthadoop-h1.jar do 'ant jar-withouthadoop', and try again")
+        else:
+          sys.exit("Cannot locate pig-withouthadoop-h2.jar do 'ant -Dhadoopversion=23 jar-withouthadoop', and try again")
 
   if 'HADOOP_CLASSPATH' in os.environ:
     os.environ['HADOOP_CLASSPATH'] += os.pathsep + classpath
@@ -372,20 +389,24 @@ else:
   if debug == True:
     print "Cannot find local hadoop installation, using bundled hadoop 20.2"
     
-  if os.path.exists(os.path.join(os.environ['PIG_HOME'], "pig.jar")):
-    pigJar = os.path.join(os.environ['PIG_HOME'], "pig.jar")
+  if os.path.exists(os.path.join(os.environ['PIG_HOME'], "pig-h1.jar")):
+    pigJar = os.path.join(os.environ['PIG_HOME'], "pig-h1.jar")
 
   else:
-    pigJars = glob.glob(os.path.join(os.environ['PIG_HOME'], "pig-?.!(*withouthadoop).jar"))
+    pigJars = glob.glob(os.path.join(os.environ['PIG_HOME'], "pig-*-h1.jar"))
+    for pigJar in pigJars:
+      if "withouthadoop" in pigJar:
+        pigJars.remove(pigJar)
+
     if len(pigJars) == 1:
       pigJar = pigJars[0]
 
     elif len(pigJars) > 1:
       print "Ambiguity with pig jars found the following jars"
       print pigJars
-      sys.exit("Please remove irrelavant jars fromt %s" % os.path.join(os.environ['PIG_HOME'], "pig-?.*withouthadoop.jar"))
+      sys.exit("Please remove irrelavant jars fromt %s" % os.path.join(os.environ['PIG_HOME'], "pig-h1.jar"))
     else:
-      sys.exit("Cannot locate pig.jar. do 'ant jar' and try again")
+      sys.exit("Cannot locate pig-h1.jar. do 'ant jar' and try again")
 
   classpath += os.pathsep + pigJar
   pigClass = "org.apache.pig.Main"

Modified: pig/branches/branch-0.13/build.xml
URL: http://svn.apache.org/viewvc/pig/branches/branch-0.13/build.xml?rev=1604060&r1=1604059&r2=1604060&view=diff
==============================================================================
--- pig/branches/branch-0.13/build.xml (original)
+++ pig/branches/branch-0.13/build.xml Fri Jun 20 00:56:48 2014
@@ -68,8 +68,10 @@
 
     <!-- artifact jar file names -->
     <property name="artifact.pig.jar" value="${final.name}.jar"/>
+    <property name="artifact.pig-h1.jar" value="${final.name}-h1.jar"/>
     <property name="artifact.pig-h2.jar" value="${final.name}-h2.jar"/>
     <property name="artifact.pig-withouthadoop.jar" value="${final.name}-withouthadoop.jar"/>
+    <property name="artifact.pig-withouthadoop-h1.jar" value="${final.name}-withouthadoop-h1.jar"/>
     <property name="artifact.pig-withouthadoop-h2.jar" value="${final.name}-withouthadoop-h2.jar"/>
     <property name="artifact.pig-withdependencies.jar" value="${final.name}-withdependencies.jar"/>
     <property name="artifact.pig-sources.jar" value="${final.name}-sources.jar"/>
@@ -78,16 +80,21 @@
 
     <!-- jar names. TODO we might want to use the svn reversion name in the name in case
it is a dev version -->
     <property name="output.jarfile.withdependencies" value="${build.dir}/${artifact.pig-withdependencies.jar}"
/>
-    <property name="output.stage.jarfile.withdependencies" value="${build.dir}/${final.name}-withdependencies.stage.jar"
/>
     <property name="output.jarfile.withouthadoop" value="${build.dir}/${artifact.pig-withouthadoop.jar}"
/>
+    <property name="output.jarfile.withouthadoop-h1" value="${build.dir}/${artifact.pig-withouthadoop-h1.jar}"
/>
     <property name="output.jarfile.withouthadoop-h2" value="${build.dir}/${artifact.pig-withouthadoop-h2.jar}"
/>
     <property name="output.jarfile.core" value="${build.dir}/${artifact.pig.jar}" />
+    <property name="output.jarfile.core-h1" value="${build.dir}/${artifact.pig-h1.jar}"
/>
     <property name="output.jarfile.core-h2" value="${build.dir}/${artifact.pig-h2.jar}"
/>
     <property name="output.jarfile.sources" value="${build.dir}/${artifact.pig-sources.jar}"
/>
     <property name="output.jarfile.javadoc" value="${build.dir}/${artifact.pig-javadoc.jar}"
/>
     <!-- Maintain old pig.jar in top level directory. -->
     <property name="output.jarfile.backcompat" value="${basedir}/${name}.jar" />
+    <property name="output.jarfile.backcompat-h1" value="${basedir}/${name}-h1.jar" />
+    <property name="output.jarfile.backcompat-h2" value="${basedir}/${name}-h2.jar" />
     <property name="output.jarfile.backcompat.withouthadoop" value="${basedir}/${name}-withouthadoop.jar"
/>
+    <property name="output.jarfile.backcompat.withouthadoop-h1" value="${basedir}/${name}-withouthadoop-h1.jar"
/>
+    <property name="output.jarfile.backcompat.withouthadoop-h2" value="${basedir}/${name}-withouthadoop-h2.jar"
/>
 
     <!-- test properties -->
     <property name="test.src.dir" value="${basedir}/test" />
@@ -437,8 +444,9 @@
         <delete file="${jdiff.xml.dir}\${name}_${version}.xml" />
         <delete dir="${build.dir}" />
         <delete dir="${src.gen.dot.parser.dir}" />
-        <delete file="${output.jarfile.backcompat}" />
-        <delete file="${output.jarfile.backcompat.withouthadoop}" />
+        <delete>
+            <fileset dir="${basedir}" includes="pig*.jar" />
+        </delete>
         <ant dir="${test.e2e.dir}" target="clean"/>
     </target>
 
@@ -678,6 +686,18 @@
     <!-- ================================================================== -->
     <target name="jar-all" depends="jar,jar-withouthadoop" description="Create pig, pig
withdependencies and pig withouthadoop jar" />
 
+    <target name="jar-all-h12" description="Create pig, pig withdependencies and pig withouthadoop
jar for both Hadoop 1 and Hadoop 2">
+        <propertyreset name="hadoopversion" value="23" />
+        <propertyreset name="src.shims.dir" value="${basedir}/shims/src/hadoop${hadoopversion}"
/>
+        <antcall target="clean" inheritRefs="true" inheritall="true"/>
+        <antcall target="jar-withouthadoop" inheritRefs="true" inheritall="true"/>
+        <delete dir="${build.dir}" />
+        <propertyreset name="hadoopversion" value="20" />
+        <propertyreset name="src.shims.dir" value="${basedir}/shims/src/hadoop${hadoopversion}"
/>
+        <antcall target="jar" inheritRefs="true" inheritall="true"/>
+        <antcall target="jar-withouthadoop" inheritRefs="true" inheritall="true"/>
+    </target>
+
     <!-- ================================================================== -->
     <!-- Make pig.jar                                                       -->
     <!-- ================================================================== -->
@@ -686,30 +706,14 @@
     <target name="jar" depends="compile,ivy-buildJar" description="Create pig withdependecies
jar">
         <buildJar svnString="${svn.revision}" outputFile="${output.jarfile.core}" includedJars="core.dependencies.jar"/>
         <buildJar svnString="${svn.revision}" outputFile="${output.jarfile.withdependencies}"
includedJars="runtime.dependencies.jar"/>
-        <copy file="${output.jarfile.withdependencies}" tofile="${output.jarfile.backcompat}"/>
-        <antcall target="include-meta" inheritRefs="true" inheritall="true"/>
-    </target>
-
-    <target name="include-meta" if="isHadoop23">
-        <copy todir="${build.classes}/META-INF">
-            <fileset dir="${src.dir}/META-INF" includes="**"/>
-        </copy>
-        <move file="${output.jarfile.withdependencies}" tofile="${output.stage.jarfile.withdependencies}"/>
-        <sleep seconds="1"/>
-        <jar jarfile="${output.jarfile.withdependencies}">
-            <manifest>
-                <attribute name="Main-Class" value="org.apache.pig.Main" />
-                <section name="org/apache/pig">
-                    <attribute name="Implementation-Vendor" value="Apache" />
-                    <attribute name="Implementation-Title" value="Pig" />
-                    <attribute name="Implementation-Version" value="${version}" />
-                    <attribute name="Build-TimeStamp" value="${timestamp}" />
-                    <attribute name="Svn-Revision" value="${svnString}" />
-                </section>
-            </manifest>
-            <zipfileset src="${output.stage.jarfile.withdependencies}"/>
-            <fileset dir="${build.classes}" includes="META-INF/**" />
-        </jar>
+        <antcall target="copyHadoop1">
+            <param name="source" value="${output.jarfile.withdependencies}"/>
+            <param name="dest" value="${output.jarfile.backcompat-h1}"/>
+        </antcall>
+        <antcall target="copyHadoop2">
+            <param name="source" value="${output.jarfile.withdependencies}"/>
+            <param name="dest" value="${output.jarfile.backcompat-h2}"/>
+        </antcall>
     </target>
 
     <!-- ================================================================== -->
@@ -718,7 +722,22 @@
     <target name="jar-withouthadoop" depends="compile,ivy-buildJar" description="Create
pig withouthadoop jar">
         <buildJar svnString="${svn.revision}" outputFile="${output.jarfile.core}" includedJars="core.dependencies.jar"/>
         <buildJar svnString="${svn.revision}" outputFile="${output.jarfile.withouthadoop}"
includedJars="runtime.dependencies-withouthadoop.jar"/>
-        <copy file="${output.jarfile.withouthadoop}" tofile="${output.jarfile.backcompat.withouthadoop}"/>
+        <antcall target="copyHadoop1">
+            <param name="source" value="${output.jarfile.withouthadoop}"/>
+            <param name="dest" value="${output.jarfile.backcompat.withouthadoop-h1}"/>
+        </antcall>
+        <antcall target="copyHadoop2">
+            <param name="source" value="${output.jarfile.withouthadoop}"/>
+            <param name="dest" value="${output.jarfile.backcompat.withouthadoop-h2}"/>
+        </antcall>
+    </target>
+
+    <target name="copyHadoop2" if="isHadoop23">
+        <copy file="${source}" tofile="${dest}"/>
+    </target>
+
+    <target name="copyHadoop1" unless="isHadoop23">
+        <copy file="${source}" tofile="${dest}"/>
     </target>
 
     <scriptdef name="propertyreset" language="javascript"
@@ -738,12 +757,14 @@
         <!-- Move and rename pig jar for Hadoop2/23 to a different location before compiling
for Hadoop1/20 -->
         <move file="${output.jarfile.core}" tofile="${basedir}/${artifact.pig-h2.jar}"/>
         <move file="${output.jarfile.withouthadoop}" tofile="${basedir}/${artifact.pig-withouthadoop-h2.jar}"/>
-        <antcall target="clean" inheritRefs="true" inheritall="true"/>
+        <delete dir="${build.dir}" />
         <propertyreset name="hadoopversion" value="20" />
         <propertyreset name="isHadoop" value="" />
         <propertyreset name="src.shims.dir" value="${basedir}/shims/src/hadoop${hadoopversion}"
/>
         <antcall target="jar" inheritRefs="true" inheritall="true"/>
         <antcall target="jar-withouthadoop" inheritRefs="true" inheritall="true"/>
+        <move file="${output.jarfile.core}" tofile="${output.jarfile.core-h1}"/>
+        <move file="${output.jarfile.withouthadoop}" tofile="${output.jarfile.withouthadoop-h1}"/>
         <move file="${basedir}/${artifact.pig-h2.jar}" tofile="${output.jarfile.core-h2}"/>
         <move file="${basedir}/${artifact.pig-withouthadoop-h2.jar}" tofile="${output.jarfile.withouthadoop-h2}"/>
     </target>
@@ -950,7 +971,16 @@
     <!-- ================================================================== -->
     <!-- Distribution                                                       -->
     <!-- ================================================================== -->
-    <target name="package" depends="docs, api-report, jar, piggybank" description="Create
a Pig tar release">
+    <target name="package-h12" depends="jar-all-h12, docs, api-report, piggybank" description="Create
a Pig tar release">
+		<package-base/>
+    </target>
+	
+    <target name="package" depends="jar-all, docs, api-report, piggybank" description="Create
a Pig tar release">
+		<package-base/>
+    </target>
+	
+    <macrodef name="package-base">
+      <sequential>
         <mkdir dir="${tar.dist.dir}" />
         <mkdir dir="${tar.dist.dir}/lib" />
         <mkdir dir="${tar.dist.dir}/conf" />
@@ -977,9 +1007,11 @@
             <fileset dir="${ivy.lib.dir}" includes="json-simple-*.jar"/>
         </copy>
 
-        <copy file="${output.jarfile.backcompat.withouthadoop}" tofile="${tar.dist.dir}/${final.name}-withouthadoop.jar" />
+        <copy file="${output.jarfile.backcompat.withouthadoop-h1}" tofile="${tar.dist.dir}/${final.name}-withouthadoop-h1.jar" failonerror="false"/>
+        <copy file="${output.jarfile.backcompat.withouthadoop-h2}" tofile="${tar.dist.dir}/${final.name}-withouthadoop-h2.jar" failonerror="false"/>
 
-        <copy file="${output.jarfile.backcompat}" tofile="${tar.dist.dir}/${final.name}.jar" />
+        <copy file="${output.jarfile.backcompat-h1}" tofile="${tar.dist.dir}/${final.name}-h1.jar" failonerror="false"/>
+        <copy file="${output.jarfile.backcompat-h2}" tofile="${tar.dist.dir}/${final.name}-h2.jar" failonerror="false"/>
 
         <copy todir="${tar.dist.dir}/lib" file="contrib/piggybank/java/piggybank.jar"/>
 
@@ -1042,12 +1074,22 @@
         <chmod perm="ugo+x" type="file">
             <fileset dir="${tar.dist.dir}/bin" />
         </chmod>
-    </target>
+      </sequential>
+    </macrodef>
 
     <!-- ================================================================== -->
     <!-- Make release packages                                              -->
     <!-- ================================================================== -->
     <target name="tar" depends="package" description="Source distribution">
+		<tar-base/>
+    </target>
+
+    <target name="tar-h12" depends="package-h12" description="Source distribution">
+		<tar-base/>
+    </target>
+
+    <macrodef name="tar-base">
+      <sequential>
         <tar compression="gzip" longfile="gnu" destfile="${build.dir}/${artifact.pig.tar}">
             <tarfileset dir="${build.dir}/tar/" mode="664">
                 <exclude name="${final.name}/bin/*" />
@@ -1057,8 +1099,8 @@
                 <include name="${final.name}/bin/*" />
             </tarfileset>
         </tar>
-    </target>
-
+      </sequential>
+    </macrodef>
     <!-- ================================================================== -->
     <!-- Make release tarball                                               -->
     <!-- ================================================================== -->
@@ -1131,11 +1173,11 @@
           uri="urn:maven-artifact-ant"
           classpathref="mvn-ant-task.classpath"/>
     </target>
-    <target name="mvn-install" depends="mvn-taskdef,jar, set-version, source-jar,
+    <target name="mvn-install" depends="mvn-taskdef,mvn-jar, set-version, source-jar,
       javadoc-jar, pigunit-jar, smoketests-jar, piggybank"
          description="To install pig to local filesystem's m2 cache">
          <artifact:pom file="${pig.pom}" id="pig"/>
-          <artifact:install file="${output.jarfile.core}">
+          <artifact:install file="${output.jarfile.core-h1}">
                <pom refid="pig"/>
            <attach file="${output.jarfile.sources}" classifier="sources" />
            <attach file="${output.jarfile.javadoc}" classifier="javadoc" />
@@ -1211,7 +1253,7 @@
     <target name="simpledeploy" unless="staging">
       <artifact:pom file="${pig.pom}" id="pig"/>
       <artifact:install-provider artifactId="wagon-http" version="${wagon-http.version}"/>
-      <artifact:deploy file="${output.jarfile.core}">
+      <artifact:deploy file="${output.jarfile.core-h1}">
               <remoteRepository id="${snapshots_repo_id}" url="${asfsnapshotrepo}"/>
               <pom refid="pig"/>
               <attach file="${output.jarfile.core-h2}" classifier="h2" />



Mime
View raw message