hadoop-common-commits mailing list archives

From: tomwh...@apache.org
Subject: svn commit: r1153184 [1/2] - in /hadoop/common/trunk: ./ dev-support/ hadoop-annotations/ hadoop-annotations/src/ hadoop-annotations/src/main/ hadoop-annotations/src/main/java/ hadoop-annotations/src/main/java/org/ hadoop-annotations/src/main/java/org/...
Date: Tue, 02 Aug 2011 16:38:08 GMT
Author: tomwhite
Date: Tue Aug  2 16:37:57 2011
New Revision: 1153184

URL: http://svn.apache.org/viewvc?rev=1153184&view=rev
Log:
HADOOP-6671. Use maven for hadoop common builds. Contributed by Alejandro Abdelnur.
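
For orientation, the Maven entry points introduced by this change (documented in the new BUILDING.txt added below) replace the previous ant targets. A minimal sketch of the common invocations, assuming a trunk checkout with Maven 3 on the PATH:

    # Basic build goals after this change (see BUILDING.txt for the full list)
    mvn clean                       # remove build output
    mvn compile -Pnative            # compile, optionally including native code
    mvn test                        # run the unit tests
    mvn package -Ptar -DskipTests   # build the distribution TAR without running tests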

Added:
    hadoop/common/trunk/.gitignore
    hadoop/common/trunk/dev-support/
    hadoop/common/trunk/dev-support/smart-apply-patch.sh
      - copied unchanged from r1153176, hadoop/common/trunk/common/src/test/bin/smart-apply-patch.sh
    hadoop/common/trunk/dev-support/test-patch.properties
      - copied, changed from r1153176, hadoop/common/trunk/common/src/test/test-patch.properties
    hadoop/common/trunk/dev-support/test-patch.sh
      - copied, changed from r1153176, hadoop/common/trunk/common/src/test/bin/test-patch.sh
    hadoop/common/trunk/hadoop-annotations/
    hadoop/common/trunk/hadoop-annotations/pom.xml   (with props)
    hadoop/common/trunk/hadoop-annotations/src/
    hadoop/common/trunk/hadoop-annotations/src/main/
    hadoop/common/trunk/hadoop-annotations/src/main/java/
    hadoop/common/trunk/hadoop-annotations/src/main/java/org/
    hadoop/common/trunk/hadoop-annotations/src/main/java/org/apache/
    hadoop/common/trunk/hadoop-annotations/src/main/java/org/apache/hadoop/
    hadoop/common/trunk/hadoop-annotations/src/main/java/org/apache/hadoop/classification/
      - copied from r1153176, hadoop/common/trunk/common/src/java/org/apache/hadoop/classification/
    hadoop/common/trunk/hadoop-assemblies/
    hadoop/common/trunk/hadoop-assemblies/pom.xml   (with props)
    hadoop/common/trunk/hadoop-assemblies/src/
    hadoop/common/trunk/hadoop-assemblies/src/main/
    hadoop/common/trunk/hadoop-assemblies/src/main/resources/
    hadoop/common/trunk/hadoop-assemblies/src/main/resources/assemblies/
    hadoop/common/trunk/hadoop-assemblies/src/main/resources/assemblies/hadoop-bintar.xml   (with props)
    hadoop/common/trunk/hadoop-assemblies/src/main/resources/assemblies/hadoop-src.xml   (with props)
    hadoop/common/trunk/hadoop-assemblies/src/main/resources/assemblies/hadoop-tar.xml   (with props)
    hadoop/common/trunk/hadoop-common/
      - copied from r1153176, hadoop/common/trunk/common/
    hadoop/common/trunk/hadoop-common/BUILDING.txt   (with props)
    hadoop/common/trunk/hadoop-common/dev-support/
    hadoop/common/trunk/hadoop-common/dev-support/checkstyle.xml
      - copied, changed from r1153176, hadoop/common/trunk/common/src/test/checkstyle.xml
    hadoop/common/trunk/hadoop-common/dev-support/findbugsExcludeFile.xml
      - copied unchanged from r1153176, hadoop/common/trunk/common/src/test/findbugsExcludeFile.xml
    hadoop/common/trunk/hadoop-common/dev-support/jdiff/
      - copied from r1153176, hadoop/common/trunk/common/lib/jdiff/
    hadoop/common/trunk/hadoop-common/dev-support/saveVersion.sh
      - copied, changed from r1153176, hadoop/common/trunk/common/src/saveVersion.sh
    hadoop/common/trunk/hadoop-common/pom.xml   (with props)
    hadoop/common/trunk/hadoop-common/src/main/
    hadoop/common/trunk/hadoop-common/src/main/bin/
      - copied from r1153176, hadoop/common/trunk/common/bin/
    hadoop/common/trunk/hadoop-common/src/main/conf/
      - copied from r1153176, hadoop/common/trunk/common/conf/
    hadoop/common/trunk/hadoop-common/src/main/docs/
      - copied from r1153176, hadoop/common/trunk/common/src/docs/
    hadoop/common/trunk/hadoop-common/src/main/java/
      - copied from r1153176, hadoop/common/trunk/common/src/java/
    hadoop/common/trunk/hadoop-common/src/main/native/
      - copied from r1153176, hadoop/common/trunk/common/src/native/
    hadoop/common/trunk/hadoop-common/src/main/packages/
      - copied from r1153176, hadoop/common/trunk/common/src/packages/
    hadoop/common/trunk/hadoop-common/src/main/resources/
    hadoop/common/trunk/hadoop-common/src/main/resources/META-INF/
    hadoop/common/trunk/hadoop-common/src/main/resources/META-INF/services/
    hadoop/common/trunk/hadoop-common/src/main/resources/META-INF/services/org.apache.hadoop.security.SecurityInfo
    hadoop/common/trunk/hadoop-common/src/main/resources/core-default.xml
      - copied unchanged from r1153176, hadoop/common/trunk/common/src/java/core-default.xml
    hadoop/common/trunk/hadoop-common/src/main/webapps/
      - copied from r1153176, hadoop/common/trunk/common/src/webapps/
    hadoop/common/trunk/hadoop-common/src/main/xsl/
    hadoop/common/trunk/hadoop-common/src/main/xsl/configuration.xsl
      - copied, changed from r1153176, hadoop/common/trunk/common/conf/configuration.xsl
    hadoop/common/trunk/hadoop-common/src/test/java/
    hadoop/common/trunk/hadoop-common/src/test/java/org/
      - copied from r1153176, hadoop/common/trunk/common/src/test/core/org/
    hadoop/common/trunk/hadoop-common/src/test/resources/
    hadoop/common/trunk/hadoop-common/src/test/resources/core-site.xml
      - copied unchanged from r1153176, hadoop/common/trunk/common/src/test/core-site.xml
    hadoop/common/trunk/hadoop-common/src/test/resources/fi-site.xml
      - copied unchanged from r1153176, hadoop/common/trunk/common/src/test/fi-site.xml
    hadoop/common/trunk/hadoop-common/src/test/resources/hadoop-policy.xml
      - copied unchanged from r1153176, hadoop/common/trunk/common/src/test/hadoop-policy.xml
    hadoop/common/trunk/hadoop-common/src/test/resources/krb5.conf
      - copied unchanged from r1153176, hadoop/common/trunk/common/src/test/krb5.conf
    hadoop/common/trunk/hadoop-common/src/test/resources/log4j.properties
      - copied unchanged from r1153176, hadoop/common/trunk/common/src/test/log4j.properties
    hadoop/common/trunk/hadoop-common/src/test/resources/test-fake-default.xml
      - copied unchanged from r1153176, hadoop/common/trunk/common/src/test/test-fake-default.xml
    hadoop/common/trunk/hadoop-common/src/test/resources/test-patch.properties
      - copied unchanged from r1153176, hadoop/common/trunk/common/src/test/test-patch.properties
    hadoop/common/trunk/hadoop-common/src/test/resources/testConf.xml
      - copied unchanged from r1153176, hadoop/common/trunk/common/src/test/core/org/apache/hadoop/cli/testConf.xml
    hadoop/common/trunk/hadoop-common/src/test/resources/webapps/
      - copied from r1153176, hadoop/common/trunk/common/src/test/test-webapps/
    hadoop/common/trunk/hadoop-project/
    hadoop/common/trunk/hadoop-project/pom.xml   (with props)
    hadoop/common/trunk/pom.xml   (with props)
Removed:
    hadoop/common/trunk/hadoop-common/.gitignore
    hadoop/common/trunk/hadoop-common/bin/
    hadoop/common/trunk/hadoop-common/build.xml
    hadoop/common/trunk/hadoop-common/conf/
    hadoop/common/trunk/hadoop-common/ivy.xml
    hadoop/common/trunk/hadoop-common/ivy/hadoop-common-instrumented-template.xml
    hadoop/common/trunk/hadoop-common/ivy/hadoop-common-template.xml
    hadoop/common/trunk/hadoop-common/ivy/hadoop-common-test-template.xml
    hadoop/common/trunk/hadoop-common/ivy/ivysettings.xml
    hadoop/common/trunk/hadoop-common/ivy/libraries.properties
    hadoop/common/trunk/hadoop-common/lib/jdiff/
    hadoop/common/trunk/hadoop-common/src/docs/
    hadoop/common/trunk/hadoop-common/src/fixFontsPath.sh
    hadoop/common/trunk/hadoop-common/src/java/
    hadoop/common/trunk/hadoop-common/src/main/conf/configuration.xsl
    hadoop/common/trunk/hadoop-common/src/main/conf/core-site.xml.template
    hadoop/common/trunk/hadoop-common/src/main/conf/hadoop-env.sh.template
    hadoop/common/trunk/hadoop-common/src/main/conf/hadoop-policy.xml.template
    hadoop/common/trunk/hadoop-common/src/main/conf/masters.template
    hadoop/common/trunk/hadoop-common/src/main/conf/slaves.template
    hadoop/common/trunk/hadoop-common/src/main/java/core-default.xml
    hadoop/common/trunk/hadoop-common/src/main/java/org/apache/hadoop/classification/
    hadoop/common/trunk/hadoop-common/src/main/native/packageNativeHadoop.sh
    hadoop/common/trunk/hadoop-common/src/native/
    hadoop/common/trunk/hadoop-common/src/packages/
    hadoop/common/trunk/hadoop-common/src/saveVersion.sh
    hadoop/common/trunk/hadoop-common/src/test/bin/
    hadoop/common/trunk/hadoop-common/src/test/checkstyle.xml
    hadoop/common/trunk/hadoop-common/src/test/core-site.xml
    hadoop/common/trunk/hadoop-common/src/test/core/org/
    hadoop/common/trunk/hadoop-common/src/test/fi-site.xml
    hadoop/common/trunk/hadoop-common/src/test/findbugsExcludeFile.xml
    hadoop/common/trunk/hadoop-common/src/test/hadoop-policy.xml
    hadoop/common/trunk/hadoop-common/src/test/java/org/apache/hadoop/cli/testConf.xml
    hadoop/common/trunk/hadoop-common/src/test/krb5.conf
    hadoop/common/trunk/hadoop-common/src/test/log4j.properties
    hadoop/common/trunk/hadoop-common/src/test/test-fake-default.xml
    hadoop/common/trunk/hadoop-common/src/test/test-patch.properties
    hadoop/common/trunk/hadoop-common/src/test/test-webapps/
    hadoop/common/trunk/hadoop-common/src/webapps/
Modified:
    hadoop/common/trunk/hadoop-common/CHANGES.txt
    hadoop/common/trunk/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java
    hadoop/common/trunk/hadoop-common/src/main/java/org/apache/hadoop/fs/kfs/KFSImpl.java
    hadoop/common/trunk/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
    hadoop/common/trunk/hadoop-common/src/main/native/Makefile.am
    hadoop/common/trunk/hadoop-common/src/main/packages/templates/conf/core-site.xml
    hadoop/common/trunk/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java
    hadoop/common/trunk/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsConfig.java
    hadoop/common/trunk/hadoop-common/src/test/java/org/apache/hadoop/util/TestShell.java
    hadoop/common/trunk/hadoop-common/src/test/system/conf/hadoop-policy-system-test.xml

Added: hadoop/common/trunk/.gitignore
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/.gitignore?rev=1153184&view=auto
==============================================================================
--- hadoop/common/trunk/.gitignore (added)
+++ hadoop/common/trunk/.gitignore Tue Aug  2 16:37:57 2011
@@ -0,0 +1,7 @@
+*.iml
+*.ipr
+*.iws
+.idea
+.svn
+.classpath
+target

Copied: hadoop/common/trunk/dev-support/test-patch.properties (from r1153176, hadoop/common/trunk/common/src/test/test-patch.properties)
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/dev-support/test-patch.properties?p2=hadoop/common/trunk/dev-support/test-patch.properties&p1=hadoop/common/trunk/common/src/test/test-patch.properties&r1=1153176&r2=1153184&rev=1153184&view=diff
==============================================================================
--- hadoop/common/trunk/common/src/test/test-patch.properties (original)
+++ hadoop/common/trunk/dev-support/test-patch.properties Tue Aug  2 16:37:57 2011
@@ -13,6 +13,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-OK_RELEASEAUDIT_WARNINGS=1
+OK_RELEASEAUDIT_WARNINGS=0
 OK_FINDBUGS_WARNINGS=0
-OK_JAVADOC_WARNINGS=6
+OK_JAVADOC_WARNINGS=0

Copied: hadoop/common/trunk/dev-support/test-patch.sh (from r1153176, hadoop/common/trunk/common/src/test/bin/test-patch.sh)
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/dev-support/test-patch.sh?p2=hadoop/common/trunk/dev-support/test-patch.sh&p1=hadoop/common/trunk/common/src/test/bin/test-patch.sh&r1=1153176&r2=1153184&rev=1153184&view=diff
==============================================================================
--- hadoop/common/trunk/common/src/test/bin/test-patch.sh (original)
+++ hadoop/common/trunk/dev-support/test-patch.sh Tue Aug  2 16:37:57 2011
@@ -19,7 +19,7 @@ ulimit -n 1024
 ### SVN_REVISION and BUILD_URL are set by Hudson if it is run by patch process
 ### Read variables from properties file
 bindir=$(dirname $0)
-. $bindir/../test-patch.properties
+. $bindir/test-patch.properties
 
 ###############################################################################
 parseArgs() {
@@ -118,6 +118,7 @@ checkout () {
       echo "$status"
       cleanupAndExit 1
     fi
+    echo
   else   
     cd $BASEDIR
     $SVN revert -R .
@@ -178,8 +179,9 @@ setup () {
   echo "======================================================================"
   echo ""
   echo ""
-  echo "$ANT_HOME/bin/ant  -Djavac.args="-Xlint -Xmaxwarns 1000" $ECLIPSE_PROPERTY -Dforrest.home=${FORREST_HOME} -D${PROJECT_NAME}PatchProcess= clean tar > $PATCH_DIR/trunkJavacWarnings.txt 2>&1"
- $ANT_HOME/bin/ant -Djavac.args="-Xlint -Xmaxwarns 1000" $ECLIPSE_PROPERTY -Dforrest.home=${FORREST_HOME} -D${PROJECT_NAME}PatchProcess= clean tar > $PATCH_DIR/trunkJavacWarnings.txt 2>&1
+#  echo "$ANT_HOME/bin/ant  -Djavac.args="-Xlint -Xmaxwarns 1000" $ECLIPSE_PROPERTY -Dforrest.home=${FORREST_HOME} -D${PROJECT_NAME}PatchProcess= clean tar > $PATCH_DIR/trunkJavacWarnings.txt 2>&1"
+# $ANT_HOME/bin/ant -Djavac.args="-Xlint -Xmaxwarns 1000" $ECLIPSE_PROPERTY -Dforrest.home=${FORREST_HOME} -D${PROJECT_NAME}PatchProcess= clean tar > $PATCH_DIR/trunkJavacWarnings.txt 2>&1
+  $MAVEN_HOME/bin/mvn clean compile -DskipTests -D${PROJECT_NAME}PatchProcess -Ptest-patch > $PATCH_DIR/trunkJavacWarnings.txt 2>&1
   if [[ $? != 0 ]] ; then
     echo "Trunk compilation is broken?"
     cleanupAndExit 1
@@ -296,8 +298,11 @@ checkJavadocWarnings () {
   echo ""
   echo ""
   echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= clean javadoc | tee $PATCH_DIR/patchJavadocWarnings.txt"
-  $ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= clean javadoc | tee $PATCH_DIR/patchJavadocWarnings.txt
-  javadocWarnings=`$GREP -o '\[javadoc\] [0-9]* warning' $PATCH_DIR/patchJavadocWarnings.txt | awk '{total += $2} END {print total}'`
+  (cd root; mvn install)
+  (cd doclet; mvn install)
+  #$ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= clean javadoc | tee $PATCH_DIR/patchJavadocWarnings.txt
+  $MAVEN_HOME/bin/mvn clean compile javadoc:javadoc -DskipTests -Pdocs -D${PROJECT_NAME}PatchProcess > $PATCH_DIR/patchJavadocWarnings.txt 2>&1
+  javadocWarnings=`$GREP '\[WARNING\]' $PATCH_DIR/patchJavadocWarnings.txt | awk '/Javadoc Warnings/,EOF' | $GREP -v 'Javadoc Warnings' | awk 'BEGIN {total = 0} {total += 1} END {print total}'`
   echo ""
   echo ""
   echo "There appear to be $javadocWarnings javadoc warnings generated by the patched build."
@@ -327,8 +332,9 @@ checkJavacWarnings () {
   echo "======================================================================"
   echo ""
   echo ""
-  echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" -Djavac.args="-Xlint -Xmaxwarns 1000" $ECLIPSE_PROPERTY -Dforrest.home=${FORREST_HOME} -DHadoopPatchProcess= clean tar > $PATCH_DIR/patchJavacWarnings.txt 2>&1"
-  $ANT_HOME/bin/ant -Dversion="${VERSION}" -Djavac.args="-Xlint -Xmaxwarns 1000" $ECLIPSE_PROPERTY -Dforrest.home=${FORREST_HOME} -DHadoopPatchProcess= clean tar > $PATCH_DIR/patchJavacWarnings.txt 2>&1
+  #echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" -Djavac.args="-Xlint -Xmaxwarns 1000" $ECLIPSE_PROPERTY -Dforrest.home=${FORREST_HOME} -DHadoopPatchProcess= clean tar > $PATCH_DIR/patchJavacWarnings.txt 2>&1"
+  #$ANT_HOME/bin/ant -Dversion="${VERSION}" -Djavac.args="-Xlint -Xmaxwarns 1000" $ECLIPSE_PROPERTY -Dforrest.home=${FORREST_HOME} -DHadoopPatchProcess= clean tar > $PATCH_DIR/patchJavacWarnings.txt 2>&1
+  $MAVEN_HOME/bin/mvn clean compile -DskipTests -D${PROJECT_NAME}PatchProcess -Ptest-patch > $PATCH_DIR/patchJavacWarnings.txt 2>&1
   if [[ $? != 0 ]] ; then
     JIRA_COMMENT="$JIRA_COMMENT
 
@@ -337,8 +343,8 @@ checkJavacWarnings () {
   fi
   ### Compare trunk and patch javac warning numbers
   if [[ -f $PATCH_DIR/patchJavacWarnings.txt ]] ; then
-    trunkJavacWarnings=`$GREP -o '\[javac\] [0-9]* warning' $PATCH_DIR/trunkJavacWarnings.txt | awk '{total += $2} END {print total}'`
-    patchJavacWarnings=`$GREP -o '\[javac\] [0-9]* warning' $PATCH_DIR/patchJavacWarnings.txt | awk '{total += $2} END {print total}'`
+    trunkJavacWarnings=`$GREP '\[WARNING\]' $PATCH_DIR/trunkJavacWarnings.txt | awk 'BEGIN {total = 0} {total += 1} END {print total}'`
+    patchJavacWarnings=`$GREP '\[WARNING\]' $PATCH_DIR/patchJavacWarnings.txt | awk 'BEGIN {total = 0} {total += 1} END {print total}'`
     echo "There appear to be $trunkJavacWarnings javac compiler warnings before the patch and $patchJavacWarnings javac compiler warnings after applying the patch."
     if [[ $patchJavacWarnings != "" && $trunkJavacWarnings != "" ]] ; then
       if [[ $patchJavacWarnings -gt $trunkJavacWarnings ]] ; then
@@ -367,8 +373,10 @@ checkReleaseAuditWarnings () {
   echo "======================================================================"
   echo ""
   echo ""
-  echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" -Dforrest.home=${FORREST_HOME} -DHadoopPatchProcess= releaseaudit > $PATCH_DIR/patchReleaseAuditWarnings.txt 2>&1"
-  $ANT_HOME/bin/ant -Dversion="${VERSION}" -Dforrest.home=${FORREST_HOME} -DHadoopPatchProcess= releaseaudit > $PATCH_DIR/patchReleaseAuditWarnings.txt 2>&1
+  #echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" -Dforrest.home=${FORREST_HOME} -DHadoopPatchProcess= releaseaudit > $PATCH_DIR/patchReleaseAuditWarnings.txt 2>&1"
+  #$ANT_HOME/bin/ant -Dversion="${VERSION}" -Dforrest.home=${FORREST_HOME} -DHadoopPatchProcess= releaseaudit > $PATCH_DIR/patchReleaseAuditWarnings.txt 2>&1
+  $MAVEN_HOME/bin/mvn apache-rat:check -D${PROJECT_NAME}PatchProcess 2>&1
+  find . -name rat.txt | xargs cat > $PATCH_DIR/patchReleaseAuditWarnings.txt
 
   ### Compare trunk and patch release audit warning numbers
   if [[ -f $PATCH_DIR/patchReleaseAuditWarnings.txt ]] ; then
@@ -410,8 +418,10 @@ checkStyle () {
   echo "THIS IS NOT IMPLEMENTED YET"
   echo ""
   echo ""
-  echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= checkstyle"
-  $ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= checkstyle
+  #echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= checkstyle"
+  #$ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= checkstyle
+  $MAVEN_HOME/bin/mvn compile checkstyle:checkstyle -D${PROJECT_NAME}PatchProcess
+
   JIRA_COMMENT_FOOTER="Checkstyle results: $BUILD_URL/artifact/trunk/build/test/checkstyle-errors.html
 $JIRA_COMMENT_FOOTER"
   ### TODO: calculate actual patchStyleErrors
@@ -441,27 +451,28 @@ checkFindbugsWarnings () {
   echo "======================================================================"
   echo ""
   echo ""
-  echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" -Dfindbugs.home=$FINDBUGS_HOME -Dforrest.home=${FORREST_HOME} -DHadoopPatchProcess= findbugs"
-  $ANT_HOME/bin/ant -Dversion="${VERSION}" -Dfindbugs.home=${FINDBUGS_HOME} -Dforrest.home=${FORREST_HOME} -DHadoopPatchProcess= findbugs
+  #echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" -Dfindbugs.home=$FINDBUGS_HOME -Dforrest.home=${FORREST_HOME} -DHadoopPatchProcess= findbugs"
+  #$ANT_HOME/bin/ant -Dversion="${VERSION}" -Dfindbugs.home=${FINDBUGS_HOME} -Dforrest.home=${FORREST_HOME} -DHadoopPatchProcess= findbugs
+  $MAVEN_HOME/bin/mvn clean compile findbugs:findbugs -D${PROJECT_NAME}PatchProcess -X
+
   if [ $? != 0 ] ; then
     JIRA_COMMENT="$JIRA_COMMENT
 
     -1 findbugs.  The patch appears to cause Findbugs (version ${findbugs_version}) to fail."
     return 1
   fi
-JIRA_COMMENT_FOOTER="Findbugs warnings: $BUILD_URL/artifact/trunk/build/test/findbugs/newPatchFindbugsWarnings.html
+JIRA_COMMENT_FOOTER="Findbugs warnings: $BUILD_URL/artifact/trunk/target/newPatchFindbugsWarnings.html
 $JIRA_COMMENT_FOOTER"
-  cp $BASEDIR/build/test/findbugs/*.xml $PATCH_DIR/patchFindbugsWarnings.xml
+  
+  cp $BASEDIR/hadoop-common/target/findbugsXml.xml $PATCH_DIR/patchFindbugsWarnings.xml
   $FINDBUGS_HOME/bin/setBugDatabaseInfo -timestamp "01/01/2000" \
     $PATCH_DIR/patchFindbugsWarnings.xml \
     $PATCH_DIR/patchFindbugsWarnings.xml
   findbugsWarnings=`$FINDBUGS_HOME/bin/filterBugs -first "01/01/2000" $PATCH_DIR/patchFindbugsWarnings.xml \
-    $BASEDIR/build/test/findbugs/newPatchFindbugsWarnings.xml | /usr/bin/awk '{print $1}'`
+    $PATCH_DIR/newPatchFindbugsWarnings.xml | /usr/bin/awk '{print $1}'`
   $FINDBUGS_HOME/bin/convertXmlToText -html \
-    $BASEDIR/build/test/findbugs/newPatchFindbugsWarnings.xml \
-    $BASEDIR/build/test/findbugs/newPatchFindbugsWarnings.html
-  cp $BASEDIR/build/test/findbugs/newPatchFindbugsWarnings.html $PATCH_DIR/newPatchFindbugsWarnings.html
-  cp $BASEDIR/build/test/findbugs/newPatchFindbugsWarnings.xml $PATCH_DIR/newPatchFindbugsWarnings.xml
+    $PATCH_DIR/newPatchFindbugsWarnings.xml \
+    $PATCH_DIR/newPatchFindbugsWarnings.html
 
   ### if current warnings greater than OK_FINDBUGS_WARNINGS
   if [[ $findbugsWarnings > $OK_FINDBUGS_WARNINGS ]] ; then
@@ -496,11 +507,12 @@ runCoreTests () {
      PreTestTarget="create-c++-configure"
   fi
 
-  echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= -Dtest.junit.output.format=xml -Dtest.output=no -Dcompile.c++=yes -Dforrest.home=$FORREST_HOME $PreTestTarget test-core"
-  $ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= -Dtest.junit.output.format=xml -Dtest.output=no -Dcompile.c++=yes -Dforrest.home=$FORREST_HOME  $PreTestTarget test-core
+  #echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= -Dtest.junit.output.format=xml -Dtest.output=no -Dcompile.c++=yes -Dforrest.home=$FORREST_HOME $PreTestTarget test-core"
+  #$ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= -Dtest.junit.output.format=xml -Dtest.output=no -Dcompile.c++=yes -Dforrest.home=$FORREST_HOME  $PreTestTarget test-core
+  $MAVEN_HOME/bin/mvn clean test -Pnative -DHadoopPatchProcess
   if [[ $? != 0 ]] ; then
     ### Find and format names of failed tests
-    failed_tests=`grep -l -E "<failure|<error" $WORKSPACE/trunk/build/test/*.xml | sed -e "s|.*build/test/TEST-|                  |g" | sed -e "s|\.xml||g"`
+    failed_tests=`grep -l -E "<failure|<error" $WORKSPACE/trunk/target/hadoop-common/surefire-reports/*.xml | sed -e "s|.*target/surefire-reports/TEST-|                  |g" | sed -e "s|\.xml||g"`
     JIRA_COMMENT="$JIRA_COMMENT
 
     -1 core tests.  The patch failed these core unit tests:
@@ -534,8 +546,9 @@ runContribTests () {
   ### Kill any rogue build processes from the last attempt
   $PS auxwww | $GREP HadoopPatchProcess | /usr/bin/nawk '{print $2}' | /usr/bin/xargs -t -I {} /bin/kill -9 {} > /dev/null
 
-  echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" $ECLIPSE_PROPERTY -DHadoopPatchProcess= -Dtest.junit.output.format=xml -Dtest.output=no test-contrib"
-  $ANT_HOME/bin/ant -Dversion="${VERSION}" $ECLIPSE_PROPERTY -DHadoopPatchProcess= -Dtest.junit.output.format=xml -Dtest.output=no test-contrib
+  #echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" $ECLIPSE_PROPERTY -DHadoopPatchProcess= -Dtest.junit.output.format=xml -Dtest.output=no test-contrib"
+  #$ANT_HOME/bin/ant -Dversion="${VERSION}" $ECLIPSE_PROPERTY -DHadoopPatchProcess= -Dtest.junit.output.format=xml -Dtest.output=no test-contrib
+  echo "NOP"
   if [[ $? != 0 ]] ; then
     JIRA_COMMENT="$JIRA_COMMENT
 
@@ -564,8 +577,9 @@ checkInjectSystemFaults () {
   ### Kill any rogue build processes from the last attempt
   $PS auxwww | $GREP HadoopPatchProcess | /usr/bin/nawk '{print $2}' | /usr/bin/xargs -t -I {} /bin/kill -9 {} > /dev/null
 
-  echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= -Dtest.junit.output.format=xml -Dtest.output=no -Dcompile.c++=yes -Dforrest.home=$FORREST_HOME inject-system-faults"
-  $ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= -Dtest.junit.output.format=xml -Dtest.output=no -Dcompile.c++=yes -Dforrest.home=$FORREST_HOME inject-system-faults
+  #echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= -Dtest.junit.output.format=xml -Dtest.output=no -Dcompile.c++=yes -Dforrest.home=$FORREST_HOME inject-system-faults"
+  #$ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= -Dtest.junit.output.format=xml -Dtest.output=no -Dcompile.c++=yes -Dforrest.home=$FORREST_HOME inject-system-faults
+  echo "NOP"
   if [[ $? != 0 ]] ; then
     JIRA_COMMENT="$JIRA_COMMENT
 

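The hunks above replace the ant-based warning counts with counts of Maven "[WARNING]" lines. A stand-alone sketch of the same comparison, assuming the trunk and patched build logs have already been written by the mvn compile runs above (file names here are placeholders):

    # Count Maven [WARNING] lines in each log and flag the patch if it adds any.
    trunkJavacWarnings=$(grep -c '\[WARNING\]' trunkJavacWarnings.txt)
    patchJavacWarnings=$(grep -c '\[WARNING\]' patchJavacWarnings.txt)
    if [ "$patchJavacWarnings" -gt "$trunkJavacWarnings" ]; then
      echo "-1 javac: the patch appears to introduce $((patchJavacWarnings - trunkJavacWarnings)) new compiler warning(s)."
    fi
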
Added: hadoop/common/trunk/hadoop-annotations/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-annotations/pom.xml?rev=1153184&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-annotations/pom.xml (added)
+++ hadoop/common/trunk/hadoop-annotations/pom.xml Tue Aug  2 16:37:57 2011
@@ -0,0 +1,38 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License. See accompanying LICENSE file.
+-->
+<project>
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.hadoop</groupId>
+    <artifactId>hadoop-project</artifactId>
+    <version>0.23.0-SNAPSHOT</version>
+    <relativePath>../hadoop-project</relativePath>
+  </parent>
+  <groupId>org.apache.hadoop</groupId>
+  <artifactId>hadoop-annotations</artifactId>
+  <version>0.23.0-SNAPSHOT</version>
+  <description>Apache Hadoop Annotations</description>
+  <name>Apache Hadoop Annotations</name>
+  <packaging>jar</packaging>
+
+  <dependencies>
+    <dependency>
+      <groupId>jdiff</groupId>
+      <artifactId>jdiff</artifactId>
+      <scope>compile</scope>
+    </dependency>
+  </dependencies>
+
+</project>

Propchange: hadoop/common/trunk/hadoop-annotations/pom.xml
------------------------------------------------------------------------------
    svn:eol-style = native

Added: hadoop/common/trunk/hadoop-assemblies/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-assemblies/pom.xml?rev=1153184&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-assemblies/pom.xml (added)
+++ hadoop/common/trunk/hadoop-assemblies/pom.xml Tue Aug  2 16:37:57 2011
@@ -0,0 +1,99 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<project>
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>org.apache.hadoop</groupId>
+  <artifactId>hadoop-assemblies</artifactId>
+  <version>0.23.0-SNAPSHOT</version>
+  <name>Apache Hadoop Assemblies</name>
+  <description>Apache Hadoop Assemblies</description>
+
+  <properties>
+    <failIfNoTests>false</failIfNoTests>
+  </properties>
+
+  <build>
+    <pluginManagement>
+      <plugins>
+        <plugin>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-enforcer-plugin</artifactId>
+          <version>1.0</version>
+        </plugin>
+        <plugin>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-assembly-plugin</artifactId>
+          <version>2.2-beta-3</version>
+        </plugin>
+        <plugin>
+          <groupId>org.apache.rat</groupId>
+          <artifactId>apache-rat-plugin</artifactId>
+          <version>0.7</version>
+        </plugin>
+      </plugins>
+    </pluginManagement>
+
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-enforcer-plugin</artifactId>
+        <inherited>false</inherited>
+        <configuration>
+          <rules>
+            <requireMavenVersion>
+              <version>[3.0.0,)</version>
+            </requireMavenVersion>
+            <requireJavaVersion>
+              <version>1.6</version>
+            </requireJavaVersion>
+            <requireOS>
+              <family>unix</family>
+            </requireOS>
+          </rules>
+        </configuration>
+        <executions>
+          <execution>
+            <id>clean</id>
+            <goals>
+              <goal>enforce</goal>
+            </goals>
+            <phase>pre-clean</phase>
+          </execution>
+          <execution>
+            <id>default</id>
+            <goals>
+              <goal>enforce</goal>
+            </goals>
+            <phase>validate</phase>
+          </execution>
+          <execution>
+            <id>site</id>
+            <goals>
+              <goal>enforce</goal>
+            </goals>
+            <phase>pre-site</phase>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.rat</groupId>
+        <artifactId>apache-rat-plugin</artifactId>
+      </plugin>
+    </plugins>
+  </build>
+</project>

Propchange: hadoop/common/trunk/hadoop-assemblies/pom.xml
------------------------------------------------------------------------------
    svn:eol-style = native
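
The enforcer rules above gate the build on Maven 3.0.0 or later, Java 1.6 and a Unix-family OS. A quick, hedged way to check a machine against them before building (nothing Hadoop-specific is assumed):

    # mvn -version reports both the Maven and the Java version in use.
    mvn -version
    # uname -s should report a Unix family member such as Linux or Darwin.
    uname -s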

Added: hadoop/common/trunk/hadoop-assemblies/src/main/resources/assemblies/hadoop-bintar.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-assemblies/src/main/resources/assemblies/hadoop-bintar.xml?rev=1153184&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-assemblies/src/main/resources/assemblies/hadoop-bintar.xml (added)
+++ hadoop/common/trunk/hadoop-assemblies/src/main/resources/assemblies/hadoop-bintar.xml Tue Aug  2 16:37:57 2011
@@ -0,0 +1,113 @@
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<assembly>
+  <id>hadoop-bintar</id>
+  <formats>
+    <format>dir</format>
+  </formats>
+  <includeBaseDirectory>false</includeBaseDirectory>
+  <fileSets>
+    <fileSet>
+      <directory>${basedir}/src/main/bin</directory>
+      <outputDirectory>/bin</outputDirectory>
+      <includes>
+        <include>hadoop</include>
+      </includes>
+      <fileMode>0755</fileMode>
+    </fileSet>
+    <fileSet>
+      <directory>${basedir}/src/main/conf</directory>
+      <outputDirectory>/etc/hadoop</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directory>${basedir}/src/main/bin</directory>
+      <outputDirectory>/libexec</outputDirectory>
+      <includes>
+        <include>hadoop-config.sh</include>
+      </includes>
+      <fileMode>0755</fileMode>
+    </fileSet>
+    <fileSet>
+      <directory>${basedir}/src/main/bin</directory>
+      <outputDirectory>/sbin</outputDirectory>
+      <includes>
+        <include>*.sh</include>
+      </includes>
+      <excludes>
+        <exclude>hadoop-config.sh</exclude>
+      </excludes>
+      <fileMode>0755</fileMode>
+    </fileSet>
+    <fileSet>
+      <directory>${basedir}/src/main/packages</directory>
+      <outputDirectory>/sbin</outputDirectory>
+      <includes>
+        <include>*.sh</include>
+      </includes>
+      <fileMode>0755</fileMode>
+    </fileSet>
+    <fileSet>
+      <directory>${basedir}</directory>
+      <outputDirectory>/share/doc/hadoop/${hadoop.component}</outputDirectory>
+      <includes>
+        <include>*.txt</include>
+      </includes>
+    </fileSet>
+    <fileSet>
+      <directory>${basedir}/src/main/webapps</directory>
+      <outputDirectory>/share/hadoop/${hadoop.component}/webapps</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directory>${basedir}/src/main/conf</directory>
+      <outputDirectory>/share/hadoop/${hadoop.component}/templates</outputDirectory>
+      <includes>
+        <include>*-site.xml</include>
+      </includes>
+    </fileSet>
+    <fileSet>
+      <directory>${project.build.directory}</directory>
+      <outputDirectory>/share/hadoop/${hadoop.component}</outputDirectory>
+      <includes>
+        <include>${project.artifactId}-${project.version}.jar</include>
+        <include>${project.artifactId}-${project.version}-tests.jar</include>
+        <include>${project.artifactId}-${project.version}-sources.jar</include>
+        <include>${project.artifactId}-${project.version}-test-sources.jar</include>
+      </includes>
+    </fileSet>
+    <fileSet>
+      <directory>${basedir}/dev-support/jdiff</directory>
+      <outputDirectory>/share/hadoop/${hadoop.component}/jdiff</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directory>${project.build.directory}/site/jdiff/xml</directory>
+      <outputDirectory>/share/hadoop/${hadoop.component}/jdiff</outputDirectory>
+    </fileSet>
+  </fileSets>
+  <dependencySets>
+    <dependencySet>
+      <outputDirectory>/share/hadoop/${hadoop.component}/lib</outputDirectory>
+      <unpack>false</unpack>
+      <scope>compile</scope>
+      <useProjectArtifact>false</useProjectArtifact>
+      <excludes>
+        <exclude>org.apache.ant:*:jar</exclude>
+        <exclude>org.apache.hadoop:hadoop-*:jar</exclude>
+        <exclude>jdiff:jdiff:jar</exclude>
+      </excludes>
+    </dependencySet>
+  </dependencySets>
+</assembly>

Propchange: hadoop/common/trunk/hadoop-assemblies/src/main/resources/assemblies/hadoop-bintar.xml
------------------------------------------------------------------------------
    svn:eol-style = native
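
The descriptor above uses the "dir" format, so exercising it simply lays the binary image out under target/. A hedged sketch using the -Pbintar profile listed in BUILDING.txt (the exact directory name under target/ depends on the artifact id, version and assembly id):

    # Build the binary layout defined by hadoop-bintar.xml; skip tests to keep the run short.
    mvn package -Pbintar -DskipTests
    # Inspect the generated tree: it should contain bin/, etc/hadoop/, libexec/, sbin/
    # and share/hadoop/common/ as declared in the fileSets above.
    ls target/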

Added: hadoop/common/trunk/hadoop-assemblies/src/main/resources/assemblies/hadoop-src.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-assemblies/src/main/resources/assemblies/hadoop-src.xml?rev=1153184&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-assemblies/src/main/resources/assemblies/hadoop-src.xml (added)
+++ hadoop/common/trunk/hadoop-assemblies/src/main/resources/assemblies/hadoop-src.xml Tue Aug  2 16:37:57 2011
@@ -0,0 +1,37 @@
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0"
+  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd">
+  <id>hadoop-src</id>
+  <formats>
+    <format>dir</format>
+  </formats>
+  <includeBaseDirectory>false</includeBaseDirectory>
+  <fileSets>
+    <fileSet>
+      <directory>${project.basedir}</directory>
+      <outputDirectory>src/</outputDirectory>
+      <useDefaultExcludes>true</useDefaultExcludes>
+      <excludes>
+        <exclude>**/*.log</exclude>
+        <exclude>**/build/**</exclude>
+        <exclude>**/target/**</exclude>
+      </excludes>
+    </fileSet>
+  </fileSets>
+</assembly>

Propchange: hadoop/common/trunk/hadoop-assemblies/src/main/resources/assemblies/hadoop-src.xml
------------------------------------------------------------------------------
    svn:eol-style = native

Added: hadoop/common/trunk/hadoop-assemblies/src/main/resources/assemblies/hadoop-tar.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-assemblies/src/main/resources/assemblies/hadoop-tar.xml?rev=1153184&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-assemblies/src/main/resources/assemblies/hadoop-tar.xml (added)
+++ hadoop/common/trunk/hadoop-assemblies/src/main/resources/assemblies/hadoop-tar.xml Tue Aug  2 16:37:57 2011
@@ -0,0 +1,85 @@
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<assembly>
+  <id>hadoop-tar</id>
+  <formats>
+    <format>dir</format>
+  </formats>
+  <includeBaseDirectory>false</includeBaseDirectory>
+  <fileSets>
+    <fileSet>
+      <directory>${basedir}</directory>
+      <outputDirectory>/</outputDirectory>
+      <includes>
+        <include>*.txt</include>
+      </includes>
+    </fileSet>
+    <fileSet>
+      <directory>${basedir}/src/main/bin</directory>
+      <outputDirectory>/bin</outputDirectory>
+      <includes>
+        <include>*</include>
+      </includes>
+      <fileMode>0755</fileMode>
+    </fileSet>
+    <fileSet>
+      <directory>${basedir}/src/main/bin</directory>
+      <outputDirectory>/libexec</outputDirectory>
+      <includes>
+        <include>hadoop-config.sh</include>
+      </includes>
+      <fileMode>0755</fileMode>
+    </fileSet>
+    <fileSet>
+      <directory>${basedir}/src/main/conf</directory>
+      <outputDirectory>/conf</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directory>${basedir}/src/main/webapps</directory>
+      <outputDirectory>/webapps</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directory>${project.build.directory}/site</directory>
+      <outputDirectory>/docs</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directory>${project.build.directory}</directory>
+      <outputDirectory>/</outputDirectory>
+      <includes>
+        <include>${project.artifactId}-${project.version}.jar</include>
+        <include>${project.artifactId}-${project.version}-tests.jar</include>
+      </includes>
+    </fileSet>
+    <fileSet>
+      <directory>${project.build.directory}/src</directory>
+      <outputDirectory>/src</outputDirectory>
+    </fileSet>
+  </fileSets>
+  <dependencySets>
+    <dependencySet>
+      <outputDirectory>/lib</outputDirectory>
+      <unpack>false</unpack>
+      <scope>compile</scope>
+      <useProjectArtifact>false</useProjectArtifact>
+      <excludes>
+        <exclude>org.apache.ant:*:jar</exclude>
+        <exclude>org.apache.hadoop:hadoop-*:jar</exclude>
+        <exclude>jdiff:jdiff:jar</exclude>
+      </excludes>
+    </dependencySet>
+  </dependencySets>
+</assembly>

Propchange: hadoop/common/trunk/hadoop-assemblies/src/main/resources/assemblies/hadoop-tar.xml
------------------------------------------------------------------------------
    svn:eol-style = native

Added: hadoop/common/trunk/hadoop-common/BUILDING.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common/BUILDING.txt?rev=1153184&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-common/BUILDING.txt (added)
+++ hadoop/common/trunk/hadoop-common/BUILDING.txt Tue Aug  2 16:37:57 2011
@@ -0,0 +1,61 @@
+----------------------------------------------------------------------------------
+Requirements:
+
+* Unix System
+* JDK 1.6
+* Maven 3.0
+* Forrest 0.8 (if generating docs)
+* Findbugs 1.3.9 (if running findbugs)
+* Autotools (if compiling native code)
+* Internet connection for first build (to fetch all Maven and Hadoop dependencies)
+
+----------------------------------------------------------------------------------
+Maven modules:
+
+  hadoop                      (Main Hadoop project)
+         - hadoop-project     (Parent POM for all Hadoop Maven modules.             )
+                              (All plugins & dependencies versions are defined here.)
+         - hadoop-annotations (Generates the Hadoop doclet used to generate the Javadocs)
+         - hadoop-common      (Hadoop common)
+
+----------------------------------------------------------------------------------
+Where to run Maven from?
+
+  It can be run from any module. The only catch is that if it is not run from trunk,
+  all modules that are not part of the build run must be installed in the local
+  Maven cache or available in a Maven repository.
+
+----------------------------------------------------------------------------------
+Maven build goals:
+
+ * Clean                     : mvn clean
+ * Compile                   : mvn compile [-Pnative]
+ * Run tests                 : mvn test [-Pnative]
+ * Create JAR                : mvn package
+ * Run findbugs              : mvn compile findbugs:findbugs
+ * Run checkstyle            : mvn compile checkstyle:checkstyle
+ * Install JAR in M2 cache   : mvn install
+ * Deploy JAR to Maven repo  : mvn deploy
+ * Run clover                : mvn test -Pclover [-DcloverLicenseLocation=${user.name}/.clover.license]
+ * Run Rat                   : mvn apache-rat:check
+ * Build javadocs            : mvn javadoc:javadoc
+ * Build TAR                 : mvn package [-Ptar][-Pbintar][-Pdocs][-Psrc][-Pnative]
+
+ Build options:
+
+  * Use -Pnative to compile/bundle native code
+  * Use -Dsnappy.prefix=(/usr/local) & -Dbundle.snappy=(false) to compile
+    Snappy JNI bindings and to bundle Snappy SO files
+  * Use -Pdocs to generate & bundle the documentation in the TAR (using -Ptar)
+  * Use -Psrc to bundle the source in the TAR (using -Ptar)
+
+   Tests options:
+
+  * Use -DskipTests to skip tests when running the following Maven goals:
+    'package',  'install', 'deploy' or 'verify'
+  * -Dtest=<TESTCLASSNAME>,....
+  * -Dtest.exclude=<TESTCLASSNAME>
+  * -Dtest.exclude.pattern=**/<TESTCLASSNAME1>.java,**/<TESTCLASSNAME2>.java
+
+
+----------------------------------------------------------------------------------

Propchange: hadoop/common/trunk/hadoop-common/BUILDING.txt
------------------------------------------------------------------------------
    svn:eol-style = native
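
A hedged illustration of the "Where to run Maven from?" note above: building hadoop-common on its own only works once the modules it depends on are in the local Maven cache. Assuming the module layout added in this commit:

    # Install the sibling modules into the local cache first...
    (cd hadoop-project && mvn install -DskipTests)
    (cd hadoop-annotations && mvn install -DskipTests)
    # ...then hadoop-common can be built and tested from its own directory,
    # for example running a single test class touched by this commit:
    cd hadoop-common
    mvn test -Dtest=TestShell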

Modified: hadoop/common/trunk/hadoop-common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common/CHANGES.txt?rev=1153184&r1=1153176&r2=1153184&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/trunk/hadoop-common/CHANGES.txt Tue Aug  2 16:37:57 2011
@@ -300,6 +300,9 @@ Trunk (unreleased changes)
     HADOOP-7178. Add a parameter, useRawLocalFileSystem, to copyToLocalFile(..)
     in FileSystem.  (Uma Maheswara Rao G via szetszwo)
 
+    HADOOP-6671. Use maven for hadoop common builds. (Alejandro Abdelnur
+    via tomwhite)
+
   OPTIMIZATIONS
   
     HADOOP-7333. Performance improvement in PureJavaCrc32. (Eric Caspole

Copied: hadoop/common/trunk/hadoop-common/dev-support/checkstyle.xml (from r1153176, hadoop/common/trunk/common/src/test/checkstyle.xml)
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common/dev-support/checkstyle.xml?p2=hadoop/common/trunk/hadoop-common/dev-support/checkstyle.xml&p1=hadoop/common/trunk/common/src/test/checkstyle.xml&r1=1153176&r2=1153184&rev=1153184&view=diff
==============================================================================
--- hadoop/common/trunk/common/src/test/checkstyle.xml (original)
+++ hadoop/common/trunk/hadoop-common/dev-support/checkstyle.xml Tue Aug  2 16:37:57 2011
@@ -51,7 +51,7 @@
 
     <!-- Checks that a package.html file exists for each package.     -->
     <!-- See http://checkstyle.sf.net/config_javadoc.html#PackageHtml -->
-    <module name="PackageHtml"/>
+    <module name="JavadocPackage"/>
 
     <!-- Checks whether files end with a new line.                        -->
     <!-- See http://checkstyle.sf.net/config_misc.html#NewlineAtEndOfFile -->
@@ -61,6 +61,8 @@
     <!-- See http://checkstyle.sf.net/config_misc.html#Translation -->
     <module name="Translation"/>
 
+    <module name="FileLength"/>
+    <module name="FileTabCharacter"/>
 
     <module name="TreeWalker">
 
@@ -112,7 +114,6 @@
 
         <!-- Checks for Size Violations.                    -->
         <!-- See http://checkstyle.sf.net/config_sizes.html -->
-        <module name="FileLength"/>
         <module name="LineLength"/>
         <module name="MethodLength"/>
         <module name="ParameterNumber"/>
@@ -126,7 +127,6 @@
         <module name="NoWhitespaceBefore"/>
         <module name="ParenPad"/>
         <module name="TypecastParenPad"/>
-        <module name="TabCharacter"/>
         <module name="WhitespaceAfter">
 	    	<property name="tokens" value="COMMA, SEMI"/>
 		</module>

Copied: hadoop/common/trunk/hadoop-common/dev-support/saveVersion.sh (from r1153176, hadoop/common/trunk/common/src/saveVersion.sh)
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common/dev-support/saveVersion.sh?p2=hadoop/common/trunk/hadoop-common/dev-support/saveVersion.sh&p1=hadoop/common/trunk/common/src/saveVersion.sh&r1=1153176&r2=1153184&rev=1153184&view=diff
==============================================================================
--- hadoop/common/trunk/common/src/saveVersion.sh (original)
+++ hadoop/common/trunk/hadoop-common/dev-support/saveVersion.sh Tue Aug  2 16:37:57 2011
@@ -43,14 +43,20 @@ else
   branch="Unknown"
   url="file://$cwd"
 fi
-srcChecksum=`find src -name '*.java' | LC_ALL=C sort | xargs md5sum | md5sum | cut -d ' ' -f 1`
 
-mkdir -p $build_dir/src/org/apache/hadoop
+which md5sum > /dev/null
+if [ "$?" = "0" ] ; then
+  srcChecksum=`find src/main/java -name '*.java' | LC_ALL=C sort | xargs md5sum | md5sum | cut -d ' ' -f 1`
+else
+  srcChecksum="Not Available"
+fi
+
+mkdir -p $build_dir/org/apache/hadoop
 cat << EOF | \
   sed -e "s/VERSION/$version/" -e "s/USER/$user/" -e "s/DATE/$date/" \
       -e "s|URL|$url|" -e "s/REV/$revision/" \
       -e "s|BRANCH|$branch|" -e "s/SRCCHECKSUM/$srcChecksum/" \
-      > $build_dir/src/org/apache/hadoop/package-info.java
+      > $build_dir/org/apache/hadoop/package-info.java
 /*
  * Generated by src/saveVersion.sh
  */
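
The relocated script is driven by the maven-antrun-plugin execution in hadoop-common/pom.xml below, which passes the project version and a generated-sources directory. A hedged stand-alone invocation with the same arguments used there:

    # Generates org/apache/hadoop/package-info.java with version, revision and checksum metadata.
    sh dev-support/saveVersion.sh 0.23.0-SNAPSHOT target/generated-src/main/java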

Added: hadoop/common/trunk/hadoop-common/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common/pom.xml?rev=1153184&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-common/pom.xml (added)
+++ hadoop/common/trunk/hadoop-common/pom.xml Tue Aug  2 16:37:57 2011
@@ -0,0 +1,1019 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License. See accompanying LICENSE file.
+-->
+<project>
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.hadoop</groupId>
+    <artifactId>hadoop-project</artifactId>
+    <version>0.23.0-SNAPSHOT</version>
+    <relativePath>../hadoop-project</relativePath>
+  </parent>
+  <groupId>org.apache.hadoop</groupId>
+  <artifactId>hadoop-common</artifactId>
+  <version>0.23.0-SNAPSHOT</version>
+  <description>Apache Hadoop Common</description>
+  <name>Apache Hadoop Common</name>
+  <packaging>jar</packaging>
+
+  <properties>
+    <test.build.data>${project.build.directory}/test/data</test.build.data>
+    <hadoop.log.dir>${project.build.directory}/log</hadoop.log.dir>
+    <test.build.webapps>${project.build.directory}/test-classes/webapps</test.build.webapps>
+    <test.cache.data>${project.build.directory}/test-classes</test.cache.data>
+    <test.build.classes>${project.build.directory}/test-classes</test.build.classes>
+
+    <build.platform>${os.name}-${os.arch}-${sun.arch.data.model}</build.platform>
+    <snappy.prefix>/usr/local</snappy.prefix>
+    <snappy.lib>${snappy.prefix}/lib</snappy.lib>
+    <bundle.snappy>false</bundle.snappy>
+    
+    <hadoop.component>common</hadoop.component>
+  </properties>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-annotations</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>com.google.guava</groupId>
+      <artifactId>guava</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>commons-cli</groupId>
+      <artifactId>commons-cli</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-math</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>xmlenc</groupId>
+      <artifactId>xmlenc</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>commons-httpclient</groupId>
+      <artifactId>commons-httpclient</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>commons-codec</groupId>
+      <artifactId>commons-codec</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>commons-net</groupId>
+      <artifactId>commons-net</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>javax.servlet</groupId>
+      <artifactId>servlet-api</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.mortbay.jetty</groupId>
+      <artifactId>jetty</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.mortbay.jetty</groupId>
+      <artifactId>jetty-util</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>tomcat</groupId>
+      <artifactId>jasper-compiler</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>tomcat</groupId>
+      <artifactId>jasper-runtime</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>javax.servlet.jsp</groupId>
+      <artifactId>jsp-api</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>commons-el</groupId>
+      <artifactId>commons-el</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>commons-logging</groupId>
+      <artifactId>commons-logging</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>commons-logging</groupId>
+      <artifactId>commons-logging-api</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>log4j</groupId>
+      <artifactId>log4j</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>net.java.dev.jets3t</groupId>
+      <artifactId>jets3t</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.mina</groupId>
+      <artifactId>mina-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.ftpserver</groupId>
+      <artifactId>ftplet-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.ftpserver</groupId>
+      <artifactId>ftpserver-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.ftpserver</groupId>
+      <artifactId>ftpserver-deprecated</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>commons-lang</groupId>
+      <artifactId>commons-lang</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>commons-collections</groupId>
+      <artifactId>commons-collections</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>commons-configuration</groupId>
+      <artifactId>commons-configuration</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>hsqldb</groupId>
+      <artifactId>hsqldb</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-log4j12</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.eclipse.jdt</groupId>
+      <artifactId>core</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>oro</groupId>
+      <artifactId>oro</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.codehaus.jackson</groupId>
+      <artifactId>jackson-mapper-asl</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.aspectj</groupId>
+      <artifactId>aspectjrt</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-all</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>avro</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>net.sf.kosmosfs</groupId>
+      <artifactId>kfs</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.ant</groupId>
+      <artifactId>ant</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>com.google.protobuf</groupId>
+      <artifactId>protobuf-java</artifactId>
+      <scope>compile</scope>
+    </dependency>
+  </dependencies>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-surefire-plugin</artifactId>
+        <configuration>
+          <forkMode>always</forkMode>
+          <forkedProcessTimeoutInSeconds>600</forkedProcessTimeoutInSeconds>
+          <argLine>-Xmx1024m</argLine>
+          <environmentVariables>
+            <LD_LIBRARY_PATH>${env.LD_LIBRARY_PATH}:${project.build.directory}/native/lib</LD_LIBRARY_PATH>
+          </environmentVariables>
+          <systemPropertyVariables>
+
+            <!-- TODO: all references in testcases should be updated to this default -->
+            <test.build.data>${test.build.data}</test.build.data>
+            <test.build.webapps>${test.build.webapps}</test.build.webapps>
+            <test.cache.data>${test.cache.data}</test.cache.data>
+            <hadoop.log.dir>${hadoop.log.dir}</hadoop.log.dir>
+            <test.build.classes>${test.build.classes}</test.build.classes>
+
+            <java.net.preferIPv4Stack>true</java.net.preferIPv4Stack>
+            <java.security.krb5.conf>${basedir}/src/test/resources/krb5.conf</java.security.krb5.conf>
+          </systemPropertyVariables>
+          <includes>
+            <include>**/Test*.java</include>
+          </includes>
+          <excludes>
+            <exclude>**/${test.exclude}.java</exclude>
+            <exclude>${test.exclude.pattern}</exclude>
+            <exclude>**/Test*$*.java</exclude>
+          </excludes>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-jar-plugin</artifactId>
+        <executions>
+          <execution>
+            <phase>prepare-package</phase>
+            <goals>
+              <goal>jar</goal>
+              <goal>test-jar</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-source-plugin</artifactId>
+        <executions>
+          <execution>
+            <phase>prepare-package</phase>
+            <goals>
+              <goal>jar</goal>
+              <goal>test-jar</goal>
+            </goals>
+          </execution>
+        </executions>
+        <configuration>
+          <attach>true</attach>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>findbugs-maven-plugin</artifactId>
+        <configuration>
+          <excludeFilterFile>${basedir}/dev-support/findbugsExcludeFile.xml</excludeFilterFile>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-antrun-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>save-version</id>
+            <phase>generate-sources</phase>
+            <goals>
+              <goal>run</goal>
+            </goals>
+            <configuration>
+              <target>
+                <mkdir dir="${project.build.directory}/generated-src/main/java"/>
+                <exec executable="sh">
+                  <arg
+                      line="${basedir}/dev-support/saveVersion.sh ${project.version} ${project.build.directory}/generated-src/main/java"/>
+                </exec>
+              </target>
+            </configuration>
+          </execution>
+          <execution>
+            <id>generate-test-sources</id>
+            <phase>generate-test-sources</phase>
+            <goals>
+              <goal>run</goal>
+            </goals>
+            <configuration>
+              <target>
+
+                <mkdir dir="${project.build.directory}/generated-src/test/java"/>
+
+                <taskdef name="recordcc" classname="org.apache.hadoop.record.compiler.ant.RccTask">
+                  <classpath refid="maven.compile.classpath"/>
+                </taskdef>
+                <recordcc destdir="${project.build.directory}/generated-src/test/java">
+                  <fileset dir="${basedir}/src/test/ddl" includes="**/*.jr"/>
+                </recordcc>
+
+                <taskdef name="schema" classname="org.apache.avro.specific.SchemaTask">
+                  <classpath refid="maven.test.classpath"/>
+                </taskdef>
+                <schema destdir="${project.build.directory}/generated-src/test/java">
+                  <fileset dir="${basedir}/src/test">
+                    <include name="**/*.avsc"/>
+                  </fileset>
+                </schema>
+
+                <taskdef name="schema" classname="org.apache.avro.specific.ProtocolTask">
+                  <classpath refid="maven.test.classpath"/>
+                </taskdef>
+                <schema destdir="${project.build.directory}/generated-src/test/java">
+                  <fileset dir="${basedir}/src/test">
+                    <include name="**/*.avpr"/>
+                  </fileset>
+                </schema>
+              </target>
+            </configuration>
+          </execution>
+          <execution>
+            <id>create-log-dir</id>
+            <phase>process-test-resources</phase>
+            <goals>
+              <goal>run</goal>
+            </goals>
+            <configuration>
+              <target>
+                <!--
+                TODO: there are tests (TestLocalFileSystem#testCopy) that fail if data
+                TODO: from a previous run is present
+                -->
+                <delete dir="${test.build.data}"/>
+                <mkdir dir="${hadoop.log.dir}"/>
+
+                <copy toDir="${project.build.directory}/test-classes">
+                  <fileset dir="${basedir}/src/main/conf"/>
+                </copy>
+              </target>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>build-helper-maven-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>add-source</id>
+            <phase>generate-sources</phase>
+            <goals>
+              <goal>add-source</goal>
+            </goals>
+            <configuration>
+              <sources>
+                <source>${project.build.directory}/generated-src/main/java</source>
+              </sources>
+            </configuration>
+          </execution>
+          <execution>
+            <id>add-test-source</id>
+            <phase>generate-test-sources</phase>
+            <goals>
+              <goal>add-test-source</goal>
+            </goals>
+            <configuration>
+              <sources>
+                <source>${project.build.directory}/generated-src/test/java</source>
+              </sources>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-checkstyle-plugin</artifactId>
+        <configuration>
+          <configLocation>file://${basedir}/dev-support/checkstyle.xml</configLocation>
+          <failOnViolation>false</failOnViolation>
+          <format>xml</format>
+          <format>html</format>
+          <outputFile>${project.build.directory}/test/checkstyle-errors.xml</outputFile>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.rat</groupId>
+        <artifactId>apache-rat-plugin</artifactId>
+        <configuration>
+          <excludes>
+            <exclude>CHANGES.txt</exclude>
+            <exclude>.idea/**</exclude>
+            <exclude>src/main/conf/*</exclude>
+            <exclude>src/main/docs/**</exclude>
+            <exclude>dev-support/jdiff/**</exclude>
+            <exclude>src/main/native/*</exclude>
+            <exclude>src/main/native/config/*</exclude>
+            <exclude>src/main/resources/META-INF/services/org.apache.hadoop.security.SecurityInfo</exclude>
+            <exclude>src/main/native/m4/*</exclude>
+            <exclude>src/test/empty-file</exclude>
+            <exclude>src/test/all-tests</exclude>
+          </excludes>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-javadoc-plugin</artifactId>
+        <configuration>
+          <linksource>true</linksource>
+          <quiet>true</quiet>
+          <verbose>false</verbose>
+          <source>${maven.compile.source}</source>
+          <charset>${maven.compile.encoding}</charset>
+          <reportOutputDirectory>${project.build.directory}/site</reportOutputDirectory>
+          <destDir>api</destDir>
+          <groups>
+            <group>
+              <title>${project.name} API</title>
+              <packages>org.apache.hadoop*</packages>
+            </group>
+          </groups>
+          <doclet>org.apache.hadoop.classification.tools.ExcludePrivateAnnotationsStandardDoclet</doclet>
+          <docletArtifacts>
+            <docletArtifact>
+              <groupId>org.apache.hadoop</groupId>
+              <artifactId>hadoop-annotations</artifactId>
+              <version>${project.version}</version>
+            </docletArtifact>
+          </docletArtifacts>
+          <useStandardDocletOptions>true</useStandardDocletOptions>
+
+          <!-- switch on dependency-driven aggregation -->
+          <includeDependencySources>true</includeDependencySources>
+
+          <dependencySourceIncludes>
+            <!-- include ONLY dependencies I control -->
+            <dependencySourceInclude>org.apache.hadoop:hadoop-annotations</dependencySourceInclude>
+          </dependencySourceIncludes>
+
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+
+  <profiles>
+    <profile>
+      <id>native</id>
+      <activation>
+        <activeByDefault>false</activeByDefault>
+      </activation>
+      <properties>
+        <snappy.prefix>/usr/local</snappy.prefix>
+        <snappy.lib>${snappy.prefix}/lib</snappy.lib>
+        <snappy.include>${snappy.prefix}/include</snappy.include>
+      </properties>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-antrun-plugin</artifactId>
+            <executions>
+              <execution>
+                <id>compile</id>
+                <phase>compile</phase>
+                <goals>
+                  <goal>run</goal>
+                </goals>
+                <configuration>
+                  <target>
+                    <mkdir dir="${project.build.directory}/native/javah"/>
+                    <copy toDir="${project.build.directory}/native">
+                      <fileset dir="${basedir}/src/main/native"/>
+                    </copy>
+                    <mkdir dir="${project.build.directory}/native/m4"/>
+                  </target>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+          <plugin>
+            <groupId>org.codehaus.mojo</groupId>
+            <artifactId>native-maven-plugin</artifactId>
+            <executions>
+              <execution>
+                <phase>compile</phase>
+                <goals>
+                  <goal>javah</goal>
+                </goals>
+                <configuration>
+                  <javahPath>${env.JAVA_HOME}/bin/javah</javahPath>
+                  <javahClassNames>
+                    <javahClassName>org.apache.hadoop.io.compress.zlib.ZlibCompressor</javahClassName>
+                    <javahClassName>org.apache.hadoop.io.compress.zlib.ZlibDecompressor</javahClassName>
+                    <javahClassName>org.apache.hadoop.security.JniBasedUnixGroupsMapping</javahClassName>
+                    <javahClassName>org.apache.hadoop.io.nativeio.NativeIO</javahClassName>
+                    <javahClassName>org.apache.hadoop.security.JniBasedUnixGroupsNetgroupMapping</javahClassName>
+                    <javahClassName>org.apache.hadoop.io.compress.snappy.SnappyCompressor</javahClassName>
+                    <javahClassName>org.apache.hadoop.io.compress.snappy.SnappyDecompressor</javahClassName>
+                  </javahClassNames>
+                  <javahOutputDirectory>${project.build.directory}/native/javah</javahOutputDirectory>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+          <plugin>
+            <groupId>org.codehaus.mojo</groupId>
+            <artifactId>make-maven-plugin</artifactId>
+            <executions>
+              <execution>
+                <id>compile</id>
+                <phase>compile</phase>
+                <goals>
+                  <goal>autoreconf</goal>
+                  <goal>configure</goal>
+                  <goal>make-install</goal>
+                </goals>
+              </execution>
+            </executions>
+            <configuration>
+              <!-- autoreconf settings -->
+              <workDir>${project.build.directory}/native</workDir>
+              <arguments>
+                <argument>-i</argument>
+                <argument>-f</argument>
+              </arguments>
+
+              <!-- configure settings -->
+              <configureEnvironment>
+                <property>
+                  <name>OS_NAME</name>
+                  <value>${os.name}</value>
+                </property>
+                <property>
+                  <name>OS_ARCH</name>
+                  <value>${os.arch}</value>
+                </property>
+                <property>
+                  <name>JVM_DATA_MODEL</name>
+                  <value>${sun.arch.data.model}</value>
+                </property>
+              </configureEnvironment>
+              <configureOptions>
+                <configureOption>CPPFLAGS=-I${snappy.include}</configureOption>
+                <configureOption>LDFLAGS=-L${snappy.lib}</configureOption>
+              </configureOptions>
+              <configureWorkDir>${project.build.directory}/native</configureWorkDir>
+              <prefix>/usr/local</prefix>
+
+              <!-- make settings -->
+              <installEnvironment>
+                <property>
+                  <name>OS_NAME</name>
+                  <value>${os.name}</value>
+                </property>
+                <property>
+                  <name>OS_ARCH</name>
+                  <value>${os.arch}</value>
+                </property>
+                <property>
+                  <name>JVM_DATA_MODEL</name>
+                  <value>${sun.arch.data.model}</value>
+                </property>
+                <property>
+                  <name>HADOOP_NATIVE_SRCDIR</name>
+                  <value>${project.build.directory}/native</value>
+                </property>
+              </installEnvironment>
+
+              <!-- configure & make settings -->
+              <destDir>${project.build.directory}/native/target</destDir>
+
+            </configuration>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
+
+    <profile>
+      <id>docs</id>
+      <activation>
+        <activeByDefault>false</activeByDefault>
+      </activation>
+      <properties>
+        <jdiff.stable.api>0.20.2</jdiff.stable.api>
+        <jdiff.stability>-unstable</jdiff.stability>
+        <jdiff.compatibility></jdiff.compatibility>
+        <jdiff.javadoc.maxmemory>512m</jdiff.javadoc.maxmemory>
+      </properties>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-javadoc-plugin</artifactId>
+            <executions>
+              <execution>
+                <goals>
+                  <goal>javadoc</goal>
+                </goals>
+                <phase>prepare-package</phase>
+              </execution>
+            </executions>
+          </plugin>
+          <plugin>
+            <groupId>org.codehaus.mojo</groupId>
+            <artifactId>findbugs-maven-plugin</artifactId>
+            <executions>
+              <execution>
+                <goals>
+                  <goal>findbugs</goal>
+                </goals>
+                <phase>prepare-package</phase>
+              </execution>
+            </executions>
+            <configuration>
+              <excludeFilterFile>${basedir}/dev-support/findbugsExcludeFile.xml</excludeFilterFile>
+            </configuration>
+          </plugin>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-dependency-plugin</artifactId>
+            <executions>
+              <execution>
+                <id>site</id>
+                <phase>prepare-package</phase>
+                <goals>
+                  <goal>copy</goal>
+                </goals>
+                <configuration>
+                  <artifactItems>
+                    <artifactItem>
+                      <groupId>jdiff</groupId>
+                      <artifactId>jdiff</artifactId>
+                      <version>${jdiff.version}</version>
+                      <overWrite>false</overWrite>
+                      <outputDirectory>${project.build.directory}</outputDirectory>
+                      <destFileName>jdiff.jar</destFileName>
+                    </artifactItem>
+                    <artifactItem>
+                      <groupId>org.apache.hadoop</groupId>
+                      <artifactId>hadoop-annotations</artifactId>
+                      <version>${hadoop.annotations.version}</version>
+                      <overWrite>false</overWrite>
+                      <outputDirectory>${project.build.directory}</outputDirectory>
+                      <destFileName>hadoop-annotations.jar</destFileName>
+                    </artifactItem>
+                  </artifactItems>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-antrun-plugin</artifactId>
+            <executions>
+              <execution>
+                <id>site</id>
+                <phase>prepare-package</phase>
+                <goals>
+                  <goal>run</goal>
+                </goals>
+                <configuration>
+                  <target>
+
+                    <mkdir dir="${project.build.directory}/docs-src"/>
+
+                    <copy todir="${project.build.directory}/docs-src">
+                      <fileset dir="${basedir}/src/main/docs"/>
+                    </copy>
+
+                    <!-- Docs -->
+                    <exec dir="${project.build.directory}/docs-src"
+                          executable="${env.FORREST_HOME}/bin/forrest"
+                          failonerror="true">
+                    </exec>
+                    <copy todir="${project.build.directory}/site">
+                      <fileset dir="${project.build.directory}/docs-src/build/site"/>
+                    </copy>
+                    <copy file="${project.build.directory}/docs-src/releasenotes.html"
+                          todir="${project.build.directory}/site"/>
+                    <style basedir="${basedir}/src/main/resources"
+                           destdir="${project.build.directory}/site"
+                           includes="core-default.xml"
+                           style="${basedir}/src/main/xsl/configuration.xsl"/>
+
+                    <!-- Convert 'CHANGES.txt' to 'changes.html' -->
+                    <exec executable="perl" input="${basedir}/../CHANGES.txt"
+                          output="${project.build.directory}/site/changes.html"
+                          failonerror="true">
+                      <arg value="${project.build.directory}/docs-src/changes/changes2html.pl"/>
+                    </exec>
+                    <copy todir="${project.build.directory}/site">
+                      <fileset dir="${project.build.directory}/docs-src/changes" includes="*.css"/>
+                    </copy>
+
+                    <!-- Jdiff -->
+                    <mkdir dir="${project.build.directory}/site/jdiff/xml"/>
+
+                    <javadoc maxmemory="${jdiff.javadoc.maxmemory}" verbose="yes">
+                      <doclet name="org.apache.hadoop.classification.tools.ExcludePrivateAnnotationsJDiffDoclet"
+                              path="${project.build.directory}/hadoop-annotations.jar:${project.build.directory}/jdiff.jar">
+                        <param name="-apidir" value="${project.build.directory}/site/jdiff/xml"/>
+                        <param name="-apiname" value="hadoop-core ${project.version}"/>
+                        <param name="${jdiff.stability}"/>
+                      </doclet>
+                      <packageset dir="${basedir}/src/main/java"/>
+                      <classpath>
+                        <path refid="maven.compile.classpath"/>
+                      </classpath>
+                    </javadoc>
+                    <javadoc sourcepath="${basedir}/src/main/java"
+                             destdir="${project.build.directory}/site/jdiff/xml"
+                             sourceFiles="${basedir}/dev-support/jdiff/Null.java"
+                             maxmemory="${jdiff.javadoc.maxmemory}">
+                      <doclet name="org.apache.hadoop.classification.tools.ExcludePrivateAnnotationsJDiffDoclet"
+                              path="${project.build.directory}/hadoop-annotations.jar:${project.build.directory}/jdiff.jar">
+                        <param name="-oldapi" value="hadoop-core ${jdiff.stable.api}"/>
+                        <param name="-newapi" value="hadoop-core ${project.version}"/>
+                        <param name="-oldapidir" value="${basedir}/dev-support/jdiff"/>
+                        <param name="-newapidir" value="${project.build.directory}/site/jdiff/xml"/>
+                        <param name="-javadocold"
+                               value="http://hadoop.apache.org/docs/${jdiff.stable.api}/api/"/>
+                        <param name="-javadocnew" value="${project.build.directory}/site/api"/>
+                        <param name="-stats"/>
+                        <param name="${jdiff.stability}"/>
+                        <param name="${jdiff.compatibility}"/>
+                      </doclet>
+                      <classpath>
+                        <path refid="maven.compile.classpath"/>
+                      </classpath>
+                    </javadoc>
+
+                    <xslt style="${env.FINDBUGS_HOME}/src/xsl/default.xsl"
+                          in="${project.build.directory}/findbugsXml.xml"
+                          out="${project.build.directory}/site/findbugs.html"/>
+
+                  </target>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
+
+    <profile>
+      <id>src</id>
+      <activation>
+        <activeByDefault>false</activeByDefault>
+      </activation>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-assembly-plugin</artifactId>
+            <dependencies>
+              <dependency>
+                <groupId>org.apache.hadoop</groupId>
+                <artifactId>hadoop-assemblies</artifactId>
+                <version>${hadoop.assemblies.version}</version>
+              </dependency>
+            </dependencies>
+            <executions>
+              <execution>
+                <id>pre-tar-src</id>
+                <phase>prepare-package</phase>
+                <goals>
+                  <goal>single</goal>
+                </goals>
+                <configuration>
+                  <appendAssemblyId>false</appendAssemblyId>
+                  <attach>false</attach>
+                  <finalName>${project.artifactId}-${project.version}</finalName>
+                  <descriptorRefs>
+                    <descriptorRef>hadoop-src</descriptorRef>
+                  </descriptorRefs>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
+
+    <profile>
+      <id>tar</id>
+      <activation>
+        <activeByDefault>false</activeByDefault>
+      </activation>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-antrun-plugin</artifactId>
+            <executions>
+              <execution>
+                <id>pre-tar</id>
+                <phase>prepare-package</phase>
+                <goals>
+                  <goal>run</goal>
+                </goals>
+                <configuration>
+                  <target>
+                    <!-- Using Unix script to preserve symlinks -->
+                    <echo file="${project.build.directory}/tar-copynativelibs.sh">
+
+                      TAR='tar cf -'
+                      UNTAR='tar xfBp -'
+                      LIB_DIR="${project.build.directory}/native/target/usr/local/lib"
+                      if [ -d $${LIB_DIR} ] ; then
+                      TARGET_DIR="${project.build.directory}/${project.artifactId}-${project.version}/lib/native/${build.platform}"
+                      mkdir -p $${TARGET_DIR}
+                      cd $${LIB_DIR}
+                      $$TAR *hadoop* | (cd $${TARGET_DIR}/; $$UNTAR)
+                      if [ "${bundle.snappy}" = "true" ] ; then
+                      cd ${snappy.lib}
+                      $$TAR *snappy* | (cd $${TARGET_DIR}/; $$UNTAR)
+                      fi
+                      fi
+                    </echo>
+                    <exec executable="sh" dir="${project.build.directory}" failonerror="true">
+                      <arg line="./tar-copynativelibs.sh"/>
+                    </exec>
+                  </target>
+                </configuration>
+              </execution>
+              <execution>
+                <id>tar</id>
+                <phase>package</phase>
+                <goals>
+                  <goal>run</goal>
+                </goals>
+                <configuration>
+                  <target>
+                    <!-- Using Unix tar to preserve symlinks -->
+                    <exec executable="tar" dir="${project.build.directory}" failonerror="yes">
+                      <arg value="czf"/>
+                      <arg value="${project.build.directory}/${project.artifactId}-${project.version}.tar.gz"/>
+                      <arg value="${project.artifactId}-${project.version}"/>
+                    </exec>
+                  </target>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-assembly-plugin</artifactId>
+            <dependencies>
+              <dependency>
+                <groupId>org.apache.hadoop</groupId>
+                <artifactId>hadoop-assemblies</artifactId>
+                <version>${hadoop.assemblies.version}</version>
+              </dependency>
+            </dependencies>
+            <executions>
+              <execution>
+                <id>pre-tar</id>
+                <phase>prepare-package</phase>
+                <goals>
+                  <goal>single</goal>
+                </goals>
+                <configuration>
+                  <appendAssemblyId>false</appendAssemblyId>
+                  <attach>false</attach>
+                  <finalName>${project.artifactId}-${project.version}</finalName>
+                  <descriptorRefs>
+                    <descriptorRef>hadoop-tar</descriptorRef>
+                  </descriptorRefs>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
+
+    <profile>
+      <id>bintar</id>
+      <activation>
+        <activeByDefault>false</activeByDefault>
+      </activation>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-antrun-plugin</artifactId>
+            <executions>
+              <execution>
+                <id>pre-bintar</id>
+                <phase>prepare-package</phase>
+                <goals>
+                  <goal>run</goal>
+                </goals>
+                <configuration>
+                  <target>
+                    <!-- Using Unix script to preserve symlinks -->
+                    <echo file="${project.build.directory}/bintar-copynativelibs.sh">
+
+                      TAR='tar cf -'
+                      UNTAR='tar xfBp -'
+                      LIB_DIR="${project.build.directory}/native/target/usr/local/lib"
+                      if [ -d $${LIB_DIR} ] ; then
+                      TARGET_DIR="${project.build.directory}/${project.artifactId}-${project.version}-bin/lib"
+                      mkdir -p $${TARGET_DIR}
+                      cd $${LIB_DIR}
+                      $$TAR *hadoop* | (cd $${TARGET_DIR}/; $$UNTAR)
+                      if [ "${bundle.snappy}" = "true" ] ; then
+                      cd ${snappy.lib}
+                      $$TAR *snappy* | (cd $${TARGET_DIR}/; $$UNTAR)
+                      fi
+                      fi
+                    </echo>
+                    <exec executable="sh" dir="${project.build.directory}" failonerror="true">
+                      <arg line="./bintar-copynativelibs.sh"/>
+                    </exec>
+                  </target>
+                </configuration>
+              </execution>
+              <execution>
+                <id>bintar</id>
+                <phase>package</phase>
+                <goals>
+                  <goal>run</goal>
+                </goals>
+                <configuration>
+                  <target>
+                    <!-- Using Unix tar to preserve symlinks -->
+                    <exec executable="tar" dir="${project.build.directory}" failonerror="yes">
+                      <arg value="czf"/>
+                      <arg value="${project.build.directory}/${project.artifactId}-${project.version}-bin.tar.gz"/>
+                      <arg value="${project.artifactId}-${project.version}-bin"/>
+                    </exec>
+                  </target>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-assembly-plugin</artifactId>
+            <dependencies>
+              <dependency>
+                <groupId>org.apache.hadoop</groupId>
+                <artifactId>hadoop-assemblies</artifactId>
+                <version>${hadoop.assemblies.version}</version>
+              </dependency>
+            </dependencies>
+            <executions>
+              <execution>
+                <id>pre-bintar</id>
+                <phase>prepare-package</phase>
+                <goals>
+                  <goal>single</goal>
+                </goals>
+                <configuration>
+                  <appendAssemblyId>false</appendAssemblyId>
+                  <attach>false</attach>
+                  <finalName>${project.artifactId}-${project.version}-bin</finalName>
+                  <descriptorRefs>
+                    <descriptorRef>hadoop-bintar</descriptorRef>
+                  </descriptorRefs>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
+  </profiles>
+</project>
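
For reference, the maven-surefire-plugin section above forks a fresh JVM per
test class, applies a 600-second timeout and a 1024 MB heap, and honours the
test.exclude / test.exclude.pattern properties in its excludes list. A minimal
sketch of driving it from the command line, assuming standard Maven 3 and
surefire behaviour plus the property names defined in this pom (the class
names below are only illustrative):

    # run the whole suite with the settings above
    mvn test

    # run a single test class via surefire's standard -Dtest switch
    mvn test -Dtest=TestLocalFileSystem

    # skip one class, or a whole pattern, through the pom's exclude properties
    mvn test -Dtest.exclude=TestLocalFileSystem
    mvn test -Dtest.exclude.pattern='**/TestFoo*.java'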

Propchange: hadoop/common/trunk/hadoop-common/pom.xml
------------------------------------------------------------------------------
    svn:eol-style = native
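
The profiles added above (native, docs, src, tar, bintar) are all inactive by
default. A hedged sketch of the invocations they appear designed for, assuming
a snappy install under /usr/local and FORREST_HOME / FINDBUGS_HOME pointing at
local Forrest and FindBugs installs as the docs profile expects; the exact
flags and paths are illustrative, not part of the commit:

    # native codecs: javah plus autoreconf/configure/make via make-maven-plugin
    mvn compile -Pnative -Dsnappy.prefix=/usr/local

    # Forrest site, javadoc, jdiff and findbugs HTML report
    FORREST_HOME=/opt/apache-forrest FINDBUGS_HOME=/opt/findbugs \
        mvn package -Pdocs

    # source, binary and full tarballs from the assembly/antrun executions,
    # optionally bundling the snappy shared library next to the hadoop natives
    mvn package -Psrc
    mvn package -Ptar,native -Dbundle.snappy=true -Dsnappy.lib=/usr/local/lib
    mvn package -Pbintar,native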

Modified: hadoop/common/trunk/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java?rev=1153184&r1=1153176&r2=1153184&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java (original)
+++ hadoop/common/trunk/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java Tue Aug  2 16:37:57 2011
@@ -91,7 +91,7 @@ public abstract class AbstractFileSystem
     StringTokenizer tokens = new StringTokenizer(src, Path.SEPARATOR);
     while(tokens.hasMoreTokens()) {
       String element = tokens.nextToken();
-      if (element.equals("..") || 
+      if (element.equals("..") ||
           element.equals(".")  ||
           (element.indexOf(":") >= 0)) {
         return false;


