hadoop-hdfs-commits mailing list archives

From c..@apache.org
Subject svn commit: r950329 - in /hadoop/hdfs/branches/branch-0.21: ./ ivy/ src/c++/libhdfs/ src/contrib/hdfsproxy/ src/java/ src/java/org/apache/hadoop/hdfs/server/datanode/ src/test/aop/build/ src/test/hdfs/ src/test/system/ src/test/system/aop/ src/test/sys...
Date Wed, 02 Jun 2010 01:21:08 GMT
Author: cos
Date: Wed Jun  2 01:21:07 2010
New Revision: 950329

URL: http://svn.apache.org/viewvc?rev=950329&view=rev
Log:
HDFS-1134. svn merge -c 950323 from trunk.
The source code has been adjusted for the differences between 0.21 and trunk.
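
For context, a single-revision backport like this one is typically produced with commands along these lines (the repository URL is spelled out here for illustration; the exact invocation may have differed):

    cd branch-0.21
    svn merge -c 950323 http://svn.apache.org/repos/asf/hadoop/hdfs/trunk .
    # reconcile 0.21-vs-trunk differences by hand, then:
    svn commit -m "HDFS-1134. svn merge -c 950323 from trunk."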

Added:
    hadoop/hdfs/branches/branch-0.21/ivy/hadoop-hdfs-instrumented-template.xml
      - copied, changed from r950323, hadoop/hdfs/trunk/ivy/hadoop-hdfs-instrumented-template.xml
    hadoop/hdfs/branches/branch-0.21/ivy/hadoop-hdfs-instrumented-test-template.xml
      - copied unchanged from r950323, hadoop/hdfs/trunk/ivy/hadoop-hdfs-instrumented-test-template.xml
    hadoop/hdfs/branches/branch-0.21/src/test/system/
      - copied from r950323, hadoop/hdfs/trunk/src/test/system/
    hadoop/hdfs/branches/branch-0.21/src/test/system/aop/
      - copied from r950323, hadoop/hdfs/trunk/src/test/system/aop/
    hadoop/hdfs/branches/branch-0.21/src/test/system/aop/org/
      - copied from r950323, hadoop/hdfs/trunk/src/test/system/aop/org/
    hadoop/hdfs/branches/branch-0.21/src/test/system/aop/org/apache/
      - copied from r950323, hadoop/hdfs/trunk/src/test/system/aop/org/apache/
    hadoop/hdfs/branches/branch-0.21/src/test/system/aop/org/apache/hadoop/
      - copied from r950323, hadoop/hdfs/trunk/src/test/system/aop/org/apache/hadoop/
    hadoop/hdfs/branches/branch-0.21/src/test/system/aop/org/apache/hadoop/hdfs/
      - copied from r950323, hadoop/hdfs/trunk/src/test/system/aop/org/apache/hadoop/hdfs/
    hadoop/hdfs/branches/branch-0.21/src/test/system/aop/org/apache/hadoop/hdfs/HDFSPolicyProviderAspect.aj
      - copied unchanged from r950323, hadoop/hdfs/trunk/src/test/system/aop/org/apache/hadoop/hdfs/HDFSPolicyProviderAspect.aj
    hadoop/hdfs/branches/branch-0.21/src/test/system/aop/org/apache/hadoop/hdfs/server/
      - copied from r950323, hadoop/hdfs/trunk/src/test/system/aop/org/apache/hadoop/hdfs/server/
    hadoop/hdfs/branches/branch-0.21/src/test/system/aop/org/apache/hadoop/hdfs/server/datanode/
      - copied from r950323, hadoop/hdfs/trunk/src/test/system/aop/org/apache/hadoop/hdfs/server/datanode/
    hadoop/hdfs/branches/branch-0.21/src/test/system/aop/org/apache/hadoop/hdfs/server/datanode/DataNodeAspect.aj
      - copied unchanged from r950323, hadoop/hdfs/trunk/src/test/system/aop/org/apache/hadoop/hdfs/server/datanode/DataNodeAspect.aj
    hadoop/hdfs/branches/branch-0.21/src/test/system/aop/org/apache/hadoop/hdfs/server/namenode/
      - copied from r950323, hadoop/hdfs/trunk/src/test/system/aop/org/apache/hadoop/hdfs/server/namenode/
    hadoop/hdfs/branches/branch-0.21/src/test/system/aop/org/apache/hadoop/hdfs/server/namenode/NameNodeAspect.aj
      - copied unchanged from r950323, hadoop/hdfs/trunk/src/test/system/aop/org/apache/hadoop/hdfs/server/namenode/NameNodeAspect.aj
    hadoop/hdfs/branches/branch-0.21/src/test/system/conf/
      - copied from r950323, hadoop/hdfs/trunk/src/test/system/conf/
    hadoop/hdfs/branches/branch-0.21/src/test/system/conf/system-test-hdfs.xml
      - copied unchanged from r950323, hadoop/hdfs/trunk/src/test/system/conf/system-test-hdfs.xml
    hadoop/hdfs/branches/branch-0.21/src/test/system/java/
      - copied from r950323, hadoop/hdfs/trunk/src/test/system/java/
    hadoop/hdfs/branches/branch-0.21/src/test/system/java/org/
      - copied from r950323, hadoop/hdfs/trunk/src/test/system/java/org/
    hadoop/hdfs/branches/branch-0.21/src/test/system/java/org/apache/
      - copied from r950323, hadoop/hdfs/trunk/src/test/system/java/org/apache/
    hadoop/hdfs/branches/branch-0.21/src/test/system/java/org/apache/hadoop/
      - copied from r950323, hadoop/hdfs/trunk/src/test/system/java/org/apache/hadoop/
    hadoop/hdfs/branches/branch-0.21/src/test/system/java/org/apache/hadoop/hdfs/
      - copied from r950323, hadoop/hdfs/trunk/src/test/system/java/org/apache/hadoop/hdfs/
    hadoop/hdfs/branches/branch-0.21/src/test/system/java/org/apache/hadoop/hdfs/test/
      - copied from r950323, hadoop/hdfs/trunk/src/test/system/java/org/apache/hadoop/hdfs/test/
    hadoop/hdfs/branches/branch-0.21/src/test/system/java/org/apache/hadoop/hdfs/test/system/
      - copied from r950323, hadoop/hdfs/trunk/src/test/system/java/org/apache/hadoop/hdfs/test/system/
    hadoop/hdfs/branches/branch-0.21/src/test/system/java/org/apache/hadoop/hdfs/test/system/DNClient.java
      - copied unchanged from r950323, hadoop/hdfs/trunk/src/test/system/java/org/apache/hadoop/hdfs/test/system/DNClient.java
    hadoop/hdfs/branches/branch-0.21/src/test/system/java/org/apache/hadoop/hdfs/test/system/DNProtocol.java
      - copied, changed from r950323, hadoop/hdfs/trunk/src/test/system/java/org/apache/hadoop/hdfs/test/system/DNProtocol.java
    hadoop/hdfs/branches/branch-0.21/src/test/system/java/org/apache/hadoop/hdfs/test/system/HDFSCluster.java
      - copied unchanged from r950323, hadoop/hdfs/trunk/src/test/system/java/org/apache/hadoop/hdfs/test/system/HDFSCluster.java
    hadoop/hdfs/branches/branch-0.21/src/test/system/java/org/apache/hadoop/hdfs/test/system/HDFSDaemonClient.java
      - copied unchanged from r950323, hadoop/hdfs/trunk/src/test/system/java/org/apache/hadoop/hdfs/test/system/HDFSDaemonClient.java
    hadoop/hdfs/branches/branch-0.21/src/test/system/java/org/apache/hadoop/hdfs/test/system/NNClient.java
      - copied unchanged from r950323, hadoop/hdfs/trunk/src/test/system/java/org/apache/hadoop/hdfs/test/system/NNClient.java
    hadoop/hdfs/branches/branch-0.21/src/test/system/java/org/apache/hadoop/hdfs/test/system/NNProtocol.java
      - copied, changed from r950323, hadoop/hdfs/trunk/src/test/system/java/org/apache/hadoop/hdfs/test/system/NNProtocol.java
    hadoop/hdfs/branches/branch-0.21/src/test/system/test/
      - copied from r950323, hadoop/hdfs/trunk/src/test/system/test/
    hadoop/hdfs/branches/branch-0.21/src/test/system/test/org/
      - copied from r950323, hadoop/hdfs/trunk/src/test/system/test/org/
    hadoop/hdfs/branches/branch-0.21/src/test/system/test/org/apache/
      - copied from r950323, hadoop/hdfs/trunk/src/test/system/test/org/apache/
    hadoop/hdfs/branches/branch-0.21/src/test/system/test/org/apache/hadoop/
      - copied from r950323, hadoop/hdfs/trunk/src/test/system/test/org/apache/hadoop/
    hadoop/hdfs/branches/branch-0.21/src/test/system/test/org/apache/hadoop/hdfs/
      - copied from r950323, hadoop/hdfs/trunk/src/test/system/test/org/apache/hadoop/hdfs/
    hadoop/hdfs/branches/branch-0.21/src/test/system/test/org/apache/hadoop/hdfs/TestHL040.java
      - copied unchanged from r950323, hadoop/hdfs/trunk/src/test/system/test/org/apache/hadoop/hdfs/TestHL040.java
Modified:
    hadoop/hdfs/branches/branch-0.21/   (props changed)
    hadoop/hdfs/branches/branch-0.21/CHANGES.txt
    hadoop/hdfs/branches/branch-0.21/build.xml   (contents, props changed)
    hadoop/hdfs/branches/branch-0.21/ivy.xml
    hadoop/hdfs/branches/branch-0.21/ivy/libraries.properties
    hadoop/hdfs/branches/branch-0.21/src/c++/libhdfs/   (props changed)
    hadoop/hdfs/branches/branch-0.21/src/contrib/hdfsproxy/   (props changed)
    hadoop/hdfs/branches/branch-0.21/src/java/   (props changed)
    hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInfo.java   (props changed)
    hadoop/hdfs/branches/branch-0.21/src/test/aop/build/aop.xml
    hadoop/hdfs/branches/branch-0.21/src/test/hdfs/   (props changed)
    hadoop/hdfs/branches/branch-0.21/src/webapps/datanode/   (props changed)
    hadoop/hdfs/branches/branch-0.21/src/webapps/hdfs/   (props changed)
    hadoop/hdfs/branches/branch-0.21/src/webapps/secondary/   (props changed)

Propchange: hadoop/hdfs/branches/branch-0.21/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Wed Jun  2 01:21:07 2010
@@ -1,4 +1,4 @@
 /hadoop/core/branches/branch-0.19/hdfs:713112
 /hadoop/hdfs/branches/HDFS-265:796829-820463
 /hadoop/hdfs/branches/branch-0.21:820487
-/hadoop/hdfs/trunk:947194
+/hadoop/hdfs/trunk:947194,950323
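
Each svn:mergeinfo entry records merged changes as source-path:revision-list, so the edit above marks r950323 as merged from trunk in addition to r947194. The current record for a working copy can be inspected with:

    svn propget svn:mergeinfo .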

Modified: hadoop/hdfs/branches/branch-0.21/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/CHANGES.txt?rev=950329&r1=950328&r2=950329&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/CHANGES.txt (original)
+++ hadoop/hdfs/branches/branch-0.21/CHANGES.txt Wed Jun  2 01:21:07 2010
@@ -45,6 +45,8 @@ Release 0.21.0 - Unreleased
 
   NEW FEATURES
 
+    HDFS-1134. Large-scale Automated Framework. (cos)
+
     HDFS-436. Introduce AspectJ framework for HDFS code and tests.
     (Konstantin Boudnik via szetszwo)
 

Modified: hadoop/hdfs/branches/branch-0.21/build.xml
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/build.xml?rev=950329&r1=950328&r2=950329&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/build.xml (original)
+++ hadoop/hdfs/branches/branch-0.21/build.xml Wed Jun  2 01:21:07 2010
@@ -28,6 +28,8 @@
  
   <property name="Name" value="Hadoop-Hdfs"/>
   <property name="name" value="hadoop-hdfs"/>
+  <!-- The aop.xml project.version property has to be changed synchronously -->
   <property name="version" value="0.21.0-SNAPSHOT"/>
   <property name="final.name" value="${name}-${version}"/>
   <property name="test.hdfs.final.name" value="${name}-test-${version}"/>
@@ -369,20 +371,11 @@
   </target>
 
   <target name="compile-hdfs-test" depends="compile-hdfs-classes, ivy-retrieve-test">
-    <mkdir dir="${test.hdfs.build.classes}"/>
-    <javac 
-      encoding="${build.encoding}" 
-      srcdir="${test.src.dir}/hdfs;${test.src.dir}/unit"
-      includes="org/apache/hadoop/**/*.java"
-      destdir="${test.hdfs.build.classes}"
-      debug="${javac.debug}"
-      optimize="${javac.optimize}"
-      target="${javac.version}"
-      source="${javac.version}"
-      deprecation="${javac.deprecation}">
-      <compilerarg line="${javac.args} ${javac.args.warnings}" />
-      <classpath refid="test.classpath"/>
-    </javac>
+    <macro-compile-hdfs-test
+      target.dir="${test.hdfs.build.classes}"
+      source.dir="${test.src.dir}/hdfs;${test.src.dir}/unit"
+      dest.dir="${test.hdfs.build.classes}"
+      classpath="test.classpath"/>
 
     <delete dir="${test.cache.data}"/>
     <mkdir dir="${test.cache.data}"/>
@@ -398,6 +391,29 @@
     <copy file="${test.src.dir}/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/fsimageV19" todir="${test.cache.data}"/>
   </target>
 
+  <macrodef name="macro-compile-hdfs-test">
+    <attribute name="target.dir"/>
+    <attribute name="source.dir"/>
+    <attribute name="dest.dir"/>
+    <attribute name="classpath"/>
+    <sequential>
+      <mkdir dir="@{target.dir}"/>
+      <javac
+        encoding="${build.encoding}"
+        srcdir="@{source.dir}"
+        includes="org/apache/hadoop/**/*.java"
+        destdir="@{dest.dir}"
+        debug="${javac.debug}"
+        optimize="${javac.optimize}"
+        target="${javac.version}"
+        source="${javac.version}"
+        deprecation="${javac.deprecation}">
+        <compilerarg line="${javac.args} ${javac.args.warnings}"/>
+        <classpath refid="@{classpath}"/>
+      </javac>
+    </sequential>
+  </macrodef>
+
   <!-- ================================================================== -->
   <!-- Make hadoop-test.jar                                               -->
   <!-- ================================================================== -->
@@ -409,6 +425,15 @@
     <copy todir="${test.build.classes}">
       <fileset dir="${test.hdfs.build.classes}"/>
     </copy>
+    <subant buildpath="build.xml" target="-do-jar-test">
+    </subant>
+    <jar jarfile="${hadoop-hdfs-test-sources.jar}">
+      <fileset dir="${test.src.dir}/hdfs" includes="org/apache/hadoop/**/*.java" />
+      <fileset dir="${test.src.dir}/unit" includes="org/apache/hadoop/**/*.java" />
+    </jar>
+  </target>
+
+  <target name="-do-jar-test">
     <jar jarfile="${build.dir}/${test.hdfs.final.name}.jar"
          basedir="${test.build.classes}">
          <manifest>
@@ -422,10 +447,6 @@
          </manifest>
     </jar>
 
-    <jar jarfile="${hadoop-hdfs-test-sources.jar}">
-      <fileset dir="${test.src.dir}/hdfs" includes="org/apache/hadoop/**/*.java" />
-      <fileset dir="${test.src.dir}/unit" includes="org/apache/hadoop/**/*.java" />
-    </jar>
   </target>
 
   <!-- ================================================================== -->
@@ -458,6 +479,7 @@
     description="Make hadoop-fi.jar">
     <macro-jar-fault-inject
       target.name="jar"
+      build.dir="${build-fi.dir}"
       jar.final.name="final.name"
       jar.final.value="${final.name}-fi" />
   </target>
@@ -514,15 +536,19 @@
   <macrodef name="macro-test-runner">
     <attribute name="test.file" />
     <attribute name="suite.type" />
+    <attribute name="classpath" />
+    <attribute name="test.dir" />
+    <attribute name="fileset.dir" />
+    <attribute name="hadoop.conf.dir.deployed" default="" />
     <sequential>
-      <delete dir="${test.build.data}"/>
-      <mkdir dir="${test.build.data}"/>
-      <delete dir="${test.log.dir}"/>
-      <mkdir dir="${test.log.dir}"/>
+      <delete dir="@{test.dir}/data"/>
+      <mkdir dir="@{test.dir}/data"/>
+      <delete dir="@{test.dir}/logs"/>
+      <mkdir dir="@{test.dir}/logs"/>
       <copy file="${test.src.dir}/hadoop-policy.xml" 
-        todir="${test.build.extraconf}" />
+        todir="@{test.dir}/extraconf" />
       <copy file="${test.src.dir}/fi-site.xml"
-        todir="${test.build.extraconf}" />
+        todir="@{test.dir}/extraconf" />
       <junit showoutput="${test.output}"
         printsummary="${test.junit.printsummary}"
         haltonfailure="${test.junit.haltonfailure}"
@@ -531,42 +557,47 @@
         maxmemory="${test.junit.maxmemory}"
         dir="${basedir}" timeout="${test.timeout}"
         errorProperty="tests.failed" failureProperty="tests.failed">
-        <sysproperty key="test.build.data" value="${test.build.data}"/>
+        <sysproperty key="test.build.data" value="@{test.dir}/data"/>
         <sysproperty key="test.cache.data" value="${test.cache.data}"/>     
         <sysproperty key="test.debug.data" value="${test.debug.data}"/>
-        <sysproperty key="hadoop.log.dir" value="${test.log.dir}"/>
-        <sysproperty key="test.src.dir" value="${test.src.dir}"/>
-        <sysproperty key="test.build.extraconf" value="${test.build.extraconf}" />
+        <sysproperty key="hadoop.log.dir" value="@{test.dir}/logs"/>
+        <sysproperty key="test.src.dir" value="@{fileset.dir}"/>
+        <sysproperty key="test.build.extraconf" value="@{test.dir}/extraconf" />
         <sysproperty key="hadoop.policy.file" value="hadoop-policy.xml"/>
         <sysproperty key="hdfs.rpc.engine" value="${test.hdfs.rpc.engine}"/>
-        <classpath refid="test.classpath"/>
+        <classpath refid="@{classpath}"/>
         <!-- Pass probability specifications to the spawn JVM -->
         <syspropertyset id="FaultProbabilityProperties">
           <propertyref regex="fi.*"/>
         </syspropertyset>
+        <sysproperty key="test.system.hdrc.deployed.hadoopconfdir"
+                     value="@{hadoop.conf.dir.deployed}" />
         <formatter type="${test.junit.output.format}" />
-        <batchtest todir="${test.build.dir}" if="tests.notestcase">
-          <fileset dir="${test.src.dir}/@{suite.type}" excludes="**/${test.exclude}.java">
+        <batchtest todir="@{test.dir}" if="tests.notestcase">
+          <fileset dir="@{fileset.dir}/@{suite.type}"
+            excludes="**/${test.exclude}.java **/${test.exclude}.java
+              aop/** system/**">
              <patternset>
                <includesfile name="@{test.file}"/>
              </patternset>
          </fileset>
         </batchtest>
-        <batchtest todir="${test.build.dir}" if="tests.notestcase.fi">
-          <fileset dir="${test.src.dir}/aop"
+        <batchtest todir="@{test.dir}" if="tests.notestcase.fi">
+          <fileset dir="@{fileset.dir}/aop"
             includes="**/${test.include}.java"
             excludes="**/${test.exclude}.java" />
         </batchtest>
-        <batchtest todir="${test.build.dir}" if="tests.testcase">
-          <fileset dir="${test.src.dir}/@{suite.type}" includes="**/${testcase}.java"/>
+        <batchtest todir="@{test.dir}" if="tests.testcase">
+          <fileset dir="@{fileset.dir}/@{suite.type}" includes="**/${testcase}.java"
+            excludes="aop/** system/**"/>
         </batchtest>
-        <batchtest todir="${test.build.dir}" if="tests.testcase.fi">
-          <fileset dir="${test.src.dir}/aop" includes="**/${testcase}.java"/>
+        <batchtest todir="@{test.dir}" if="tests.testcase.fi">
+          <fileset dir="@{fileset.dir}/aop" includes="**/${testcase}.java"/>
         </batchtest>
        <!--The following batch is for very special occasions only when
        non-FI tests need to be executed against an FI environment -->
-        <batchtest todir="${test.build.dir}" if="tests.testcaseonly.fi">
-          <fileset dir="${test.src.dir}/hdfs" includes="**/${testcase}.java"/>
+        <batchtest todir="@{test.dir}" if="tests.testcaseonly.fi">
+          <fileset dir="@{fileset.dir}/hdfs" includes="**/${testcase}.java"/>
         </batchtest>
       </junit>
       <antcall target="checkfailure"/>
@@ -574,15 +605,30 @@
   </macrodef>
 
   <target name="run-test-hdfs" depends="compile-hdfs-test" description="Run full set of hdfs unit tests">
-    <macro-test-runner test.file="${test.hdfs.all.tests.file}" suite.type="hdfs"/>
+    <macro-test-runner
+      test.file="${test.hdfs.all.tests.file}"
+      suite.type="hdfs"
+      classpath="${test.classpath.id}"
+      test.dir="${test.build.dir}"
+      fileset.dir="${test.src.dir}"/>
   </target>
 
   <target name="run-commit-test" depends="compile-hdfs-test" description="Run approximate 10-minute set of unit tests prior to commiting">
-     <macro-test-runner test.file="${test.hdfs.commit.tests.file}" suite.type="hdfs"/>
+     <macro-test-runner
+       test.file="${test.hdfs.commit.tests.file}"
+       suite.type="hdfs"
+       classpath="${test.classpath.id}"
+       test.dir="${test.build.dir}"
+       fileset.dir="${test.src.dir}"/>
   </target>
 
   <target name="run-test-unit" depends="compile-hdfs-test" description="Run unit tests">
-    <macro-test-runner test.file="${test.hdfs.all.tests.file}" suite.type="unit"/>
+    <macro-test-runner
+      test.file="${test.hdfs.all.tests.file}"
+      suite.type="unit"
+      classpath="${test.classpath.id}"
+      test.dir="${test.build.dir}"
+      fileset.dir="${test.src.dir}"/>
   </target>
 
   <target name="checkfailure" if="tests.failed">
@@ -597,7 +643,7 @@
        <property name="clover.jar" value="${clover.jar}"/>
        <fileset file="${contrib.dir}/build.xml"/>
     </subant> 
-  </target> 
+  </target>
 
   <target name="test-core" description="Run hdfs unit tests">
     <delete file="${test.build.dir}/testsfailed"/>
@@ -978,6 +1024,42 @@
 
   </target>
 
+  <target name="binary-system" depends="bin-package, jar-system, jar-test-system"
+     description="make system test package for deployment">
+    <copy todir="${system-test-build-dir}/${final.name}">
+      <fileset dir="${dist.dir}">
+      </fileset>
+    </copy>
+    <copy todir="${system-test-build-dir}/${final.name}/conf">
+      <fileset dir="${test.src.dir}/system/conf/"/>
+    </copy>
+    <copy tofile="${system-test-build-dir}/${final.name}/lib/hadoop-common-${version}.jar"
+      file="${build-fi.dir}/ivy/lib/${ant.project.name}/system/hadoop-common-${herriot.suffix}-${version}.jar"
+      overwrite="true"/>
+    <copy tofile="${system-test-build-dir}/${final.name}/${final.name}.jar"
+      file="${system-test-build-dir}/${instrumented.final.name}.jar" overwrite="true"/>
+    <copy tofile="${system-test-build-dir}/${final.name}/${final.name}-sources.jar"
+      file="${system-test-build-dir}/${instrumented.final.name}-sources.jar" overwrite="true"/>
+    <copy todir="${system-test-build-dir}/${final.name}"
+      file="${system-test-build-dir}/${name}-${herriot.suffix}-test-${version}.jar"/>
+    <copy todir="${system-test-build-dir}/${final.name}"
+      file="${system-test-build-dir}/${name}-${herriot.suffix}-test-${version}-sources.jar"/>
+    <macro_tar 
+      param.destfile="${system-test-build-dir}/${final.name}-bin.tar.gz">
+        <param.listofitems>
+          <tarfileset dir="${system-test-build-dir}" mode="664">
+            <exclude name="${final.name}/bin/*" />
+            <exclude name="${final.name}/src/**" />
+            <exclude name="${final.name}/docs/**" />
+            <include name="${final.name}/**" />
+          </tarfileset>
+          <tarfileset dir="${build.dir}" mode="755">
+            <include name="${final.name}/bin/*" />
+          </tarfileset>
+        </param.listofitems>
+      </macro_tar>
+  </target>
+  
   <target name="binary" depends="bin-package" description="Make tarball without source and documentation">
     <macro_tar param.destfile="${build.dir}/${final.name}-bin.tar.gz">
       <param.listofitems>
@@ -1015,12 +1097,13 @@
   <!-- ================================================================== -->
   <!-- Clean.  Delete the build files, and their directories              -->
   <!-- ================================================================== -->
-  <target name="clean" depends="clean-contrib" description="Clean.  Delete the build files, and their directories">
+  <target name="clean" depends="clean-contrib, clean-fi" description="Clean.  Delete the build files, and their directories">
     <delete dir="${build.dir}"/>
     <delete dir="${build-fi.dir}"/>
     <delete dir="${docs.src}/build"/>
     <delete file="${hadoop-hdfs.pom}"/>
     <delete file="${hadoop-hdfs-test.pom}"/>
+    <delete file="${hadoop-hdfs-instrumented.pom}"/>
   </target>
 
   <target name="veryclean" depends="clean-cache,clean" 
@@ -1032,6 +1115,8 @@
   <target name="clean-cache" depends="clean" description="Clean. Delete ivy cache">
     <delete dir="${user.home}/.ivy2/cache/org.apache.hadoop/hadoop-hdfs"/>
     <delete dir="${user.home}/.ivy2/cache/org.apache.hadoop/hadoop-hdfs-test"/>
+    <delete dir="${user.home}/.ivy2/cache/org.apache.hadoop/hadoop-hdfs-${herriot.suffix}"/>
+    <delete dir="${user.home}/.ivy2/cache/org.apache.hadoop/hadoop-hdfs-${herriot.suffix}-test"/>
   </target>
 
   <!-- ================================================================== -->
@@ -1282,7 +1367,8 @@
      </artifact:install>
   </target>
 
-  <target name="mvn-install" depends="mvn-taskdef,jar,jar-hdfs-test,set-version">
+  <target name="mvn-install" depends="mvn-taskdef,jar,jar-hdfs-test,set-version,
+    -mvn-system-install">
      <artifact:pom file="${hadoop-hdfs.pom}" id="hadoop.hdfs"/>
      <artifact:pom file="${hadoop-hdfs-test.pom}" id="hadoop.hdfs.test"/>
      <artifact:install file="${hadoop-hdfs.jar}">
@@ -1295,7 +1381,8 @@
      </artifact:install>
   </target>
 
-  <target name="mvn-deploy" depends="mvn-taskdef, jar, jar-hdfs-test, set-version">
+  <target name="mvn-deploy" depends="mvn-taskdef, jar, jar-hdfs-test, set-version,
+    -mvn-system-deploy">
      <property name="repourl" value="https://repository.apache.org/content/repositories/snapshots" />
      <artifact:pom file="${hadoop-hdfs.pom}" id="hadoop.hdfs"/>
      <artifact:pom file="${hadoop-hdfs-test.pom}" id="hadoop.hdfs.test"/>
@@ -1316,20 +1403,22 @@
   <target name="set-version">
     <delete file="${basedir}/ivy/hadoop-hdfs.xml"/>
     <delete file="${basedir}/ivy/hadoop-hdfs-test.xml"/>
+    <delete file="${hadoop-hdfs-instrumented.pom}"/>
+    <delete file="${hadoop-hdfs-instrumented-test.pom}"/>
     <copy file="${basedir}/ivy/hadoop-hdfs-template.xml" tofile="${basedir}/ivy/hadoop-hdfs.xml"/>
     <copy file="${basedir}/ivy/hadoop-hdfs-test-template.xml" tofile="${basedir}/ivy/hadoop-hdfs-test.xml"/>
+    <copy file="${basedir}/ivy/hadoop-hdfs-${herriot.suffix}-template.xml"
+      tofile="${hadoop-hdfs-instrumented.pom}"/>
+    <copy file="${basedir}/ivy/hadoop-hdfs-${herriot.suffix}-test-template.xml"
+      tofile="${hadoop-hdfs-instrumented-test.pom}"/>
     <replaceregexp byline="true">
       <regexp pattern="@version"/>
       <substitution expression="${version}"/>
       <fileset dir="${basedir}/ivy">
         <include name="hadoop-hdfs.xml"/>
-      </fileset>
-    </replaceregexp>
-    <replaceregexp byline="true">
-      <regexp pattern="@version"/>
-      <substitution expression="${version}"/>
-      <fileset dir="${basedir}/ivy">
         <include name="hadoop-hdfs-test.xml"/>
+        <include name="hadoop-hdfs-${herriot.suffix}.xml"/>
+        <include name="hadoop-hdfs-${herriot.suffix}-test.xml"/>
       </fileset>
     </replaceregexp>
   </target>
@@ -1403,6 +1492,11 @@
       log="${ivyresolvelog}"/>
   </target>
 
+  <target name="ivy-resolve-system" depends="ivy-init">
+    <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="system"
+      log="${ivyresolvelog}"/>
+  </target>
+
   <target name="ivy-retrieve" depends="ivy-resolve"
     description="Retrieve Ivy-managed artifacts">
     <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
@@ -1458,6 +1552,14 @@
     <ivy:cachepath pathid="releaseaudit-classpath" conf="releaseaudit"/>
   </target>
 
+  <target name="ivy-retrieve-system" depends="ivy-resolve-system"
+    description="Retrieve Ivy-managed artifacts for the system tests">
+    <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
+      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
+        log="${ivyretrievelog}"/>
+    <ivy:cachepath pathid="ivy-test.classpath" conf="system"/>
+  </target>
+
   <target name="ivy-report" depends="ivy-resolve-releaseaudit"
     description="Generate">
     <ivy:report todir="${build.ivy.report.dir}" settingsRef="${ant.project.name}.ivy.settings"/>
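
A note on the <macrodef> refactoring in this file: inside a macro body, @{attr} references are substituted textually at every call site, while ${prop} properties keep Ant's usual write-once semantics; that is what lets macro-compile-hdfs-test and macro-test-runner be reused for both the regular and the Herriot/system builds. A minimal self-contained sketch of the pattern (all names hypothetical):

    <project name="macrodef-demo" default="demo">
      <macrodef name="compile-into">
        <attribute name="dest.dir"/>
        <attribute name="classpath"/>
        <sequential>
          <!-- @{dest.dir} expands to a different value on each invocation -->
          <mkdir dir="@{dest.dir}"/>
          <echo message="compiling into @{dest.dir} against refid @{classpath}"/>
        </sequential>
      </macrodef>
      <target name="demo">
        <path id="test.classpath"/>
        <compile-into dest.dir="build/test/classes" classpath="test.classpath"/>
        <compile-into dest.dir="build-fi/system/test/classes" classpath="test.classpath"/>
      </target>
    </project>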

Propchange: hadoop/hdfs/branches/branch-0.21/build.xml
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Wed Jun  2 01:21:07 2010
@@ -2,4 +2,4 @@
 /hadoop/core/trunk/build.xml:779102
 /hadoop/hdfs/branches/HDFS-265/build.xml:796829-820463
 /hadoop/hdfs/branches/branch-0.21/build.xml:820487
-/hadoop/hdfs/trunk/build.xml:947194
+/hadoop/hdfs/trunk/build.xml:947194,950323

Modified: hadoop/hdfs/branches/branch-0.21/ivy.xml
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/ivy.xml?rev=950329&r1=950328&r2=950329&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/ivy.xml (original)
+++ hadoop/hdfs/branches/branch-0.21/ivy.xml Wed Jun  2 01:21:07 2010
@@ -39,6 +39,7 @@
     <conf name="common" visibility="private" extends="compile,runtime" description="common artifacts"/>
     <conf name="javadoc" visibility="private" description="artiracts required while performing doc generation" extends="common"/>
     <conf name="test" extends="common" visibility="private" description="the classpath needed to run tests"/>
+    <conf name="system" extends="test" visibility="private" description="the classpath needed to run system tests"/>
 
     <conf name="test-hdfswithmr" extends="test, common" visibility="private" description="the classpath needed to run tests"/>
 
@@ -56,6 +57,7 @@
   <dependencies>
     
     <dependency org="org.apache.hadoop" name="hadoop-common" rev="${hadoop-common.version}" conf="common->default"/>
+    <dependency org="org.apache.hadoop" name="hadoop-common-instrumented" rev="${hadoop-common.version}" conf="system->default"/>
     <dependency org="commons-logging" name="commons-logging" rev="${commons-logging.version}" conf="common->master"/>
     <dependency org="log4j" name="log4j" rev="${log4j.version}" conf="common->master"/>
     <dependency org="org.apache.hadoop" name="avro" rev="${avro.version}" conf="common->default"/>

Copied: hadoop/hdfs/branches/branch-0.21/ivy/hadoop-hdfs-instrumented-template.xml (from r950323, hadoop/hdfs/trunk/ivy/hadoop-hdfs-instrumented-template.xml)
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/ivy/hadoop-hdfs-instrumented-template.xml?p2=hadoop/hdfs/branches/branch-0.21/ivy/hadoop-hdfs-instrumented-template.xml&p1=hadoop/hdfs/trunk/ivy/hadoop-hdfs-instrumented-template.xml&r1=950323&r2=950329&rev=950329&view=diff
==============================================================================
--- hadoop/hdfs/trunk/ivy/hadoop-hdfs-instrumented-template.xml (original)
+++ hadoop/hdfs/branches/branch-0.21/ivy/hadoop-hdfs-instrumented-template.xml Wed Jun  2 01:21:07 2010
@@ -33,7 +33,7 @@
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-common-instrumented</artifactId>
-      <version>0.22.0-SNAPSHOT</version>
+      <version>0.21.0-SNAPSHOT</version>
     </dependency>
   </dependencies>
 </project>

Modified: hadoop/hdfs/branches/branch-0.21/ivy/libraries.properties
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/ivy/libraries.properties?rev=950329&r1=950328&r2=950329&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/ivy/libraries.properties (original)
+++ hadoop/hdfs/branches/branch-0.21/ivy/libraries.properties Wed Jun  2 01:21:07 2010
@@ -75,6 +75,7 @@ slf4j-log4j12.version=1.4.3
 xmlenc.version=0.52
 xerces.version=1.4.4
 
+#This property has to be updated synchronously with aop.xml
 aspectj.version=1.6.5
 
 mockito-all.version=1.8.2

Propchange: hadoop/hdfs/branches/branch-0.21/src/c++/libhdfs/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Wed Jun  2 01:21:07 2010
@@ -1,3 +1,3 @@
 /hadoop/core/branches/branch-0.19/mapred/src/c++/libhdfs:713112
 /hadoop/core/trunk/src/c++/libhdfs:776175-784663
-/hadoop/hdfs/trunk/src/c++/libhdfs:947194
+/hadoop/hdfs/trunk/src/c++/libhdfs:947194,950323

Propchange: hadoop/hdfs/branches/branch-0.21/src/contrib/hdfsproxy/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Wed Jun  2 01:21:07 2010
@@ -2,4 +2,4 @@
 /hadoop/core/trunk/src/contrib/hdfsproxy:776175-784663
 /hadoop/hdfs/branches/HDFS-265/src/contrib/hdfsproxy:796829-820463
 /hadoop/hdfs/branches/branch-0.21/src/contrib/hdfsproxy:820487
-/hadoop/hdfs/trunk/src/contrib/hdfsproxy:947194
+/hadoop/hdfs/trunk/src/contrib/hdfsproxy:947194,950323

Propchange: hadoop/hdfs/branches/branch-0.21/src/java/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Wed Jun  2 01:21:07 2010
@@ -2,4 +2,4 @@
 /hadoop/core/trunk/src/hdfs:776175-785643,785929-786278
 /hadoop/hdfs/branches/HDFS-265/src/java:796829-820463
 /hadoop/hdfs/branches/branch-0.21/src/java:820487
-/hadoop/hdfs/trunk/src/java:947194
+/hadoop/hdfs/trunk/src/java:947194,950323

Propchange: hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInfo.java
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Wed Jun  2 01:21:07 2010
@@ -4,4 +4,4 @@
 /hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/datanode/ReplicaInfo.java:776175-785643,785929-786278
 /hadoop/hdfs/branches/HDFS-265/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInfo.java:796829-820463
 /hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInfo.java:820487
-/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInfo.java:947194
+/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInfo.java:947194,950323

Modified: hadoop/hdfs/branches/branch-0.21/src/test/aop/build/aop.xml
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/src/test/aop/build/aop.xml?rev=950329&r1=950328&r2=950329&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/src/test/aop/build/aop.xml (original)
+++ hadoop/hdfs/branches/branch-0.21/src/test/aop/build/aop.xml Wed Jun  2 01:21:07 2010
@@ -14,13 +14,42 @@
    See the License for the specific language governing permissions and
    limitations under the License.
 -->
-<project name="aspects">
+<project name="aspects"
+  xmlns:artifact="urn:maven-artifact-ant">
+  <!-- The following values are duplicated and have to be updated elsewhere too -->
+  <!-- TODO this version has to be updated synchronously with Ivy -->
+  <property name="aspectversion" value="1.6.5"/>
+  <!-- TODO this has to be changed synchronously with build.xml version prop.-->
+  <!-- this works around test-patch setting its own 'version' property -->
+  <property name="project.version" value="0.21.0-SNAPSHOT"/>
+
+  <!-- Properties common for all fault injections -->
   <property name="build-fi.dir" value="${basedir}/build-fi"/>
   <property name="hadoop-fi.jar" location="${build.dir}/${final.name}-fi.jar" />
   <property name="compile-inject.output" value="${build-fi.dir}/compile-fi.log"/>
-  <property name="aspectversion" value="1.6.5"/>
   <property file="${basedir}/build.properties"/>
 
+  <!-- Properties related to system fault injection and tests -->
+  <property name="system-test-build-dir" value="${build-fi.dir}/system"/>
+  <!-- This variable is set by the respective injection targets -->
+  <property name="hadoop.instrumented.jar" value=""/>
+
+  <!-- Properties specifically for system fault-injections and system tests -->
+  <property name="herriot.suffix" value="instrumented"/>
+  <property name="instrumented.final.name"
+            value="${name}-${herriot.suffix}-${version}"/>
+  <property name="hadoop-hdfs-instrumented.pom"
+            location="${ivy.dir}/hadoop-hdfs-${herriot.suffix}.xml" />
+  <property name="hadoop-hdfs-instrumented-test.pom"
+            location="${ivy.dir}/hadoop-hdfs-${herriot.suffix}-test.xml" />
+  <property name="hadoop-hdfs-instrumented.jar"
+            location="${system-test-build-dir}/${name}-${herriot.suffix}-${version}.jar" />
+  <property name="hadoop-hdfs-instrumented-sources.jar"
+            location="${system-test-build-dir}/${name}-${herriot.suffix}-${version}-sources.jar" />
+  <property name="hadoop-hdfs-instrumented-test.jar"
+            location="${system-test-build-dir}/${name}-${herriot.suffix}-test-${version}.jar" />
+  <property name="hadoop-hdfs-instrumented-test-sources.jar"
+            location="${system-test-build-dir}/${name}-${herriot.suffix}-test-${version}-sources.jar" />
   <!--All Fault Injection (FI) related targets are located in this section -->
     
   <target name="clean-fi">
@@ -39,21 +68,27 @@
     <taskdef
       resource="org/aspectj/tools/ant/taskdefs/aspectjTaskdefs.properties">
       <classpath>
-        <pathelement 
+        <pathelement
           location="${common.ivy.lib.dir}/aspectjtools-${aspectversion}.jar"/>
       </classpath>
     </taskdef>
     <echo message="Start weaving aspects in place"/>
+    <path id="aspect.path">
+      <pathelement location="${hadoop.instrumented.jar}"/>
+    </path>
     <iajc
       encoding="${build.encoding}" 
-      srcdir="${java.src.dir};${build.src};${test.src.dir}/aop" 
+      srcdir="${java.src.dir};${build.src};${src.dir.path}"
       includes="org/apache/hadoop/**/*.java, org/apache/hadoop/**/*.aj"
-      excludes="org/apache/hadoop/record/**/*"
-      destDir="${build.classes}"
+      excludes="org/apache/hadoop/classification/tools/**/*, org/apache/hadoop/record/**/*"
+      destDir="${dest.dir}"
       debug="${javac.debug}"
       target="${javac.version}"
       source="${javac.version}"
-      deprecation="${javac.deprecation}">
+      deprecation="${javac.deprecation}"
+      fork="true"
+      maxmem="256m">
+      <aspectpath refid="aspect.path"/>
       <classpath refid="test.classpath"/>
     </iajc>
     <loadfile property="injection.failure" srcfile="${compile-inject.output}">
@@ -69,15 +104,122 @@
     <echo message="Weaving of aspects is finished"/>
   </target>
 
+  <!-- Classpath for running system tests -->
+  <path id="test.system.classpath">
+        <pathelement location="${hadoop.conf.dir.deployed}" />
+        <pathelement location="${system-test-build-dir}/test/extraconf" />
+        <pathelement location="${system-test-build-dir}/test/classes" />
+        <pathelement location="${system-test-build-dir}/classes" />
+        <pathelement location="${test.src.dir}" />
+        <pathelement location="${build-fi.dir}" />
+        <pathelement location="${build-fi.dir}/tools" />
+        <pathelement path="${clover.jar}" />
+        <fileset dir="${system-test-build-dir}">
+           <include name="**/*.jar" />
+           <exclude name="**/excluded/" />
+         </fileset>
+        <path refid="classpath" />
+  </path>
+
+  <!-- ================ -->
+  <!-- run system tests -->
+  <!-- ================ -->
+  <target name="test-system" depends="ivy-retrieve-common, ivy-retrieve-system"
+    description="Run system tests">
+    <subant buildpath="build.xml" target="jar-test-system"/>
+    <macro-test-runner test.file="${test.hdfs.all.tests.file}"
+                       suite.type="system/test"
+                       classpath="test.system.classpath"
+                       test.dir="${system-test-build-dir}/test"
+                       fileset.dir="${test.src.dir}"
+                       hadoop.conf.dir.deployed="${hadoop.conf.dir.deployed}">
+    </macro-test-runner>
+  </target>
+
   <target name="injectfaults" 
   	description="Instrument classes with faults and other AOP advices">
     <!--mkdir to prevent <subant> failure in case the folder has been removed-->
     <mkdir dir="${build-fi.dir}"/>
     <delete file="${compile-inject.output}"/>
-    <subant buildpath="${basedir}" target="compile-fault-inject"
-    	output="${compile-inject.output}">
+    <weave-injectfault-aspects dest.dir="${build-fi.dir}/classes"
+                               src.dir="${test.src.dir}/aop"
+      aspects.jar="${build-fi.dir}/ivy/lib/${ant.project.name}/test/hadoop-common-${project.version}.jar">
+    </weave-injectfault-aspects>
+  </target>
+
+  <!-- =============================================================== -->
+  <!-- Create the instrumented hadoop-hdfs jar that has to be deployed -->
+  <!-- on the cluster for system tests                                 -->
+  <!-- =============================================================== -->
+  <target name="jar-system"
+          depends="inject-system-faults"
+          description="Make hadoop-hdfs-instrumented.jar with system injections.">
+    <macro-jar-fault-inject target.name="jar"
+      build.dir="${system-test-build-dir}"
+      jar.final.name="final.name"
+      jar.final.value="${instrumented.final.name}">
+    </macro-jar-fault-inject>
+    <jar jarfile="${system-test-build-dir}/${instrumented.final.name}-sources.jar"
+      update="yes">
+      <fileset dir="${test.src.dir}/system/java" includes="org/apache/hadoop/**/*.java" />
+      <fileset dir="${test.src.dir}/system/aop" includes="org/apache/hadoop/**/*.aj" />
+    </jar>
+  </target>
+
+  <target name="jar-test-system" depends="inject-system-faults, compile-test-system"
+    description="Make hadoop-hdfs-instrumented-test.jar with system injections.">
+    <subant buildpath="build.xml" target="-do-jar-test">
+      <property name="build.dir" value="${system-test-build-dir}"/>
+      <property name="test.hdfs.final.name" value="${name}-${herriot.suffix}-test-${version}"/>
+      <property name="test.build.classes"
+        value="${system-test-build-dir}/test/classes"/>
+    </subant>
+    <jar jarfile="${hadoop-hdfs-instrumented-test-sources.jar}">
+      <fileset dir="${test.src.dir}/system/test" includes="org/apache/hadoop/**/*.java" />
+    </jar>
+  </target>
+
+  <target name="compile-test-system" description="Compiles system tests">
+    <subant buildpath="build.xml" target="-compile-test-system.wrapper">
+      <property name="build.dir" value="${system-test-build-dir}"/>
+    </subant>
+  </target>
+
+  <target name="-compile-test-system.wrapper" depends="ivy-retrieve-common, ivy-retrieve-system">
+    <macro-compile-hdfs-test
+      target.dir="${system-test-build-dir}/test/classes"
+      source.dir="${test.src.dir}/system/test"
+      dest.dir="${system-test-build-dir}/test/classes"
+      classpath="test.classpath"/>
+  </target>
+
+  <macrodef name="weave-injectfault-aspects">
+    <attribute name="dest.dir" />
+    <attribute name="src.dir" />
+    <attribute name="aspects.jar"/>
+    <sequential>
+      <subant buildpath="build.xml" target="compile-fault-inject"
+        output="${compile-inject.output}">
+        <property name="build.dir" value="${build-fi.dir}" />
+        <property name="src.dir.path" value="@{src.dir}" />
+        <property name="dest.dir" value="@{dest.dir}" />
+        <property name="hadoop.instrumented.jar" value="@{aspects.jar}"/>
+      </subant>
+    </sequential>
+  </macrodef>
+
+  <target name="inject-system-faults"
+          description="Inject system faults">
+    <property name="build-fi.dir" value="${system-test-build-dir}" />
+    <mkdir dir="${build-fi.dir}"/>
+    <delete file="${compile-inject.output}"/>
+    <subant buildpath="build.xml" target="ivy-retrieve-system">
       <property name="build.dir" value="${build-fi.dir}"/>
     </subant>
+    <weave-injectfault-aspects dest.dir="${system-test-build-dir}/classes"
+                               src.dir="${test.src.dir}/system/java;${test.src.dir}/system/aop"
+      aspects.jar="${build-fi.dir}/ivy/lib/${ant.project.name}/system/hadoop-common-${herriot.suffix}-${project.version}.jar">
+    </weave-injectfault-aspects>
   </target>
 
   <macrodef name="macro-run-tests-fault-inject">
@@ -99,11 +241,12 @@
   <!-- ================================================================== -->
   <macrodef name="macro-jar-fault-inject">
     <attribute name="target.name" />
+    <attribute name="build.dir" />
     <attribute name="jar.final.name" />
     <attribute name="jar.final.value" />
     <sequential>
       <subant buildpath="build.xml" target="@{target.name}">
-        <property name="build.dir" value="${build-fi.dir}"/>
+        <property name="build.dir" value="@{build.dir}"/>
         <property name="@{jar.final.name}" value="@{jar.final.value}"/>
         <property name="jar.extra.properties.list" 
         	  value="${test.src.dir}/fi-site.xml" />
@@ -129,4 +272,78 @@
   </macrodef>
 
   <!--End of Fault Injection (FI) related section-->
+
+  <!-- Start of cluster controller binary target -->
+  <property name="runAs.src"
+    value ="${test.src.dir}/system/c++/runAs"/>
+  <property name="runAs.build.dir"
+    value="${system-test-build-dir}/c++-build"/>
+  <property name="runAs.configure.script"
+    value="${runAs.build.dir}/configure"/>
+  <target name="init-runAs-build">
+    <condition property="runAs.parameters.passed">
+      <not>
+        <equals arg1="${run-as.hadoop.home.dir}"
+          arg2="$${run-as.hadoop.home.dir}"/>
+      </not>
+    </condition>
+    <fail unless="runAs.parameters.passed"
+          message="Required parameters run-as.hadoop.home.dir not passed to the build"/>
+    <mkdir dir="${runAs.build.dir}"/>
+    <copy todir="${runAs.build.dir}" overwrite="true">
+      <fileset dir="${runAs.src}" includes="**/*"/>
+    </copy>
+    <chmod perm="+x" file="${runAs.configure.script}">
+    </chmod>
+  </target>
+
+  <target name="configure-runAs"
+    depends="init-runAs-build">
+    <exec executable="${runAs.configure.script}"
+      dir="${runAs.build.dir}" failonerror="true">
+      <arg value="--with-home=${run-as.hadoop.home.dir}"/>
+    </exec>
+  </target>
+  <target name="run-as" depends="configure-runAs">
+    <exec executable="${make.cmd}" dir="${runAs.build.dir}"
+        searchpath="yes" failonerror="yes">
+     <arg value="all" />
+    </exec>
+  </target>
+  <!-- End of cluster controller binary target -->
+  <!-- Install Herriot artifacts to the local Maven -->
+  <target name="-mvn-system-install" depends="mvn-taskdef, jar-system, jar-test-system">
+    <artifact:pom file="${hadoop-hdfs-instrumented.pom}"
+                  id="hadoop.hdfs.${herriot.suffix}"/>
+    <artifact:pom file="${hadoop-hdfs-instrumented-test.pom}"
+                  id="hadoop.hdfs.${herriot.suffix}.test"/>
+    <artifact:install file="${hadoop-hdfs-instrumented.jar}">
+      <pom refid="hadoop.hdfs.${herriot.suffix}"/>
+      <attach file="${hadoop-hdfs-instrumented-sources.jar}" classifier="sources" />
+    </artifact:install>
+    <artifact:install file="${hadoop-hdfs-instrumented-test.jar}">
+      <pom refid="hadoop.hdfs.${herriot.suffix}.test"/>
+      <attach file="${hadoop-hdfs-instrumented-test-sources.jar}" classifier="sources" />
+    </artifact:install>
+  </target>
+  <target name="-mvn-system-deploy" depends="mvn-taskdef, jar-system, jar-test-system">
+    <property name="repourl" value="https://repository.apache.org/content/repositories/snapshots" />
+    <artifact:pom file="${hadoop-hdfs-instrumented.pom}"
+                  id="hadoop.hdfs.${herriot.suffix}"/>
+     <!--<artifact:pom file="${hadoop-hdfs-instrumented-test.pom}"-->
+                   <!--id="hadoop.hdfs.system.test.test"/>-->
+
+    <artifact:install-provider artifactId="wagon-http" version="1.0-beta-2"/>
+    <artifact:deploy file="${hadoop-hdfs-instrumented.jar}">
+      <remoteRepository id="apache.snapshots.https" url="${repourl}"/>
+      <pom refid="hadoop.hdfs.${herriot.suffix}"/>
+      <attach file="${hadoop-hdfs-instrumented-sources.jar}" classifier="sources" />
+    </artifact:deploy>
+    <artifact:deploy file="${hadoop-hdfs-instrumented-test.jar}">
+      <remoteRepository id="apache.snapshots.https" url="${repourl}"/>
+      <pom refid="hadoop.hdfs.${herriot.suffix}.test"/>
+      <attach file="${hadoop-hdfs-instrumented-test-sources.jar}" classifier="sources" />
+    </artifact:deploy>
+  </target>
+  <!-- End of Maven -->
 </project>
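
One idiom in the new init-runAs-build target deserves a note: Ant leaves ${run-as.hadoop.home.dir} unexpanded when the property is undefined, so comparing it against the "$$"-escaped literal detects a missing build parameter. A minimal sketch with a hypothetical property name:

    <project name="require-prop-demo" default="check">
      <target name="check">
        <condition property="prop.passed">
          <not>
            <!-- if foo.home.dir is unset, "${foo.home.dir}" stays literal
                 and therefore equals the escaped "$${foo.home.dir}" -->
            <equals arg1="${foo.home.dir}" arg2="$${foo.home.dir}"/>
          </not>
        </condition>
        <fail unless="prop.passed"
              message="Pass -Dfoo.home.dir=/path/to/hadoop to the build"/>
      </target>
    </project>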

Propchange: hadoop/hdfs/branches/branch-0.21/src/test/hdfs/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Wed Jun  2 01:21:07 2010
@@ -2,4 +2,4 @@
 /hadoop/core/trunk/src/test/hdfs:776175-785643
 /hadoop/hdfs/branches/HDFS-265/src/test/hdfs:796829-820463
 /hadoop/hdfs/branches/branch-0.21/src/test/hdfs:820487
-/hadoop/hdfs/trunk/src/test/hdfs:947194
+/hadoop/hdfs/trunk/src/test/hdfs:947194,950323

Copied: hadoop/hdfs/branches/branch-0.21/src/test/system/java/org/apache/hadoop/hdfs/test/system/DNProtocol.java (from r950323, hadoop/hdfs/trunk/src/test/system/java/org/apache/hadoop/hdfs/test/system/DNProtocol.java)
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/src/test/system/java/org/apache/hadoop/hdfs/test/system/DNProtocol.java?p2=hadoop/hdfs/branches/branch-0.21/src/test/system/java/org/apache/hadoop/hdfs/test/system/DNProtocol.java&p1=hadoop/hdfs/trunk/src/test/system/java/org/apache/hadoop/hdfs/test/system/DNProtocol.java&r1=950323&r2=950329&rev=950329&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/test/system/java/org/apache/hadoop/hdfs/test/system/DNProtocol.java (original)
+++ hadoop/hdfs/branches/branch-0.21/src/test/system/java/org/apache/hadoop/hdfs/test/system/DNProtocol.java Wed Jun  2 01:21:07 2010
@@ -19,18 +19,22 @@
 package org.apache.hadoop.hdfs.test.system;
 
 import org.apache.hadoop.hdfs.DFSConfigKeys;
-import org.apache.hadoop.security.KerberosInfo;
 import org.apache.hadoop.test.system.DaemonProtocol;
 
 /**
  * Client side API exposed from Datanode.
  * Actual implementations are likely to be injected
- *
+ */
+
+/* KerberosInfo class hasn't been backported from trunk yet, thus
+ * I'm commenting this out until a decision is made
  * The protocol has to be annotated so KerberosInfo can be filled in during
  * creation of an ipc.Client connection
- */
+
+import org.apache.hadoop.security.KerberosInfo;
 @KerberosInfo(
     serverPrincipal = DFSConfigKeys.DFS_DATANODE_USER_NAME_KEY)
+ */
 public interface DNProtocol extends DaemonProtocol {
   public static final long versionID = 1L;
 }
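
For reference, once KerberosInfo is backported, the interface can be restored to the trunk form preserved in the comment above, roughly:

    import org.apache.hadoop.hdfs.DFSConfigKeys;
    import org.apache.hadoop.security.KerberosInfo;
    import org.apache.hadoop.test.system.DaemonProtocol;

    /**
     * Client side API exposed from Datanode.
     * The protocol is annotated so KerberosInfo can be filled in during
     * creation of an ipc.Client connection.
     */
    @KerberosInfo(
        serverPrincipal = DFSConfigKeys.DFS_DATANODE_USER_NAME_KEY)
    public interface DNProtocol extends DaemonProtocol {
      public static final long versionID = 1L;
    }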

Copied: hadoop/hdfs/branches/branch-0.21/src/test/system/java/org/apache/hadoop/hdfs/test/system/NNProtocol.java (from r950323, hadoop/hdfs/trunk/src/test/system/java/org/apache/hadoop/hdfs/test/system/NNProtocol.java)
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/src/test/system/java/org/apache/hadoop/hdfs/test/system/NNProtocol.java?p2=hadoop/hdfs/branches/branch-0.21/src/test/system/java/org/apache/hadoop/hdfs/test/system/NNProtocol.java&p1=hadoop/hdfs/trunk/src/test/system/java/org/apache/hadoop/hdfs/test/system/NNProtocol.java&r1=950323&r2=950329&rev=950329&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/test/system/java/org/apache/hadoop/hdfs/test/system/NNProtocol.java (original)
+++ hadoop/hdfs/branches/branch-0.21/src/test/system/java/org/apache/hadoop/hdfs/test/system/NNProtocol.java Wed Jun  2 01:21:07 2010
@@ -19,18 +19,21 @@
 package org.apache.hadoop.hdfs.test.system;
 
 import org.apache.hadoop.hdfs.DFSConfigKeys;
-import org.apache.hadoop.security.KerberosInfo;
 import org.apache.hadoop.test.system.DaemonProtocol;
 
 /**
  * Client side API exposed from Namenode.
  * Actual implementations are likely to be injected
- *
+ */
+
+/* KerberosInfo class hasn't been backported from trunk yet, thus
+ * I'm commenting this out until a decision is made
  * The protocol has to be annotated so KerberosInfo can be filled in during
  * creation of an ipc.Client connection
- */
+import org.apache.hadoop.security.KerberosInfo;
 @KerberosInfo(
     serverPrincipal = DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY)
+ */
 public interface NNProtocol extends DaemonProtocol {
   public static final long versionID = 1L;
 }

Propchange: hadoop/hdfs/branches/branch-0.21/src/webapps/datanode/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Wed Jun  2 01:21:07 2010
@@ -2,4 +2,4 @@
 /hadoop/core/trunk/src/webapps/datanode:776175-784663
 /hadoop/hdfs/branches/HDFS-265/src/webapps/datanode:796829-820463
 /hadoop/hdfs/branches/branch-0.21/src/webapps/datanode:820487
-/hadoop/hdfs/trunk/src/webapps/datanode:947194
+/hadoop/hdfs/trunk/src/webapps/datanode:947194,950323

Propchange: hadoop/hdfs/branches/branch-0.21/src/webapps/hdfs/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Wed Jun  2 01:21:07 2010
@@ -2,4 +2,4 @@
 /hadoop/core/trunk/src/webapps/hdfs:776175-784663
 /hadoop/hdfs/branches/HDFS-265/src/webapps/hdfs:796829-820463
 /hadoop/hdfs/branches/branch-0.21/src/webapps/hdfs:820487
-/hadoop/hdfs/trunk/src/webapps/hdfs:947194
+/hadoop/hdfs/trunk/src/webapps/hdfs:947194,950323

Propchange: hadoop/hdfs/branches/branch-0.21/src/webapps/secondary/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Wed Jun  2 01:21:07 2010
@@ -2,4 +2,4 @@
 /hadoop/core/trunk/src/webapps/secondary:776175-784663
 /hadoop/hdfs/branches/HDFS-265/src/webapps/secondary:796829-820463
 /hadoop/hdfs/branches/branch-0.21/src/webapps/secondary:820487
-/hadoop/hdfs/trunk/src/webapps/secondary:947194
+/hadoop/hdfs/trunk/src/webapps/secondary:947194,950323


