hadoop-common-commits mailing list archives

From: omal...@apache.org
Subject: svn commit: r1077176 [1/2] - in /hadoop/common/branches/branch-0.20-security-patches: ./ src/ src/test/aop/build/ src/test/system/ src/test/system/aop/ src/test/system/aop/org/ src/test/system/aop/org/apache/ src/test/system/aop/org/apache/hadoop/ src/...
Date: Fri, 04 Mar 2011 03:48:50 GMT
Author: omalley
Date: Fri Mar  4 03:48:49 2011
New Revision: 1077176

URL: http://svn.apache.org/viewvc?rev=1077176&view=rev
Log:
commit cba9e3807d2cc5d427d343a643ffd56362765646
Author: Konstantin Boudnik <cos@goodenter-lm.local>
Date:   Fri Feb 19 14:35:02 2010 -0800

    Merges yahoo-hadoop-0.20.1xx and yahoo-hadoop-0.20-automation.
    Incorporates a series of patches from the yahoo-hadoop-0.20-automation branch.

Added:
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/aop/
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/aop/org/
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/aop/org/apache/
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/aop/org/apache/hadoop/
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/aop/org/apache/hadoop/mapred/
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/aop/org/apache/hadoop/mapred/JTProtocolAspect.aj
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/aop/org/apache/hadoop/mapred/JobClientAspect.aj
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/aop/org/apache/hadoop/mapred/JobInProgressAspect.aj
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/aop/org/apache/hadoop/mapred/JobTrackerAspect.aj
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/aop/org/apache/hadoop/mapred/TaskTrackerAspect.aj
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/aop/org/apache/hadoop/test/
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/aop/org/apache/hadoop/test/system/
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/aop/org/apache/hadoop/test/system/DaemonProtocolAspect.aj
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapred/
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapred/JobInfoImpl.java
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapred/TTInfoImpl.java
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapred/TTTaskInfoImpl.java
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapred/TaskInfoImpl.java
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapred/TestCluster.java
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapred/TestSortValidate.java
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapred/TestTaskOwner.java
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapreduce/
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapreduce/test/
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapreduce/test/system/
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapreduce/test/system/JTClient.java
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapreduce/test/system/JTProtocol.java
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapreduce/test/system/JobInfo.java
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapreduce/test/system/MRCluster.java
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapreduce/test/system/MRDaemonClient.java
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapreduce/test/system/MRFault.java
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapreduce/test/system/TTClient.java
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapreduce/test/system/TTInfo.java
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapreduce/test/system/TTProtocol.java
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapreduce/test/system/TTTaskInfo.java
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapreduce/test/system/TaskInfo.java
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/test/
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/test/system/
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/test/system/AbstractDaemonClient.java
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/test/system/AbstractMasterSlaveCluster.java
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/test/system/DaemonProtocol.java
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/test/system/ProcessInfo.java
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/test/system/ProcessInfoImpl.java
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/test/system/process/
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/test/system/process/ClusterProcessManager.java
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/test/system/process/ClusterProcessManagerFactory.java
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/test/system/process/HadoopDaemonRemoteCluster.java
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/test/system/process/RemoteProcess.java
    hadoop/common/branches/branch-0.20-security-patches/src/test/testjar/UserNamePermission.java
Modified:
    hadoop/common/branches/branch-0.20-security-patches/build.xml
    hadoop/common/branches/branch-0.20-security-patches/src/saveVersion.sh
    hadoop/common/branches/branch-0.20-security-patches/src/test/aop/build/aop.xml

Modified: hadoop/common/branches/branch-0.20-security-patches/build.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/build.xml?rev=1077176&r1=1077175&r2=1077176&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/build.xml (original)
+++ hadoop/common/branches/branch-0.20-security-patches/build.xml Fri Mar  4 03:48:49 2011
@@ -621,14 +621,25 @@
   <!--                                                                    -->
   <!-- ================================================================== -->
   <target name="examples" depends="jar, compile-examples" description="Make the Hadoop examples jar.">
-    <jar jarfile="${build.dir}/${examples.final.name}.jar"
-         basedir="${build.examples}">
-      <manifest>
-        <attribute name="Main-Class" 
-                   value="org/apache/hadoop/examples/ExampleDriver"/>
-      </manifest>
-    </jar>
-  </target>
+    <macro-jar-examples
+      build.dir="${build.dir}"
+      basedir="${build.examples}">
+    </macro-jar-examples>
+  </target>
+
+  <macrodef name="macro-jar-examples">
+    <attribute name="build.dir" />
+    <attribute name="basedir" />
+    <sequential>
+      <jar jarfile="@{build.dir}/${examples.final.name}.jar"
+           basedir="@{basedir}">
+        <manifest>
+          <attribute name="Main-Class"
+                    value="org/apache/hadoop/examples/ExampleDriver"/>
+        </manifest>
+      </jar>
+    </sequential>
+  </macrodef>
 
   <target name="tools-jar" depends="jar, compile-tools" 
           description="Make the Hadoop tools jar.">
@@ -791,6 +802,7 @@
     description="Make hadoop-fi.jar">
     <macro-jar-fault-inject
       target.name="jar"
+      build.dir="${build-fi.dir}"
       jar.final.name="final.name"
       jar.final.value="${final.name}-fi" />
   </target>
@@ -840,76 +852,103 @@
   <!-- Run unit tests                                                     --> 
   <!-- ================================================================== -->
   <target name="test-core" depends="jar-test" description="Run core unit tests">
-
-    <delete file="${test.build.dir}/testsfailed"/>
-    <delete dir="${test.build.data}"/>
-    <mkdir dir="${test.build.data}"/>
-    <delete dir="${test.log.dir}"/>
-    <mkdir dir="${test.log.dir}"/>
-  	<copy file="${test.src.dir}/hadoop-policy.xml" 
-  	  todir="${test.build.extraconf}" />
-    <copy file="${test.src.dir}/fi-site.xml"
-      todir="${test.build.extraconf}" />
-    <junit showoutput="${test.output}"
-      printsummary="${test.junit.printsummary}"
-      haltonfailure="${test.junit.haltonfailure}"
-      fork="yes"
-      forkmode="${test.junit.fork.mode}"
-      maxmemory="${test.junit.maxmemory}"
-      dir="${basedir}" timeout="${test.timeout}"
-      errorProperty="tests.failed" failureProperty="tests.failed">
-      <sysproperty key="test.build.data" value="${test.build.data}"/>
-      <sysproperty key="test.tools.input.dir" value="${test.tools.input.dir}"/>
-      <sysproperty key="test.cache.data" value="${test.cache.data}"/>    	
-      <sysproperty key="test.debug.data" value="${test.debug.data}"/>
-      <sysproperty key="hadoop.log.dir" value="${test.log.dir}"/>
-      <sysproperty key="test.src.dir" value="${test.src.dir}"/>
-      <sysproperty key="taskcontroller-path" value="${taskcontroller-path}"/>
-      <sysproperty key="taskcontroller-ugi" value="${taskcontroller-ugi}"/>
-      <sysproperty key="test.build.extraconf" value="${test.build.extraconf}" />
-      <sysproperty key="hadoop.policy.file" value="hadoop-policy.xml"/>
-      <sysproperty key="java.library.path"
-       value="${build.native}/lib:${lib.dir}/native/${build.platform}"/>
-      <sysproperty key="install.c++.examples" value="${install.c++.examples}"/>
-      <!-- set io.compression.codec.lzo.class in the child jvm only if it is set -->
-	  <syspropertyset dynamic="no">
-		  <propertyref name="io.compression.codec.lzo.class"/>
-	  </syspropertyset>
-      <!-- set compile.c++ in the child jvm only if it is set -->
-      <syspropertyset dynamic="no">
-         <propertyref name="compile.c++"/>
-      </syspropertyset>
-      <classpath refid="${test.classpath.id}"/>
-      <syspropertyset id="FaultProbabilityProperties">
-        <propertyref regex="fi.*"/>
-      </syspropertyset>
-      <formatter type="${test.junit.output.format}" />
-      <batchtest todir="${test.build.dir}" if="tests.notestcase">
-        <fileset dir="${test.src.dir}"
-           includes="**/${test.include}.java"
-           excludes="**/${test.exclude}.java aop/**" />
-      </batchtest>
-      <batchtest todir="${test.build.dir}" if="tests.notestcase.fi">
-        <fileset dir="${test.src.dir}/aop"
-          includes="**/${test.include}.java"
-          excludes="**/${test.exclude}.java" />
-      </batchtest>
-      <batchtest todir="${test.build.dir}" if="tests.testcase">
-        <fileset dir="${test.src.dir}"
-          includes="**/${testcase}.java" excludes="aop/**"/>
-      </batchtest>
-      <batchtest todir="${test.build.dir}" if="tests.testcase.fi">
-        <fileset dir="${test.src.dir}/aop" includes="**/${testcase}.java"/>
-      </batchtest>
-      <!--The following batch is for very special occasions only when
-      a non-FI tests are needed to be executed against FI-environment -->
-      <batchtest todir="${test.build.dir}" if="tests.testcaseonly">
-        <fileset dir="${test.src.dir}" includes="**/${testcase}.java"/>
-      </batchtest>
-    </junit>
-    <antcall target="checkfailure"/>
+    <macro-test-runner classpath="${test.classpath.id}"
+                       test.dir="${test.build.dir}"
+                       fileset.dir="${test.src.dir}"
+                       >
+    </macro-test-runner>
   </target>   
 
+  <macrodef name="macro-test-runner">
+    <attribute name="classpath" />
+    <attribute name="test.dir" />
+    <attribute name="fileset.dir" />
+    <attribute name="hadoop.home" default="" />
+    <attribute name="hadoop.conf.dir" default="" />
+    <attribute name="hadoop.conf.dir.deployed" default="" />
+    <sequential>
+      <delete dir="@{test.dir}/data" />
+      <mkdir dir="@{test.dir}/data" />
+      <delete dir="@{test.dir}/logs" />
+      <mkdir dir="@{test.dir}/logs" />
+      <copy file="${test.src.dir}/hadoop-policy.xml"
+            todir="@{test.dir}/extraconf" />
+      <copy file="${test.src.dir}/fi-site.xml"
+            todir="@{test.dir}/extraconf" />
+      <junit showoutput="${test.output}"
+             printsummary="${test.junit.printsummary}"
+             haltonfailure="${test.junit.haltonfailure}"
+             fork="yes"
+             forkmode="${test.junit.fork.mode}"
+             maxmemory="${test.junit.maxmemory}"
+             dir="${basedir}"
+             timeout="${test.timeout}"
+             errorProperty="tests.failed"
+             failureProperty="tests.failed">
+        <sysproperty key="test.build.data" value="${test.build.data}" />
+        <sysproperty key="test.tools.input.dir"
+                     value="${test.tools.input.dir}" />
+        <sysproperty key="test.cache.data" value="${test.cache.data}" />
+        <sysproperty key="test.debug.data" value="${test.debug.data}" />
+        <sysproperty key="hadoop.log.dir" value="${test.log.dir}" />
+        <sysproperty key="test.src.dir" value="${test.src.dir}" />
+        <sysproperty key="taskcontroller-path" value="${taskcontroller-path}" />
+        <sysproperty key="taskcontroller-user" value="${taskcontroller-user}" />
+        <sysproperty key="test.build.extraconf"
+                     value="@{test.dir}/extraconf" />
+        <sysproperty key="hadoop.policy.file" value="hadoop-policy.xml" />
+        <sysproperty key="java.library.path"
+                     value="${build.native}/lib:${lib.dir}/native/${build.platform}" />
+        <sysproperty key="install.c++.examples"
+                     value="${install.c++.examples}" />
+        <sysproperty key="testjar"
+                     value="@{test.dir}/testjar" />
+        <!-- System properties that are specifically set for system tests -->
+        <sysproperty key="test.system.hdrc.hadoophome" value="@{hadoop.home}" />
+        <sysproperty key="test.system.hdrc.hadoopconfdir"
+                     value="@{hadoop.conf.dir}" />
+        <sysproperty key="test.system.hdrc.deployed.hadoopconfdir"
+                     value="@{hadoop.conf.dir.deployed}" />
+        <!-- set io.compression.codec.lzo.class in the child jvm only if it is set -->
+        <syspropertyset dynamic="no">
+          <propertyref name="io.compression.codec.lzo.class" />
+        </syspropertyset>
+        <!-- set compile.c++ in the child jvm only if it is set -->
+        <syspropertyset dynamic="no">
+          <propertyref name="compile.c++" />
+        </syspropertyset>
+        <classpath refid="@{classpath}" />
+        <syspropertyset id="FaultProbabilityProperties">
+          <propertyref regex="fi.*" />
+        </syspropertyset>
+        <formatter type="${test.junit.output.format}" />
+        <batchtest todir="@{test.dir}" if="tests.notestcase">
+          <fileset dir="@{fileset.dir}"
+                   includes="**/${test.include}.java"
+                   excludes="**/${test.exclude}.java aop/** system/**" />
+        </batchtest>
+        <batchtest todir="${test.build.dir}" if="tests.notestcase.fi">
+          <fileset dir="${test.src.dir}/aop"
+                   includes="**/${test.include}.java"
+                   excludes="**/${test.exclude}.java" />
+        </batchtest>
+        <batchtest todir="@{test.dir}" if="tests.testcase">
+          <fileset dir="@{fileset.dir}"
+            includes="**/${testcase}.java" excludes="aop/** system/**"/>
+        </batchtest>
+        <batchtest todir="${test.build.dir}" if="tests.testcase.fi">
+          <fileset dir="${test.src.dir}/aop" includes="**/${testcase}.java" />
+        </batchtest>
+        <!--The following batch is for very special occasions only when
+                a non-FI tests are needed to be executed against FI-environment -->
+        <batchtest todir="${test.build.dir}" if="tests.testcaseonly">
+          <fileset dir="${test.src.dir}" includes="**/${testcase}.java" />
+        </batchtest>
+      </junit>
+      <antcall target="checkfailure"/>
+    </sequential>
+  </macrodef>
+
   <target name="checkfailure" if="tests.failed">
     <touch file="${test.build.dir}/testsfailed"/>
     <fail unless="continueOnFailure">Tests failed!</fail>
@@ -1400,6 +1439,32 @@
     </chmod>
   </target>
 
+  <target name="binary-system" depends="bin-package, jar-system, jar-test-system"
+     description="make system test package for deployment">
+    <copy todir="${system-test-build-dir}/${final.name}">
+      <fileset dir="${dist.dir}">
+      </fileset>
+    </copy>
+    <copy todir="${system-test-build-dir}/${final.name}" 
+      file="${system-test-build-dir}/${core.final.name}.jar" overwrite="true"/>
+    <copy todir="${system-test-build-dir}/${final.name}"
+      file="${system-test-build-dir}/${test.final.name}.jar" overwrite="true"/>
+    <macro_tar 
+      param.destfile="${system-test-build-dir}/${final.name}-bin.tar.gz">
+        <param.listofitems>
+          <tarfileset dir="${system-test-build-dir}" mode="664">
+            <exclude name="${final.name}/bin/*" />
+            <exclude name="${final.name}/src/**" />
+            <exclude name="${final.name}/docs/**" />
+            <include name="${final.name}/**" />
+          </tarfileset>
+          <tarfileset dir="${build.dir}" mode="755">
+            <include name="${final.name}/bin/*" />
+          </tarfileset>
+        </param.listofitems>
+      </macro_tar>
+  </target>
+  
   <target name="binary" depends="bin-package" description="Make tarball without source and documentation">
     <macro_tar param.destfile="${build.dir}/${final.name}-bin.tar.gz">
       <param.listofitems>
@@ -1430,7 +1495,7 @@
   <!-- ================================================================== -->
   <!-- Clean.  Delete the build files, and their directories              -->
   <!-- ================================================================== -->
-  <target name="clean" depends="clean-contrib, clean-sign " description="Clean.  Delete the build files, and their directories">
+  <target name="clean" depends="clean-contrib, clean-sign, clean-fi" description="Clean.  Delete the build files, and their directories">
     <delete dir="${build.dir}"/>
     <delete dir="${docs.src}/build"/>
     <delete dir="${src.docs.cn}/build"/>

Modified: hadoop/common/branches/branch-0.20-security-patches/src/saveVersion.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/saveVersion.sh?rev=1077176&r1=1077175&r2=1077176&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/saveVersion.sh (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/saveVersion.sh Fri Mar  4 03:48:49 2011
@@ -37,7 +37,7 @@ mkdir -p $build_dir/src/org/apache/hadoo
 cat << EOF | \
   sed -e "s/VERSION/$version/" -e "s/USER/$user/" -e "s/DATE/$date/" \
       -e "s|URL|$url|" -e "s/REV/$revision/" \
-      > build/src/org/apache/hadoop/package-info.java
+      > $build_dir/src/org/apache/hadoop/package-info.java
 /*
  * Generated by src/saveVersion.sh
  */

Modified: hadoop/common/branches/branch-0.20-security-patches/src/test/aop/build/aop.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/test/aop/build/aop.xml?rev=1077176&r1=1077175&r2=1077176&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/test/aop/build/aop.xml (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/test/aop/build/aop.xml Fri Mar  4 03:48:49 2011
@@ -15,12 +15,18 @@
    limitations under the License.
 -->
 <project name="aspects">
+  <!-- Properties common for all fault injections -->
   <property name="build-fi.dir" value="${basedir}/build-fi"/>
   <property name="hadoop-fi.jar" location="${build.dir}/${final.name}-fi.jar" />
   <property name="compile-inject.output" value="${build-fi.dir}/compile-fi.log"/>
   <property name="aspectversion" value="1.6.5"/>
   <property file="${basedir}/build.properties"/>
 
+  <!-- Properties related to system fault injection and tests -->
+  <property name="system-test-build-dir" value="${build-fi.dir}/system"/>
+
+  <!-- Properties specifically for system fault-injections and system tests -->
+ 
   <!--All Fault Injection (FI) related targets are located in this session -->
     
   <target name="clean-fi">
@@ -44,10 +50,11 @@
     <echo message="Start weaving aspects in place"/>
     <iajc
       encoding="${build.encoding}" 
-      srcdir="${core.src.dir};${mapred.src.dir};${hdfs.src.dir};${build.src};${test.src.dir}/aop" 
-      includes="org/apache/hadoop/**/*.java, org/apache/hadoop/**/*.aj"
+      srcdir="${core.src.dir};${mapred.src.dir};${hdfs.src.dir};${build.src};
+              ${src.dir.path}"
+      includes="**/org/apache/hadoop/**/*.java, **/org/apache/hadoop/**/*.aj"
       excludes="org/apache/hadoop/record/**/*"
-      destDir="${build.classes}"
+      destDir="${dest.dir}"
       debug="${javac.debug}"
       target="${javac.version}"
       source="${javac.version}"
@@ -55,7 +62,15 @@
       fork="true"
       maxmem="256m"
       >
-      <classpath refid="test.classpath"/>
+
+      <classpath>
+       <path refid="test.classpath"/>
+       <fileset dir="${build-fi.dir}/test/testjar">
+          <include name="**/*.jar" />
+          <exclude name="**/excluded/" />
+       </fileset>
+     </classpath>
+
     </iajc>
     <loadfile property="injection.failure" srcfile="${compile-inject.output}">
      <filterchain>
@@ -70,16 +85,129 @@
     <echo message="Weaving of aspects is finished"/>
   </target>
 
-  <target name="injectfaults" 
-  	description="Instrument classes with faults and other AOP advices">
+  <!-- Classpath for running system tests -->
+  <path id="test.system.classpath">
+        <pathelement location="${hadoop.conf.dir.deployed}" />
+        <pathelement location="${hadoop.conf.dir}" />
+        <pathelement location="${system-test-build-dir}/test/extraconf" />
+        <pathelement location="${system-test-build-dir}/test/classes" />
+        <pathelement location="${system-test-build-dir}/classes" />
+        <pathelement location="${test.src.dir}" />
+        <pathelement location="${build-fi.dir}" />
+        <pathelement location="${build-fi.dir}/tools" />
+        <pathelement path="${clover.jar}" />
+        <fileset dir="${test.lib.dir}">
+          <include name="**/*.jar" />
+          <exclude name="**/excluded/" />
+        </fileset>
+        <fileset dir="${system-test-build-dir}">
+           <include name="**/*.jar" />
+           <exclude name="**/excluded/" />
+         </fileset>
+         <fileset dir="${build-fi.dir}/test/testjar">
+           <include name="**/*.jar" />
+           <exclude name="**/excluded/" />
+         </fileset>
+        <path refid="classpath" />
+  </path>
+
+  <!-- ================ -->
+  <!-- run system tests -->
+  <!-- ================ -->
+  <target name="test-system" depends="-test-system-deployed, -test-system-local"
+    description="Run system tests">
+  </target>
+
+  <target name="-test-system-local"
+    depends="ivy-retrieve-common, prepare-test-system" 
+    unless="hadoop.conf.dir.deployed">
+    <macro-jar-examples
+      build.dir="${system-test-build-dir}"
+      basedir="${system-test-build-dir}/examples">
+    </macro-jar-examples>
+    <macro-test-runner classpath="test.system.classpath"
+                       test.dir="${system-test-build-dir}/test"
+                       fileset.dir="${test.src.dir}/system/java"
+                       hadoop.home="${hadoop.home}"
+                       hadoop.conf.dir="${hadoop.conf.dir}">
+    </macro-test-runner>
+  </target>
+  <target name="-test-system-deployed"
+    depends="ivy-retrieve-common, prepare-test-system" 
+    if="hadoop.conf.dir.deployed">
+    <macro-jar-examples
+      build.dir="${system-test-build-dir}"
+      basedir="${system-test-build-dir}/examples">
+    </macro-jar-examples>
+    <macro-test-runner classpath="test.system.classpath"
+                       test.dir="${system-test-build-dir}/test"
+                       fileset.dir="${test.src.dir}/system/java"
+                       hadoop.home="${hadoop.home}"
+                       hadoop.conf.dir="${hadoop.conf.dir}"
+                       hadoop.conf.dir.deployed="${hadoop.conf.dir.deployed}">
+    </macro-test-runner>
+  </target>
+
+  <target name="prepare-test-system" depends="jar-test-system">
+    <subant buildpath="build.xml" target="inject-system-faults">
+      <property name="build.dir" value="${system-test-build-dir}" />
+    </subant>
+  </target>
+
+  <target name="injectfaults"
+          description="Instrument classes with faults and other AOP advices">
     <mkdir dir="${build-fi.dir}"/>
     <delete file="${compile-inject.output}"/>
-    <subant buildpath="${basedir}" target="compile-fault-inject"
-      output="${compile-inject.output}">
-      <property name="build.dir" value="${build-fi.dir}"/>
+    <weave-injectfault-aspects dest.dir="${build-fi.dir}/classes}"
+                               src.dir="${test.src.dir}/aop">
+    </weave-injectfault-aspects>
+  </target>
+
+  <!-- =============================================================== -->
+  <!-- Create hadoop-{version}-dev-core.jar required to be deployed on -->
+  <!-- cluster for system tests                                        -->
+  <!-- =============================================================== -->
+  <target name="jar-system"
+          depends="inject-system-faults"
+          description="make hadoop.jar">
+    <macro-jar-fault-inject target.name="jar"
+      build.dir="${system-test-build-dir}"
+      jar.final.name="final.name"
+      jar.final.value="${final.name}">
+    </macro-jar-fault-inject>
+  </target>
+
+  <target name="jar-test-system" depends="inject-system-faults"
+    description="Make hadoop-test.jar with system fault-injection">
+    <subant buildpath="build.xml" target="jar-test">
+      <property name="build.dir" value="${system-test-build-dir}"/>
+      <property name="test.build.classes"
+        value="${system-test-build-dir}/test/classes"/>
     </subant>
   </target>
 
+  <macrodef name="weave-injectfault-aspects">
+    <attribute name="dest.dir" />
+    <attribute name="src.dir" />
+    <sequential>
+      <subant buildpath="build.xml" target="compile-fault-inject"
+        output="${compile-inject.output}">
+        <property name="build.dir" value="${build-fi.dir}" />
+        <property name="src.dir.path" value="@{src.dir}" />
+        <property name="dest.dir" value="@{dest.dir}" />
+      </subant>
+    </sequential>
+  </macrodef>
+
+  <target name="inject-system-faults" description="Inject system faults">
+    <property name="build-fi.dir" value="${system-test-build-dir}" />
+    <mkdir dir="${build-fi.dir}"/>
+    <delete file="${compile-inject.output}"/>
+    <weave-injectfault-aspects dest.dir="${system-test-build-dir}/classes"
+                               src.dir="${test.src.dir}/system">
+    </weave-injectfault-aspects>
+    </target>
+
   <macrodef name="macro-run-tests-fault-inject">
     <attribute name="target.name" />
     <attribute name="testcasesonly" />
@@ -99,11 +227,12 @@
   <!-- ================================================================== -->
   <macrodef name="macro-jar-fault-inject">
     <attribute name="target.name" />
+    <attribute name="build.dir" />
     <attribute name="jar.final.name" />
     <attribute name="jar.final.value" />
     <sequential>
       <subant buildpath="build.xml" target="@{target.name}">
-        <property name="build.dir" value="${build-fi.dir}"/>
+        <property name="build.dir" value="@{build.dir}"/>
         <property name="@{jar.final.name}" value="@{jar.final.value}"/>
         <property name="jar.extra.properties.list" 
         	  value="${test.src.dir}/fi-site.xml" />

Added: hadoop/common/branches/branch-0.20-security-patches/src/test/system/aop/org/apache/hadoop/mapred/JTProtocolAspect.aj
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/test/system/aop/org/apache/hadoop/mapred/JTProtocolAspect.aj?rev=1077176&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/test/system/aop/org/apache/hadoop/mapred/JTProtocolAspect.aj (added)
+++ hadoop/common/branches/branch-0.20-security-patches/src/test/system/aop/org/apache/hadoop/mapred/JTProtocolAspect.aj Fri Mar  4 03:48:49 2011
@@ -0,0 +1,64 @@
+package org.apache.hadoop.mapred;
+
+import java.io.IOException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapreduce.JobID;
+import org.apache.hadoop.mapreduce.TaskID;
+import org.apache.hadoop.mapreduce.test.system.JTProtocol;
+import org.apache.hadoop.mapreduce.test.system.JobInfo;
+import org.apache.hadoop.mapreduce.test.system.TTInfo;
+import org.apache.hadoop.mapreduce.test.system.TaskInfo;
+
+/**
+ * Aspect which injects the basic protocol functionality which is to be
+ * implemented by all the services which implement {@link ClientProtocol}
+ * 
+ * Aspect also injects default implementation for the {@link JTProtocol}
+ */
+
+public aspect JTProtocolAspect {
+
+  // Make the ClientProtocl extend the JTprotocol
+  declare parents : JobSubmissionProtocol extends JTProtocol;
+
+  /*
+   * Start of default implementation of the methods in JTProtocol
+   */
+
+  public Configuration JTProtocol.getDaemonConf() throws IOException {
+    return null;
+  }
+
+  public JobInfo JTProtocol.getJobInfo(JobID jobID) throws IOException {
+    return null;
+  }
+
+  public TaskInfo JTProtocol.getTaskInfo(TaskID taskID) throws IOException {
+    return null;
+  }
+
+  public TTInfo JTProtocol.getTTInfo(String trackerName) throws IOException {
+    return null;
+  }
+
+  public JobInfo[] JTProtocol.getAllJobInfo() throws IOException {
+    return null;
+  }
+
+  public TaskInfo[] JTProtocol.getTaskInfo(JobID jobID) throws IOException {
+    return null;
+  }
+
+  public TTInfo[] JTProtocol.getAllTTInfo() throws IOException {
+    return null;
+  }
+  
+  public boolean JTProtocol.isJobRetired(JobID jobID) throws IOException {
+    return false;
+  }
+  
+  public String JTProtocol.getJobHistoryLocationForRetiredJob(JobID jobID) throws IOException {
+    return "";
+  }
+}

Added: hadoop/common/branches/branch-0.20-security-patches/src/test/system/aop/org/apache/hadoop/mapred/JobClientAspect.aj
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/test/system/aop/org/apache/hadoop/mapred/JobClientAspect.aj?rev=1077176&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/test/system/aop/org/apache/hadoop/mapred/JobClientAspect.aj (added)
+++ hadoop/common/branches/branch-0.20-security-patches/src/test/system/aop/org/apache/hadoop/mapred/JobClientAspect.aj Fri Mar  4 03:48:49 2011
@@ -0,0 +1,8 @@
+package org.apache.hadoop.mapred;
+
+public privileged aspect JobClientAspect {
+
+  public JobSubmissionProtocol JobClient.getProtocol() {
+    return jobSubmitClient;
+  }
+}
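
For illustration only: with this aspect and JTProtocolAspect woven in, a system test can reach the JobTracker-side JTProtocol through the submission protocol held by JobClient. The helper class below is a hypothetical sketch, not part of this change; it assumes a JobClient already connected to the cluster under test.

package org.apache.hadoop.mapred;

import java.io.IOException;

import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.test.system.JTProtocol;
import org.apache.hadoop.mapreduce.test.system.JobInfo;

/** Hypothetical helper; the class and method names are illustrative only. */
public class JTProtocolUsageSketch {
  /** Returns the JobTracker-side view of a job, or null if the tracker does not know it. */
  static JobInfo lookupJob(JobClient client, JobID id) throws IOException {
    // getProtocol() is introduced into JobClient by JobClientAspect; the cast is valid
    // because JTProtocolAspect declares JobSubmissionProtocol extends JTProtocol.
    JTProtocol jt = (JTProtocol) client.getProtocol();
    return jt.getJobInfo(id);
  }
}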

Added: hadoop/common/branches/branch-0.20-security-patches/src/test/system/aop/org/apache/hadoop/mapred/JobInProgressAspect.aj
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/test/system/aop/org/apache/hadoop/mapred/JobInProgressAspect.aj?rev=1077176&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/test/system/aop/org/apache/hadoop/mapred/JobInProgressAspect.aj (added)
+++ hadoop/common/branches/branch-0.20-security-patches/src/test/system/aop/org/apache/hadoop/mapred/JobInProgressAspect.aj Fri Mar  4 03:48:49 2011
@@ -0,0 +1,57 @@
+package org.apache.hadoop.mapred;
+
+import java.io.IOException;
+import org.apache.hadoop.mapreduce.test.system.JobInfo;
+
+/**
+ * Aspect to add a utility method in the JobInProgress for easing up the
+ * construction of the JobInfo object.
+ */
+privileged aspect JobInProgressAspect {
+
+  /**
+   * Returns a read only view of the JobInProgress object which is used by the
+   * client.
+   * 
+   * @return JobInfo of the current JobInProgress object
+   */
+  public JobInfo JobInProgress.getJobInfo() {
+    String historyLoc = getHistoryPath();
+    if (tasksInited.get()) {
+      return new JobInfoImpl(
+          this.getJobID(), this.isSetupLaunched(), this.isSetupFinished(), this
+              .isCleanupLaunched(), this.runningMaps(), this.runningReduces(),
+          this.pendingMaps(), this.pendingReduces(), this.finishedMaps(), this
+              .finishedReduces(), this.getStatus(), historyLoc, this
+              .getBlackListedTrackers(), false, this.numMapTasks,
+          this.numReduceTasks, this.isHistoryFileCopied());
+    } else {
+      return new JobInfoImpl(
+          this.getJobID(), false, false, false, 0, 0, this.pendingMaps(), this
+              .pendingReduces(), this.finishedMaps(), this.finishedReduces(),
+          this.getStatus(), historyLoc, this.getBlackListedTrackers(), this
+              .isComplete(), this.numMapTasks, this.numReduceTasks, 
+              this.isHistoryFileCopied());
+    }
+  }
+  
+  private String JobInProgress.getHistoryPath() {
+    String historyLoc = "";
+    if(this.isComplete()) {
+      historyLoc = this.getHistoryFile();
+    } else {
+      String historyFileName = null;
+      try {
+        historyFileName  = JobHistory.JobInfo.getJobHistoryFileName(conf, 
+            jobId);
+      } catch(IOException e) {
+      }
+      if(historyFileName != null) {
+        historyLoc = JobHistory.JobInfo.getJobHistoryLogLocation(
+            historyFileName).toString();
+      }
+    }
+    return historyLoc;
+  }
+
+}

Added: hadoop/common/branches/branch-0.20-security-patches/src/test/system/aop/org/apache/hadoop/mapred/JobTrackerAspect.aj
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/test/system/aop/org/apache/hadoop/mapred/JobTrackerAspect.aj?rev=1077176&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/test/system/aop/org/apache/hadoop/mapred/JobTrackerAspect.aj (added)
+++ hadoop/common/branches/branch-0.20-security-patches/src/test/system/aop/org/apache/hadoop/mapred/JobTrackerAspect.aj Fri Mar  4 03:48:49 2011
@@ -0,0 +1,234 @@
+package org.apache.hadoop.mapred;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.ArrayList;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.JobTracker.RetireJobInfo;
+import org.apache.hadoop.mapreduce.JobID;
+import org.apache.hadoop.mapreduce.TaskID;
+import org.apache.hadoop.mapreduce.server.jobtracker.TaskTracker;
+import org.apache.hadoop.mapreduce.test.system.JTProtocol;
+import org.apache.hadoop.mapreduce.test.system.JobInfo;
+import org.apache.hadoop.mapreduce.test.system.TTInfo;
+import org.apache.hadoop.mapreduce.test.system.TaskInfo;
+import org.apache.hadoop.test.system.DaemonProtocol;
+
+/**
+ * Aspect class which injects the code for {@link JobTracker} class.
+ * 
+ */
+public privileged aspect JobTrackerAspect {
+
+
+  public Configuration JobTracker.getDaemonConf() throws IOException {
+    return conf;
+  }
+  /**
+   * Method to get the read only view of the job and its associated information.
+   * 
+   * @param jobID
+   *          id of the job for which information is required.
+   * @return JobInfo of the job requested
+   * @throws IOException
+   */
+  public JobInfo JobTracker.getJobInfo(JobID jobID) throws IOException {
+    JobInProgress jip = jobs.get(org.apache.hadoop.mapred.JobID
+        .downgrade(jobID));
+    if (jip == null) {
+      LOG.warn("No job present for : " + jobID);
+      return null;
+    }
+    JobInfo info;
+    synchronized (jip) {
+      info = jip.getJobInfo();
+    }
+    return info;
+  }
+
+  /**
+   * Method to get the read only view of the task and its associated
+   * information.
+   * 
+   * @param taskID
+   * @return
+   * @throws IOException
+   */
+  public TaskInfo JobTracker.getTaskInfo(TaskID taskID) throws IOException {
+    TaskInProgress tip = getTip(org.apache.hadoop.mapred.TaskID
+        .downgrade(taskID));
+
+    if (tip == null) {
+      LOG.warn("No task present for : " + taskID);
+      return null;
+    }
+    TaskInfo info;
+    TaskStatus[] status = tip.getTaskStatuses();
+    synchronized (tip) {
+      if (status == null) {
+        if (tip.isMapTask()) {
+          status = new MapTaskStatus[]{};
+        }
+        else {
+          status = new ReduceTaskStatus[]{};
+        }
+      }
+      info = new TaskInfoImpl(tip.getTIPId(), tip.getProgress(), tip
+          .getActiveTasks().size(), tip.numKilledTasks(), 
+          tip.numTaskFailures(), status);
+    }
+    return info;
+  }
+
+  public TTInfo JobTracker.getTTInfo(String trackerName) throws IOException {
+    org.apache.hadoop.mapreduce.server.jobtracker.TaskTracker tt = taskTrackers
+        .get(trackerName);
+    if (tt == null) {
+      LOG.warn("No task tracker with name : " + trackerName + " found");
+      return null;
+    }
+    TaskTrackerStatus status = tt.getStatus();
+    TTInfo info = new TTInfoImpl(status.trackerName, status);
+    return info;
+  }
+
+  // XXX Below two method don't reuse getJobInfo and getTaskInfo as there is a
+  // possibility that retire job can run and remove the job from JT memory
+  // during
+  // processing of the RPC call.
+  public JobInfo[] JobTracker.getAllJobInfo() throws IOException {
+    List<JobInfo> infoList = new ArrayList<JobInfo>();
+    synchronized (jobs) {
+      for (JobInProgress jip : jobs.values()) {
+        JobInfo info = jip.getJobInfo();
+        infoList.add(info);
+      }
+    }
+    return (JobInfo[]) infoList.toArray(new JobInfo[infoList.size()]);
+  }
+
+  public TaskInfo[] JobTracker.getTaskInfo(JobID jobID) throws IOException {
+    JobInProgress jip = jobs.get(org.apache.hadoop.mapred.JobID
+        .downgrade(jobID));
+    if (jip == null) {
+      LOG.warn("Unable to find job : " + jobID);
+      return null;
+    }
+    List<TaskInfo> infoList = new ArrayList<TaskInfo>();
+    TaskStatus[] status;
+    synchronized (jip) {
+      for (TaskInProgress tip : jip.setup) {
+        status = tip.getTaskStatuses();
+        if (status == null) {
+          if (tip.isMapTask()) {
+            status = new MapTaskStatus[]{};
+          }
+          else {
+            status = new ReduceTaskStatus[]{};
+          }
+        }
+        TaskInfo info = new TaskInfoImpl(tip.getTIPId(), tip.getProgress(), tip
+            .getActiveTasks().size(), tip.numKilledTasks(), tip
+            .numTaskFailures(), status);
+        infoList.add(info);
+      }
+      for (TaskInProgress tip : jip.maps) {
+        status = tip.getTaskStatuses();
+        if (status == null) {
+          status = new MapTaskStatus[]{};
+        }
+        TaskInfo info = new TaskInfoImpl(tip.getTIPId(), tip.getProgress(), tip
+            .getActiveTasks().size(), tip.numKilledTasks(), tip
+            .numTaskFailures(), status);
+        infoList.add(info);
+      }
+      for (TaskInProgress tip : jip.reduces) {
+        status = tip.getTaskStatuses();
+        if (status == null) {
+          status = new ReduceTaskStatus[]{};
+        }
+        TaskInfo info = new TaskInfoImpl(tip.getTIPId(), tip.getProgress(), tip
+            .getActiveTasks().size(), tip.numKilledTasks(), tip
+            .numTaskFailures(), status);
+        infoList.add(info);
+      }
+      for (TaskInProgress tip : jip.cleanup) {
+        status = tip.getTaskStatuses();
+        if (status == null) {
+          if (tip.isMapTask()) {
+            status = new MapTaskStatus[]{};
+          }
+          else {
+            status = new ReduceTaskStatus[]{};
+          }
+        }
+        TaskInfo info = new TaskInfoImpl(tip.getTIPId(), tip.getProgress(), tip
+            .getActiveTasks().size(), tip.numKilledTasks(), tip
+            .numTaskFailures(), status);
+        infoList.add(info);
+      }
+    }
+    return (TaskInfo[]) infoList.toArray(new TaskInfo[infoList.size()]);
+  }
+
+  public TTInfo[] JobTracker.getAllTTInfo() throws IOException {
+    List<TTInfo> infoList = new ArrayList<TTInfo>();
+    synchronized (taskTrackers) {
+      for (TaskTracker tt : taskTrackers.values()) {
+        TaskTrackerStatus status = tt.getStatus();
+        TTInfo info = new TTInfoImpl(status.trackerName, status);
+        infoList.add(info);
+      }
+    }
+    return (TTInfo[]) infoList.toArray(new TTInfo[infoList.size()]);
+  }
+  
+  public boolean JobTracker.isJobRetired(JobID id) throws IOException {
+    return retireJobs.get(
+        org.apache.hadoop.mapred.JobID.downgrade(id))!=null?true:false;
+  }
+
+  public String JobTracker.getJobHistoryLocationForRetiredJob(
+      JobID id) throws IOException {
+    RetireJobInfo retInfo = retireJobs.get(
+        org.apache.hadoop.mapred.JobID.downgrade(id));
+    if(retInfo == null) {
+      throw new IOException("The retired job information for the job : " 
+          + id +" is not found");
+    } else {
+      return retInfo.getHistoryFile();
+    }
+  }
+  pointcut getVersionAspect(String protocol, long clientVersion) : 
+    execution(public long JobTracker.getProtocolVersion(String , 
+      long) throws IOException) && args(protocol, clientVersion);
+
+  long around(String protocol, long clientVersion) :  
+    getVersionAspect(protocol, clientVersion) {
+    if (protocol.equals(DaemonProtocol.class.getName())) {
+      return DaemonProtocol.versionID;
+    } else if (protocol.equals(JTProtocol.class.getName())) {
+      return JTProtocol.versionID;
+    } else {
+      return proceed(protocol, clientVersion);
+    }
+  }
+
+  /**
+   * Point cut which monitors for the start of the jobtracker and sets the right
+   * value if the jobtracker is started.
+   * 
+   * @param conf
+   * @param jobtrackerIndentifier
+   */
+  pointcut jtConstructorPointCut(JobConf conf, String jobtrackerIndentifier) : 
+        call(JobTracker.new(JobConf,String)) 
+        && args(conf, jobtrackerIndentifier) ;
+
+  after(JobConf conf, String jobtrackerIndentifier) 
+    returning (JobTracker tracker): jtConstructorPointCut(conf, 
+        jobtrackerIndentifier) {
+    tracker.setReady(true);
+  }
+}

Added: hadoop/common/branches/branch-0.20-security-patches/src/test/system/aop/org/apache/hadoop/mapred/TaskTrackerAspect.aj
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/test/system/aop/org/apache/hadoop/mapred/TaskTrackerAspect.aj?rev=1077176&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/test/system/aop/org/apache/hadoop/mapred/TaskTrackerAspect.aj (added)
+++ hadoop/common/branches/branch-0.20-security-patches/src/test/system/aop/org/apache/hadoop/mapred/TaskTrackerAspect.aj Fri Mar  4 03:48:49 2011
@@ -0,0 +1,78 @@
+package org.apache.hadoop.mapred;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.ArrayList;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapreduce.test.system.JTProtocol;
+import org.apache.hadoop.mapreduce.test.system.TTProtocol;
+import org.apache.hadoop.mapreduce.test.system.TTTaskInfo;
+import org.apache.hadoop.mapred.TTTaskInfoImpl.MapTTTaskInfo;
+import org.apache.hadoop.mapred.TTTaskInfoImpl.ReduceTTTaskInfo;
+import org.apache.hadoop.test.system.DaemonProtocol;
+import org.apache.hadoop.test.system.DaemonProtocolAspect;
+
+public privileged aspect TaskTrackerAspect {
+
+  declare parents : TaskTracker implements TTProtocol;
+
+  // Add a last sent status field to the Tasktracker class.
+  TaskTrackerStatus TaskTracker.lastSentStatus = null;
+
+  public synchronized TaskTrackerStatus TaskTracker.getStatus()
+      throws IOException {
+    return lastSentStatus;
+  }
+
+  public Configuration TaskTracker.getDaemonConf() throws IOException {
+    return fConf;
+  }
+
+  public TTTaskInfo[] TaskTracker.getTasks() throws IOException {
+    List<TTTaskInfo> infoList = new ArrayList<TTTaskInfo>();
+    for (TaskInProgress tip : tasks.values()) {
+      TTTaskInfo info = null;
+      if (tip.task.isMapTask()) {
+        info = new MapTTTaskInfo(((MapTask) tip.task), tip.slotTaken,
+            tip.wasKilled, tip.diagnosticInfo.toString());
+      } else {
+        info = new ReduceTTTaskInfo(((ReduceTask) tip.task), tip.slotTaken,
+            tip.wasKilled, tip.diagnosticInfo.toString());
+      }
+      infoList.add(info);
+    }
+    return (TTTaskInfo[]) infoList.toArray(new TTTaskInfo[infoList.size()]);
+  }
+
+  before(TaskTrackerStatus newStatus, TaskTracker tracker) : 
+    set(TaskTrackerStatus TaskTracker.status) 
+    && args(newStatus) && this(tracker) {
+    if (newStatus == null) {
+      tracker.lastSentStatus = tracker.status;
+    }
+  }
+
+  pointcut ttConstructorPointCut(JobConf conf) : 
+    call(TaskTracker.new(JobConf)) 
+    && args(conf);
+
+  after(JobConf conf) returning (TaskTracker tracker): 
+    ttConstructorPointCut(conf) {
+    tracker.setReady(true);
+  }
+  
+  pointcut getVersionAspect(String protocol, long clientVersion) : 
+    execution(public long TaskTracker.getProtocolVersion(String , 
+      long) throws IOException) && args(protocol, clientVersion);
+
+  long around(String protocol, long clientVersion) :  
+    getVersionAspect(protocol, clientVersion) {
+    if(protocol.equals(DaemonProtocol.class.getName())) {
+      return DaemonProtocol.versionID;
+    } else if(protocol.equals(TTProtocol.class.getName())) {
+      return TTProtocol.versionID;
+    } else {
+      return proceed(protocol, clientVersion);
+    }
+  }
+}

Added: hadoop/common/branches/branch-0.20-security-patches/src/test/system/aop/org/apache/hadoop/test/system/DaemonProtocolAspect.aj
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/test/system/aop/org/apache/hadoop/test/system/DaemonProtocolAspect.aj?rev=1077176&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/test/system/aop/org/apache/hadoop/test/system/DaemonProtocolAspect.aj (added)
+++ hadoop/common/branches/branch-0.20-security-patches/src/test/system/aop/org/apache/hadoop/test/system/DaemonProtocolAspect.aj Fri Mar  4 03:48:49 2011
@@ -0,0 +1,143 @@
+package org.apache.hadoop.test.system;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.conf.Configuration;
+
+/**
+ * Default DaemonProtocolAspect which is used to provide default implementation
+ * for all the common daemon methods. If a daemon requires more specialized
+ * version of method, it is responsibility of the DaemonClient to introduce the
+ * same in woven classes.
+ * 
+ */
+public aspect DaemonProtocolAspect {
+
+  private boolean DaemonProtocol.ready;
+
+  /**
+   * Set if the daemon process is ready or not, concrete daemon protocol should
+   * implement pointcuts to determine when the daemon is ready and use the
+   * setter to set the ready state.
+   * 
+   * @param ready
+   *          true if the Daemon is ready.
+   */
+  public void DaemonProtocol.setReady(boolean ready) {
+    this.ready = ready;
+  }
+
+  /**
+   * Checks if the daemon process is alive or not.
+   * 
+   * @throws IOException
+   *           if daemon is not alive.
+   */
+  public void DaemonProtocol.ping() throws IOException {
+  }
+
+  /**
+   * Checks if the daemon process is ready to accepting RPC connections after it
+   * finishes initialization. <br/>
+   * 
+   * @return true if ready to accept connection.
+   * 
+   * @throws IOException
+   */
+  public boolean DaemonProtocol.isReady() throws IOException {
+    return ready;
+  }
+
+  /**
+   * Returns the process related information regarding the daemon process. <br/>
+   * 
+   * @return process information.
+   * @throws IOException
+   */
+  public ProcessInfo DaemonProtocol.getProcessInfo() throws IOException {
+    int activeThreadCount = Thread.activeCount();
+    long currentTime = System.currentTimeMillis();
+    long maxmem = Runtime.getRuntime().maxMemory();
+    long freemem = Runtime.getRuntime().freeMemory();
+    long totalmem = Runtime.getRuntime().totalMemory();
+    Map<String, String> envMap = System.getenv();
+    Properties sysProps = System.getProperties();
+    Map<String, String> props = new HashMap<String, String>();
+    for (Map.Entry entry : sysProps.entrySet()) {
+      props.put((String) entry.getKey(), (String) entry.getValue());
+    }
+    ProcessInfo info = new ProcessInfoImpl(activeThreadCount, currentTime,
+        freemem, maxmem, totalmem, envMap, props);
+    return info;
+  }
+
+  public void DaemonProtocol.enable(List<Enum<?>> faults) throws IOException {
+  }
+
+  public void DaemonProtocol.disableAll() throws IOException {
+  }
+
+  public abstract Configuration DaemonProtocol.getDaemonConf()
+    throws IOException;
+
+  public FileStatus DaemonProtocol.getFileStatus(String path, boolean local) 
+    throws IOException {
+    Path p = new Path(path);
+    FileSystem fs = getFS(p, local);
+    p.makeQualified(fs);
+    FileStatus fileStatus = fs.getFileStatus(p);
+    return cloneFileStatus(fileStatus);
+  }
+
+  public FileStatus[] DaemonProtocol.listStatus(String path, boolean local) 
+    throws IOException {
+    Path p = new Path(path);
+    FileSystem fs = getFS(p, local);
+    FileStatus[] status = fs.listStatus(p);
+    if (status != null) {
+      FileStatus[] result = new FileStatus[status.length];
+      int i = 0;
+      for (FileStatus fileStatus : status) {
+        result[i++] = cloneFileStatus(fileStatus);
+      }
+      return result;
+    }
+    return status;
+  }
+
+  /**
+   * FileStatus object may not be serializable. Clone it into raw FileStatus 
+   * object.
+   */
+  private FileStatus DaemonProtocol.cloneFileStatus(FileStatus fileStatus) {
+    return new FileStatus(fileStatus.getLen(),
+        fileStatus.isDir(),
+        fileStatus.getReplication(),
+        fileStatus.getBlockSize(),
+        fileStatus.getModificationTime(),
+        fileStatus.getAccessTime(),
+        fileStatus.getPermission(),
+        fileStatus.getOwner(),
+        fileStatus.getGroup(),
+        fileStatus.getPath());
+  }
+
+  private FileSystem DaemonProtocol.getFS(Path path, boolean local) 
+    throws IOException {
+    FileSystem fs = null;
+    if (local) {
+      fs = FileSystem.getLocal(getDaemonConf());
+    } else {
+      fs = path.getFileSystem(getDaemonConf());
+    }
+    return fs;
+  }
+}
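
For illustration only: a sketch of how a test-side client might use the methods this aspect injects into DaemonProtocol, polling a daemon until it reports ready and then fetching its process information. How the DaemonProtocol proxy is obtained (for example through the AbstractDaemonClient added in this commit) is assumed and not shown here.

package org.apache.hadoop.test.system;

import java.io.IOException;

/** Hypothetical helper; the class and method names are illustrative only. */
public class DaemonReadinessSketch {
  /** Polls the daemon until it reports ready, then returns its process information. */
  static ProcessInfo waitForReady(DaemonProtocol daemon, int attempts)
      throws IOException, InterruptedException {
    for (int i = 0; i < attempts; i++) {
      daemon.ping();                      // throws IOException if the daemon is not alive
      if (daemon.isReady()) {
        return daemon.getProcessInfo();   // thread count, memory usage, env and system properties
      }
      Thread.sleep(1000);                 // wait a second before asking again
    }
    throw new IOException("Daemon not ready after " + attempts + " attempts");
  }
}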

Added: hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapred/JobInfoImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapred/JobInfoImpl.java?rev=1077176&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapred/JobInfoImpl.java (added)
+++ hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapred/JobInfoImpl.java Fri Mar  4 03:48:49 2011
@@ -0,0 +1,197 @@
+package org.apache.hadoop.mapred;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.LinkedList;
+import java.util.List;
+
+import org.apache.hadoop.mapred.JobStatus;
+import org.apache.hadoop.mapreduce.JobID;
+import org.apache.hadoop.mapreduce.test.system.JobInfo;
+
+/**
+ * Concrete implementation of the JobInfo interface which is exposed to the
+ * clients.
+ * Look at {@link JobInfo} for further details.
+ */
+class JobInfoImpl implements JobInfo {
+
+  private List<String> blackListedTracker;
+  private String historyUrl;
+  private JobID id;
+  private boolean setupLaunched;
+  private boolean setupFinished;
+  private boolean cleanupLaunched;
+  private JobStatus status;
+  private int runningMaps;
+  private int runningReduces;
+  private int waitingMaps;
+  private int waitingReduces;
+  private int finishedMaps;
+  private int finishedReduces;
+  private int numMaps;
+  private int numReduces;
+  private boolean historyCopied;
+
+  public JobInfoImpl() {
+    id = new JobID();
+    status = new JobStatus();
+    blackListedTracker = new LinkedList<String>();
+    historyUrl = "";
+  }
+  
+  public JobInfoImpl(
+      JobID id, boolean setupLaunched, boolean setupFinished,
+      boolean cleanupLaunched, int runningMaps, int runningReduces,
+      int waitingMaps, int waitingReduces, int finishedMaps,
+      int finishedReduces, JobStatus status, String historyUrl,
+      List<String> blackListedTracker, boolean isComplete, int numMaps,
+      int numReduces, boolean historyCopied) {
+    super();
+    this.blackListedTracker = blackListedTracker;
+    this.historyUrl = historyUrl;
+    this.id = id;
+    this.setupLaunched = setupLaunched;
+    this.setupFinished = setupFinished;
+    this.cleanupLaunched = cleanupLaunched;
+    this.status = status;
+    this.runningMaps = runningMaps;
+    this.runningReduces = runningReduces;
+    this.waitingMaps = waitingMaps;
+    this.waitingReduces = waitingReduces;
+    this.finishedMaps = finishedMaps;
+    this.finishedReduces = finishedReduces;
+    this.numMaps = numMaps;
+    this.numReduces = numReduces;
+    this.historyCopied = historyCopied;
+  }
+
+  @Override
+  public List<String> getBlackListedTrackers() {
+    return blackListedTracker;
+  }
+
+  @Override
+  public String getHistoryUrl() {
+    return historyUrl;
+  }
+
+  @Override
+  public JobID getID() {
+    return id;
+  }
+
+  @Override
+  public JobStatus getStatus() {
+    return status;
+  }
+
+  @Override
+  public boolean isCleanupLaunched() {
+    return cleanupLaunched;
+  }
+
+  @Override
+  public boolean isSetupLaunched() {
+    return setupLaunched;
+  }
+
+  @Override
+  public boolean isSetupFinished() {
+    return setupFinished;
+  }
+
+  @Override
+  public int runningMaps() {
+    return runningMaps;
+  }
+
+  @Override
+  public int runningReduces() {
+    return runningReduces;
+  }
+
+  @Override
+  public int waitingMaps() {
+    return waitingMaps;
+  }
+
+  @Override
+  public int waitingReduces() {
+    return waitingReduces;
+  }
+ 
+  @Override
+  public int finishedMaps() {
+    return finishedMaps;
+  }
+
+  @Override
+  public int finishedReduces() {
+    return finishedReduces;
+  }
+  
+  @Override
+  public int numMaps() {
+    return numMaps;
+  }
+  
+  @Override
+  public int numReduces() {
+    return numReduces;
+  }
+  
+  @Override
+  public boolean isHistoryFileCopied() {
+    return historyCopied;
+  }
+  
+  @Override
+  public void readFields(DataInput in) throws IOException {
+    id.readFields(in);
+    setupLaunched = in.readBoolean();
+    setupFinished = in.readBoolean();
+    cleanupLaunched = in.readBoolean();
+    status.readFields(in);
+    runningMaps = in.readInt();
+    runningReduces = in.readInt();
+    waitingMaps = in.readInt();
+    waitingReduces = in.readInt();
+    historyUrl = in.readUTF();
+    int size = in.readInt();
+    // Reset the list so a reused instance does not accumulate stale entries.
+    blackListedTracker.clear();
+    for (int i = 0; i < size; i++) {
+      blackListedTracker.add(in.readUTF());
+    }
+    finishedMaps = in.readInt();
+    finishedReduces = in.readInt();
+    numMaps = in.readInt();
+    numReduces = in.readInt();
+    historyCopied = in.readBoolean();
+  }
+
+  @Override
+  public void write(DataOutput out) throws IOException {
+    id.write(out);
+    out.writeBoolean(setupLaunched);
+    out.writeBoolean(setupFinished);
+    out.writeBoolean(cleanupLaunched);
+    status.write(out);
+    out.writeInt(runningMaps);
+    out.writeInt(runningReduces);
+    out.writeInt(waitingMaps);
+    out.writeInt(waitingReduces);
+    out.writeUTF(historyUrl);
+    out.writeInt(blackListedTracker.size());
+    for (String str : blackListedTracker) {
+      out.writeUTF(str);
+    }
+    out.writeInt(finishedMaps);
+    out.writeInt(finishedReduces);
+    out.writeInt(numMaps);
+    out.writeInt(numReduces);
+    out.writeBoolean(historyCopied);
+  }
+
+
+}

Added: hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapred/TTInfoImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapred/TTInfoImpl.java?rev=1077176&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapred/TTInfoImpl.java (added)
+++ hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapred/TTInfoImpl.java Fri Mar  4 03:48:49 2011
@@ -0,0 +1,54 @@
+package org.apache.hadoop.mapred;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+
+import org.apache.hadoop.mapred.TaskTrackerStatus;
+import org.apache.hadoop.mapreduce.test.system.TTInfo;
+
+/**
+ * Concrete implementation of the TaskTracker information which is passed to 
+ * the client from JobTracker.
+ * Look at {@link TTInfo}
+ */
+
+class TTInfoImpl implements TTInfo {
+
+  private String taskTrackerName;
+  private TaskTrackerStatus status;
+
+  public TTInfoImpl() {
+    taskTrackerName = "";
+    status = new TaskTrackerStatus();
+  }
+  
+  public TTInfoImpl(String taskTrackerName, TaskTrackerStatus status) {
+    super();
+    this.taskTrackerName = taskTrackerName;
+    this.status = status;
+  }
+
+  @Override
+  public String getName() {
+    return taskTrackerName;
+  }
+
+  @Override
+  public TaskTrackerStatus getStatus() {
+    return status;
+  }
+
+  @Override
+  public void readFields(DataInput in) throws IOException {
+    taskTrackerName = in.readUTF();
+    status.readFields(in);
+  }
+
+  @Override
+  public void write(DataOutput out) throws IOException {
+    out.writeUTF(taskTrackerName);
+    status.write(out);
+  }
+
+}

Added: hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapred/TTTaskInfoImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapred/TTTaskInfoImpl.java?rev=1077176&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapred/TTTaskInfoImpl.java (added)
+++ hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapred/TTTaskInfoImpl.java Fri Mar  4 03:48:49 2011
@@ -0,0 +1,96 @@
+package org.apache.hadoop.mapred;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+
+import org.apache.hadoop.mapred.MapTask;
+import org.apache.hadoop.mapred.ReduceTask;
+import org.apache.hadoop.mapred.Task;
+import org.apache.hadoop.mapreduce.test.system.TTTaskInfo;
+/**
+ * Abstract class that passes the TaskTracker's view of a task to the client.
+ * Look at {@link TTTaskInfo} for further details.
+ *
+ */
+abstract class TTTaskInfoImpl implements TTTaskInfo {
+
+  private String diagnosticInfo;
+  private Task task;
+  private boolean slotTaken;
+  private boolean wasKilled;
+
+  public TTTaskInfoImpl() {
+  }
+
+  public TTTaskInfoImpl(Task task, boolean slotTaken, boolean wasKilled,
+      String diagnosticInfo) {
+    super();
+    this.diagnosticInfo = diagnosticInfo;
+    this.task = task;
+    this.slotTaken = slotTaken;
+    this.wasKilled = wasKilled;
+  }
+
+  @Override
+  public String getDiagnosticInfo() {
+    return diagnosticInfo;
+  }
+
+  @Override
+  public Task getTask() {
+    return task;
+  }
+
+  @Override
+  public boolean slotTaken() {
+    return slotTaken;
+  }
+
+  @Override
+  public boolean wasKilled() {
+    return wasKilled;
+  }
+
+  @Override
+  public void readFields(DataInput in) throws IOException {
+    task.readFields(in);
+    slotTaken = in.readBoolean();
+    wasKilled = in.readBoolean();
+    diagnosticInfo = in.readUTF();
+  }
+
+  @Override
+  public void write(DataOutput out) throws IOException {
+    task.write(out);
+    out.writeBoolean(slotTaken);
+    out.writeBoolean(wasKilled);
+    out.writeUTF(diagnosticInfo);
+  }
+
+  static class MapTTTaskInfo extends TTTaskInfoImpl {
+
+    public MapTTTaskInfo() {
+      super(new MapTask(), false, false, "");
+    }
+
+    public MapTTTaskInfo(MapTask task, boolean slotTaken, boolean wasKilled,
+        String diagnosticInfo) {
+      super(task, slotTaken, wasKilled, diagnosticInfo);
+    }
+  }
+
+  static class ReduceTTTaskInfo extends TTTaskInfoImpl {
+
+    public ReduceTTTaskInfo() {
+      super(new ReduceTask(), false, false, "");
+    }
+
+    public ReduceTTTaskInfo(ReduceTask task, boolean slotTaken,
+        boolean wasKilled, String diagnosticInfo) {
+      super(task, slotTaken, wasKilled, diagnosticInfo);
+    }
+
+  }
+
+}
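
A minimal, hypothetical sketch (not part of this patch) of how a caller in the
same org.apache.hadoop.mapred package might pick the map- or reduce-flavoured
implementation above for a given Task; the helper class name is made up here:

    package org.apache.hadoop.mapred;

    // Hypothetical helper: chooses the concrete TTTaskInfoImpl subclass for a Task.
    class TTTaskInfoFactorySketch {
      static TTTaskInfoImpl toTaskInfo(Task task, boolean slotTaken,
          boolean wasKilled, String diagnosticInfo) {
        if (task.isMapTask()) {
          return new TTTaskInfoImpl.MapTTTaskInfo(
              (MapTask) task, slotTaken, wasKilled, diagnosticInfo);
        }
        return new TTTaskInfoImpl.ReduceTTTaskInfo(
            (ReduceTask) task, slotTaken, wasKilled, diagnosticInfo);
      }
    }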

Added: hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapred/TaskInfoImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapred/TaskInfoImpl.java?rev=1077176&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapred/TaskInfoImpl.java (added)
+++ hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapred/TaskInfoImpl.java Fri Mar  4 03:48:49 2011
@@ -0,0 +1,116 @@
+package org.apache.hadoop.mapred;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+
+import org.apache.hadoop.mapred.TaskStatus;
+import org.apache.hadoop.mapreduce.TaskID;
+import org.apache.hadoop.mapreduce.test.system.TaskInfo;
+
+/**
+ * Concrete class that exposes task-related information from the JobTracker
+ * to the clients.
+ * Look at {@link TaskInfo} for further details.
+ */
+class TaskInfoImpl implements TaskInfo {
+
+  private double progress;
+  private TaskID taskID;
+  private int killedAttempts;
+  private int failedAttempts;
+  private int runningAttempts;
+  private TaskStatus[] taskStatus;
+
+  public TaskInfoImpl() {
+    taskID = new TaskID();
+  }
+  public TaskInfoImpl(TaskID taskID, double progress, int runningAttempts,
+      int killedAttempts, int failedAttempts, TaskStatus[] taskStatus) {
+    this.progress = progress;
+    this.taskID = taskID;
+    this.killedAttempts = killedAttempts;
+    this.failedAttempts = failedAttempts;
+    this.runningAttempts = runningAttempts;
+    if (taskStatus != null) {
+      this.taskStatus = taskStatus;
+    }
+    else { 
+      if (taskID.isMap()) {
+        this.taskStatus = new MapTaskStatus[]{};
+      }
+      else {
+        this.taskStatus = new ReduceTaskStatus[]{};
+      }
+    }
+    
+  }
+
+  @Override
+  public double getProgress() {
+    return progress;
+  }
+
+  @Override
+  public TaskID getTaskID() {
+    return taskID;
+  }
+
+  @Override
+  public int numKilledAttempts() {
+    return killedAttempts;
+  }
+
+  @Override
+  public int numFailedAttempts() {
+    return failedAttempts;
+  }
+
+  @Override
+  public int numRunningAttempts() {
+    return runningAttempts;
+  }
+
+  @Override
+  public void readFields(DataInput in) throws IOException {
+    taskID.readFields(in);
+    progress = in.readDouble();
+    runningAttempts = in.readInt();
+    killedAttempts = in.readInt();
+    failedAttempts = in.readInt();
+    int size = in.readInt();
+    if (taskID.isMap()) {
+      taskStatus = new MapTaskStatus[size];
+    }
+    else {
+      taskStatus = new ReduceTaskStatus[size];
+    }
+    for (int i = 0; i < size; i++) {
+      if (taskID.isMap()) {
+        taskStatus[i] = new MapTaskStatus();
+      }
+      else {
+        taskStatus[i] = new ReduceTaskStatus();
+      }
+      taskStatus[i].readFields(in);
+    }
+  }
+
+  @Override
+  public void write(DataOutput out) throws IOException {
+    taskID.write(out);
+    out.writeDouble(progress);
+    out.writeInt(runningAttempts);
+    out.writeInt(killedAttempts);
+    out.writeInt(failedAttempts);
+    out.writeInt(taskStatus.length);
+    for (TaskStatus t : taskStatus) {
+      t.write(out);
+    }
+  }
+  
+  @Override
+  public TaskStatus[] getTaskStatus() {
+    return taskStatus;
+  }
+}
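
A minimal, hypothetical round-trip sketch (not part of this patch), assuming it
lives in the org.apache.hadoop.mapred package since TaskInfoImpl is
package-private; write() and readFields() must walk the fields in the same order:

    package org.apache.hadoop.mapred;

    import org.apache.hadoop.io.DataInputBuffer;
    import org.apache.hadoop.io.DataOutputBuffer;
    import org.apache.hadoop.mapreduce.TaskID;

    class TaskInfoRoundTripSketch {
      public static void main(String[] args) throws Exception {
        // Passing null for the status array lets the constructor pick an empty one.
        TaskInfoImpl original = new TaskInfoImpl(new TaskID(), 0.5, 1, 0, 0, null);

        DataOutputBuffer out = new DataOutputBuffer();
        original.write(out);

        DataInputBuffer in = new DataInputBuffer();
        in.reset(out.getData(), out.getLength());

        TaskInfoImpl copy = new TaskInfoImpl();
        copy.readFields(in);
        System.out.println("progress after round trip: " + copy.getProgress());
      }
    }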

Added: hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapred/TestCluster.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapred/TestCluster.java?rev=1077176&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapred/TestCluster.java (added)
+++ hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapred/TestCluster.java Fri Mar  4 03:48:49 2011
@@ -0,0 +1,101 @@
+package org.apache.hadoop.mapred;
+
+import java.util.Collection;
+
+import junit.framework.Assert;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.examples.SleepJob;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.mapreduce.test.system.JTClient;
+import org.apache.hadoop.mapreduce.test.system.JTProtocol;
+import org.apache.hadoop.mapreduce.test.system.JobInfo;
+import org.apache.hadoop.mapreduce.test.system.MRCluster;
+import org.apache.hadoop.mapreduce.test.system.TTClient;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class TestCluster {
+
+  private static final Log LOG = LogFactory.getLog(TestCluster.class);
+
+  private static MRCluster cluster;
+
+  public TestCluster() throws Exception {
+    
+  }
+
+  @BeforeClass
+  public static void before() throws Exception {
+    cluster = MRCluster.createCluster(new Configuration());
+    cluster.setUp();
+  }
+
+  @AfterClass
+  public static void after() throws Exception {
+    cluster.tearDown();
+  }
+
+  @Test
+  public void testProcessInfo() throws Exception {
+    LOG.info("Process info of master is : "
+        + cluster.getMaster().getProcessInfo());
+    Assert.assertNotNull(cluster.getMaster().getProcessInfo());
+    Collection<TTClient> slaves = cluster.getSlaves().values();
+    for (TTClient slave : slaves) {
+      LOG.info("Process info of slave is : " + slave.getProcessInfo());
+      Assert.assertNotNull(slave.getProcessInfo());
+    }
+  }
+  
+  @Test
+  public void testJobSubmission() throws Exception {
+    Configuration conf = new Configuration(cluster.getConf());
+    JTProtocol wovenClient = cluster.getMaster().getProxy();
+    JobInfo[] jobs = wovenClient.getAllJobInfo();
+    SleepJob job = new SleepJob();
+    job.setConf(conf);
+    conf = job.setupJobConf(1, 1, 100, 100, 100, 100);
+    RunningJob rJob = cluster.getMaster().submitAndVerifyJob(conf);
+    cluster.getMaster().verifyJobHistory(rJob.getID());
+  }
+
+  @Test
+  public void testFileStatus() throws Exception {
+    JTClient jt = cluster.getMaster();
+    String dir = ".";
+    checkFileStatus(jt.getFileStatus(dir, true));
+    checkFileStatus(jt.listStatus(dir, false, true), dir);
+    for (TTClient tt : cluster.getSlaves().values()) {
+      String[] localDirs = tt.getMapredLocalDirs();
+      for (String localDir : localDirs) {
+        checkFileStatus(tt.listStatus(localDir, true, false), localDir);
+        checkFileStatus(tt.listStatus(localDir, true, true), localDir);
+      }
+    }
+    String systemDir = jt.getClient().getSystemDir().toString();
+    checkFileStatus(jt.listStatus(systemDir, false, true), systemDir);
+    checkFileStatus(jt.listStatus(jt.getLogDir(), true, true), jt.getLogDir());
+  }
+
+  private void checkFileStatus(FileStatus[] fs, String path) {
+    Assert.assertNotNull(fs);
+    LOG.info("-----Listing for " + path + "  " + fs.length);
+    for (FileStatus fz : fs) {
+      checkFileStatus(fz);
+    }
+  }
+
+  private void checkFileStatus(FileStatus fz) {
+    Assert.assertNotNull(fz);
+    LOG.info("FileStatus is " + fz.getPath() 
+        + "  " + fz.getPermission()
+        +"  " + fz.getOwner()
+        +"  " + fz.getGroup()
+        +"  " + fz.getClass());
+  }
+
+}

Added: hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapred/TestSortValidate.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapred/TestSortValidate.java?rev=1077176&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapred/TestSortValidate.java (added)
+++ hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapred/TestSortValidate.java Fri Mar  4 03:48:49 2011
@@ -0,0 +1,181 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapred;
+
+import junit.framework.Assert;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.util.ToolRunner;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.examples.RandomWriter;
+import org.apache.hadoop.examples.Sort;
+
+import org.apache.hadoop.mapreduce.test.system.JTProtocol;
+import org.apache.hadoop.mapreduce.test.system.JobInfo;
+import org.apache.hadoop.mapreduce.test.system.MRCluster;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+/**
+ * A System test to test the Map-Reduce framework's sort 
+ * with a real Map-Reduce Cluster.
+ */
+public class TestSortValidate {
+  // Input/Output paths for sort
+  private static final Path SORT_INPUT_PATH = new Path("inputDirectory");
+  private static final Path SORT_OUTPUT_PATH = new Path("outputDirectory");
+
+  // make it big enough to cause a spill in the map
+  private static final int RW_BYTES_PER_MAP = 3 * 1024 * 1024;
+  private static final int RW_MAPS_PER_HOST = 2;
+
+  private MRCluster cluster = null;
+  private FileSystem dfs = null;
+  private JobClient client = null;
+
+  private static final Log LOG = LogFactory.getLog(TestSortValidate.class);
+
+  public TestSortValidate()
+  throws Exception {
+    cluster = MRCluster.createCluster(new Configuration());
+  }
+
+  @Before
+  public void setUp() throws java.lang.Exception {
+    cluster.setUp();
+    client = cluster.getMaster().getClient();
+
+    dfs = client.getFs();
+    dfs.delete(SORT_INPUT_PATH, true);
+    dfs.delete(SORT_OUTPUT_PATH, true);
+  }
+
+  @After
+  public void after() throws Exception {
+    cluster.tearDown();
+    dfs.delete(SORT_INPUT_PATH, true);
+    dfs.delete(SORT_OUTPUT_PATH, true);
+  }
+
+  public void runRandomWriter(Configuration job, Path sortInput) 
+  throws Exception {
+    // Scale down the default settings for RandomWriter for the test-case
+    // Generates NUM_HADOOP_SLAVES * RW_MAPS_PER_HOST * RW_BYTES_PER_MAP
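+    // e.g. with 4 task trackers: 4 hosts * 2 maps * 3 MB = roughly 24 MB of input.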
+    job.setInt("test.randomwrite.bytes_per_map", RW_BYTES_PER_MAP);
+    job.setInt("test.randomwriter.maps_per_host", RW_MAPS_PER_HOST);
+    String[] rwArgs = {sortInput.toString()};
+ 
+    runAndVerify(job, new RandomWriter(), rwArgs);
+  }
+
+  private void runAndVerify(Configuration job, Tool tool, String[] args)
+    throws Exception {
+
+    // Number of jobs that had already been submitted before this new
+    // job is launched; used to detect when the new job appears.
+    int prevJobsNum = 0;
+
+    // Client proxy into the JobTracker provided by the test framework.
+    JTProtocol wovenClient = cluster.getMaster().getProxy();
+
+    // Statuses of all jobs known to the JobTracker.
+    JobStatus[] jobStatus = null;
+
+    // ID of the job submitted by this run.
+    JobID id = null;
+
+    // Handle to the running job.
+    RunningJob rJob = null;
+
+    // Test-framework view of the submitted job.
+    JobInfo jInfo = null;
+
+    // Record how many jobs have been submitted so far.
+    jobStatus = client.getAllJobs();
+    prevJobsNum = jobStatus.length;
+
+    // Run the tool and expect a zero exit status.
+    Assert.assertEquals(ToolRunner.run(job, tool, args), 0);
+
+    // Wait for the new job to appear in the job status list.
+    jobStatus = client.getAllJobs();
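+    // NOTE: this poll has no timeout; if the job never appears the test hangs here.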
+
+    while (jobStatus.length - prevJobsNum == 0) {
+      LOG.info("Waiting for the job to appear in the jobStatus");
+      Thread.sleep(1000);
+      jobStatus = client.getAllJobs();
+    }
+
+    // Get the ID of the job that was just submitted.
+    // The most recently submitted job occupies the first slot of the status array.
+    id = jobStatus[0].getJobID();
+
+    rJob = client.getJob(id);
+
+    jInfo = wovenClient.getJobInfo(id);
+
+    // Wait until the JobTracker reports the job as complete.
+    while (jInfo != null && !jInfo.getStatus().isJobComplete()) {
+      Thread.sleep(10000);
+      jInfo = wovenClient.getJobInfo(id);
+    }
+
+    cluster.getMaster().verifyCompletedJob(id);
+  }
+  
+  private void runSort(Configuration job, Path sortInput, Path sortOutput) 
+  throws Exception {
+
+    job.setInt("io.sort.mb", 1);
+
+    // Setup command-line arguments to 'sort'
+    String[] sortArgs = {sortInput.toString(), sortOutput.toString()};
+    
+    runAndVerify(job, new Sort(), sortArgs);
+
+  }
+  
+  private void runSortValidator(Configuration job, 
+                                       Path sortInput, Path sortOutput) 
+  throws Exception {
+    String[] svArgs = {"-sortInput", sortInput.toString(), 
+                       "-sortOutput", sortOutput.toString()};
+
+    runAndVerify(job, new SortValidator(), svArgs);
+
+  }
+ 
+  @Test 
+  public void testMapReduceSort() throws Exception {
+    // Run randomwriter to generate input for 'sort'
+    runRandomWriter(cluster.getConf(), SORT_INPUT_PATH);
+
+    // Run sort
+    runSort(cluster.getConf(), SORT_INPUT_PATH, SORT_OUTPUT_PATH);
+
+    // Run sort-validator to check if sort worked correctly
+    runSortValidator(cluster.getConf(), SORT_INPUT_PATH, 
+                     SORT_OUTPUT_PATH);
+  }
+}

Added: hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapred/TestTaskOwner.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapred/TestTaskOwner.java?rev=1077176&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapred/TestTaskOwner.java (added)
+++ hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapred/TestTaskOwner.java Fri Mar  4 03:48:49 2011
@@ -0,0 +1,129 @@
+package org.apache.hadoop.mapred;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.StringTokenizer;
+
+import junit.framework.Assert;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
+
+import org.apache.hadoop.examples.SleepJob;
+import org.apache.hadoop.examples.WordCount.IntSumReducer;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.mapreduce.OutputFormat;
+import org.apache.hadoop.mapreduce.Reducer;
+import org.apache.hadoop.mapred.TextOutputFormat;
+
+import org.apache.hadoop.mapreduce.test.system.JTClient;
+import org.apache.hadoop.mapreduce.test.system.MRCluster;
+import org.apache.hadoop.mapreduce.test.system.TTClient;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.io.Text;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import testjar.UserNamePermission;
+
+public class TestTaskOwner {
+  private static final Log LOG = LogFactory.getLog(TestTaskOwner.class);
+  private static Path outDir = new Path("output");
+  private static Path inDir = new Path("input");
+  public static MRCluster cluster;
+
+  // The submitted job writes the task's user name to the output file,
+  // which the test parses afterwards.
+
+  @BeforeClass
+  public static void setUp() throws java.lang.Exception {
+
+    cluster = MRCluster.createCluster(new Configuration());
+    cluster.setUp();
+    FileSystem fs = inDir.getFileSystem(cluster.getMaster().getConf());
+    fs.create(inDir);
+  }
+
+  @Test
+  public void testProcessPermission() throws Exception {
+    // The user submits a plain old map-reduce job.
+    // The job outputs the user name of the task that runs in the cluster,
+    // and the test then verifies that it matches the user name
+    // of the user who submitted the job.
+
+    Configuration conf = cluster.getMaster().getConf();
+    Job job = new Job(conf, "user name check");
+
+    job.setJarByClass(UserNamePermission.class);
+    job.setMapperClass(UserNamePermission.UserNameMapper.class);
+    job.setCombinerClass(UserNamePermission.UserNameReducer.class);
+    job.setMapOutputKeyClass(Text.class);
+    job.setMapOutputValueClass(Text.class);
+
+    job.setReducerClass(UserNamePermission.UserNameReducer.class);
+    job.setNumReduceTasks(1);
+
+    FileInputFormat.addInputPath(job, inDir);
+    FileOutputFormat.setOutputPath(job, outDir);
+
+    job.waitForCompletion(true);
+
+    // Now verify that the user name written by the task
+    // is the same as the user name that was used
+    // to launch the task in the first place.
+    FileSystem fs = outDir.getFileSystem(conf);
+    StringBuffer result = new StringBuffer();
+
+    Path[] fileList = FileUtil.stat2Paths(fs.listStatus(outDir,
+     new Utils.OutputFileUtils.OutputFilesFilter()));
+
+    for (int i = 0; i < fileList.length; ++i) {
+      LOG.info("File list[" + i + "]: " + fileList[i]);
+      BufferedReader file = new BufferedReader(new InputStreamReader(fs
+          .open(fileList[i])));
+      String line = file.readLine();
+      while (line != null) {
+        StringTokenizer token = new StringTokenizer(line);
+        if (token.hasMoreTokens()) {
+          LOG.info("First token " + token.nextToken());
+          String userName = token.nextToken();
+
+          LOG.info("Next token " + userName);
+          Assert.assertEquals(
+              "The user name did not match; permission violation",
+              System.getProperty("user.name"), userName);
+          break;
+        }
+        // Skip lines without tokens instead of looping on the same line forever.
+        line = file.readLine();
+      }
+      file.close();
+    }
+
+  }
+
+  @AfterClass
+  public static void tearDown() throws java.lang.Exception {
+    FileSystem fs = outDir.getFileSystem(cluster.getMaster().getConf());
+    fs.delete(outDir, true);
+    cluster.tearDown();
+  }
+
+}
+
+


