From: t...@apache.org
Subject: svn commit: r1463203 - in /hadoop/common/branches/HDFS-347: ./ dev-support/ hadoop-assemblies/src/main/resources/assemblies/ hadoop-client/ hadoop-dist/ hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/protoc/ hadoop-project-dist/ hado...
Date: Mon, 01 Apr 2013 16:47:25 GMT
Author: todd
Date: Mon Apr  1 16:47:16 2013
New Revision: 1463203

URL: http://svn.apache.org/r1463203
Log:
Complete commit of prior merge.

The previous merge accidentally committed only the hdfs project, omitting common
and MR.

Added:
    hadoop/common/branches/HDFS-347/dev-support/findHangingTest.sh
      - copied unchanged from r1462625, hadoop/common/trunk/dev-support/findHangingTest.sh
    hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-gridmix/src/test/java/org/apache/hadoop/mapred/gridmix/DummyResourceCalculatorPlugin.java
      - copied unchanged from r1462625, hadoop/common/trunk/hadoop-tools/hadoop-gridmix/src/test/java/org/apache/hadoop/mapred/gridmix/DummyResourceCalculatorPlugin.java
Modified:
    hadoop/common/branches/HDFS-347/   (props changed)
    hadoop/common/branches/HDFS-347/BUILDING.txt
    hadoop/common/branches/HDFS-347/dev-support/test-patch.sh
    hadoop/common/branches/HDFS-347/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml
    hadoop/common/branches/HDFS-347/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-dist.xml
    hadoop/common/branches/HDFS-347/hadoop-client/pom.xml
    hadoop/common/branches/HDFS-347/hadoop-dist/pom.xml
    hadoop/common/branches/HDFS-347/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/protoc/ProtocMojo.java
    hadoop/common/branches/HDFS-347/hadoop-project-dist/pom.xml
    hadoop/common/branches/HDFS-347/hadoop-project/pom.xml
    hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyListing.java
    hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java
    hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpConstants.java
    hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java
    hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java
    hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/ThrottledInputStream.java
    hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestCopyListing.java
    hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestIntegration.java
    hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestUniformSizeInputFormat.java
    hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/LoadJob.java
    hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/emulators/resourceusage/CumulativeCpuUsageEmulatorPlugin.java
    hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/emulators/resourceusage/ResourceUsageEmulatorPlugin.java
    hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/emulators/resourceusage/ResourceUsageMatcher.java
    hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/emulators/resourceusage/TotalHeapUsageEmulatorPlugin.java
    hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-gridmix/src/test/java/org/apache/hadoop/mapred/gridmix/TestGridmixMemoryEmulation.java
    hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-gridmix/src/test/java/org/apache/hadoop/mapred/gridmix/TestGridmixSummary.java
    hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-gridmix/src/test/java/org/apache/hadoop/mapred/gridmix/TestResourceUsageEmulators.java
    hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-pipes/src/CMakeLists.txt
    hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java
    hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamReduceNone.java
    hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamXmlRecordReader.java
    hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingTaskLog.java
    hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestSymLink.java

Propchange: hadoop/common/branches/HDFS-347/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk:r1446831-1462625

Modified: hadoop/common/branches/HDFS-347/BUILDING.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/BUILDING.txt?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/BUILDING.txt (original)
+++ hadoop/common/branches/HDFS-347/BUILDING.txt Mon Apr  1 16:47:16 2013
@@ -88,6 +88,33 @@ Maven build goals:
   * -Dtest.exclude.pattern=**/<TESTCLASSNAME1>.java,**/<TESTCLASSNAME2>.java
 
 ----------------------------------------------------------------------------------
+Building components separately
+
+If you are building a submodule directory, all of its Hadoop dependencies are
+resolved like any other third-party dependency: from the Maven cache, or from
+a Maven repository if they are not available in the cache (or the SNAPSHOT
+has 'timed out').
+An alternative is to run 'mvn install -DskipTests' once from the top level of
+the Hadoop source tree, and then work from the submodule. Keep in mind that
+SNAPSHOTs time out after a while; the Maven '-nsu' option stops Maven from
+trying to update SNAPSHOTs from external repos.
+
+----------------------------------------------------------------------------------
+Importing projects to eclipse
+
+When you import the project to eclipse, install hadoop-maven-plugins first.
+
+  $ cd hadoop-maven-plugins
+  $ mvn install
+
+Then, generate eclipse project files.
+
+  $ mvn eclipse:eclipse -DskipTests
+
+Finally, import into eclipse by specifying the root directory of the project
+via [File] > [Import] > [Existing Projects into Workspace].
+
+----------------------------------------------------------------------------------
 Building distributions:
 
 Create binary distribution without native code and without documentation:
@@ -111,3 +138,69 @@ Create a local staging version of the we
   $ mvn clean site; mvn site:stage -DstagingDirectory=/tmp/hadoop-site
 
 ----------------------------------------------------------------------------------
+
+Building on OS/X
+
+----------------------------------------------------------------------------------
+
+Hadoop does not build on OS/X with Java 7.
+See: https://issues.apache.org/jira/browse/HADOOP-9350
+
+----------------------------------------------------------------------------------
+
+Building on Windows
+
+----------------------------------------------------------------------------------
+Requirements:
+
+* Windows System
+* JDK 1.6
+* Maven 3.0
+* Windows SDK or Visual Studio 2010 Professional
+* ProtocolBuffer 2.4.1+ (for MapReduce and HDFS)
+* Findbugs 1.3.9 (if running findbugs)
+* Unix command-line tools from GnuWin32 or Cygwin: sh, mkdir, rm, cp, tar, gzip
+* Internet connection for first build (to fetch all Maven and Hadoop dependencies)
+
+If using Visual Studio, it must be Visual Studio 2010 Professional (not 2012).
+Do not use Visual Studio Express.  It does not support compiling for 64-bit,
+which is problematic if running a 64-bit system.  The Windows SDK is free to
+download here:
+
+http://www.microsoft.com/en-us/download/details.aspx?id=8279
+
+----------------------------------------------------------------------------------
+Building:
+
+Keep the source code tree in a short path to avoid running into problems related
+to the Windows maximum path length limitation (for example, C:\hdc).
+
+Run builds from a Windows SDK Command Prompt.  (Start, All Programs,
+Microsoft Windows SDK v7.1, Windows SDK 7.1 Command Prompt.)
+
+JAVA_HOME must be set, and the path must not contain spaces.  If the full path
+would contain spaces, then use the Windows short path instead.
+
+You must set the Platform environment variable to either x64 or Win32 depending
+on whether you're running a 64-bit or 32-bit system.  Note that this is
+case-sensitive.  It must be "Platform", not "PLATFORM" or "platform".
+Environment variables on Windows are usually case-insensitive, but Maven treats
+them as case-sensitive.  Failure to set this environment variable correctly will
+cause msbuild to fail while building the native code in hadoop-common.
+
+set Platform=x64 (when building on a 64-bit system)
+set Platform=Win32 (when building on a 32-bit system)
+
+Several tests require that the user hold the Create Symbolic Links
+privilege.
+
+All Maven goals are the same as described above with the exception that
+native code is built by enabling the 'native-win' Maven profile. -Pnative-win 
+is enabled by default when building on Windows since the native components 
+are required (not optional) on Windows.
+
+----------------------------------------------------------------------------------
+Building distributions:
+
+ * Build distribution with native code    : mvn package [-Pdist][-Pdocs][-Psrc][-Dtar]
+

Modified: hadoop/common/branches/HDFS-347/dev-support/test-patch.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/dev-support/test-patch.sh?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/dev-support/test-patch.sh (original)
+++ hadoop/common/branches/HDFS-347/dev-support/test-patch.sh Mon Apr  1 16:47:16 2013
@@ -418,8 +418,8 @@ checkJavadocWarnings () {
   echo ""
   echo "There appear to be $javadocWarnings javadoc warnings generated by the patched build."
 
-  #There are 6 warnings that are caused by the use of sun internal APIs.
-  OK_JAVADOC_WARNINGS=6;
+  #There are 11 warnings that are caused by the use of sun internal APIs.
+  OK_JAVADOC_WARNINGS=11;
   ### if current warnings greater than OK_JAVADOC_WARNINGS
   if [[ $javadocWarnings -ne $OK_JAVADOC_WARNINGS ]] ; then
     JIRA_COMMENT="$JIRA_COMMENT

Modified: hadoop/common/branches/HDFS-347/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml (original)
+++ hadoop/common/branches/HDFS-347/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml Mon Apr  1 16:47:16 2013
@@ -26,6 +26,9 @@
       <outputDirectory>/bin</outputDirectory>
       <excludes>
         <exclude>*.sh</exclude>
+        <exclude>*-config.cmd</exclude>
+        <exclude>start-*.cmd</exclude>
+        <exclude>stop-*.cmd</exclude>
       </excludes>
       <fileMode>0755</fileMode>
     </fileSet>
@@ -38,6 +41,7 @@
       <outputDirectory>/libexec</outputDirectory>
       <includes>
         <include>*-config.sh</include>
+        <include>*-config.cmd</include>
       </includes>
       <fileMode>0755</fileMode>
     </fileSet>
@@ -46,9 +50,13 @@
       <outputDirectory>/sbin</outputDirectory>
       <includes>
         <include>*.sh</include>
+        <include>*.cmd</include>
       </includes>
       <excludes>
         <exclude>hadoop-config.sh</exclude>
+        <exclude>hadoop.cmd</exclude>
+        <exclude>hdfs.cmd</exclude>
+        <exclude>hadoop-config.cmd</exclude>
       </excludes>
       <fileMode>0755</fileMode>
     </fileSet>

Modified: hadoop/common/branches/HDFS-347/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-dist.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-dist.xml?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-dist.xml (original)
+++ hadoop/common/branches/HDFS-347/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-dist.xml Mon Apr  1 16:47:16 2013
@@ -33,6 +33,7 @@
       <outputDirectory>bin</outputDirectory>
       <includes>
         <include>yarn</include>
+        <include>yarn.cmd</include>
       </includes>
       <fileMode>0755</fileMode>
     </fileSet>
@@ -41,6 +42,7 @@
       <outputDirectory>libexec</outputDirectory>
       <includes>
         <include>yarn-config.sh</include>
+        <include>yarn-config.cmd</include>
       </includes>
       <fileMode>0755</fileMode>
     </fileSet>
@@ -52,6 +54,8 @@
         <include>yarn-daemons.sh</include>
         <include>start-yarn.sh</include>
         <include>stop-yarn.sh</include>
+        <include>start-yarn.cmd</include>
+        <include>stop-yarn.cmd</include>
       </includes>
       <fileMode>0755</fileMode>
     </fileSet>
@@ -121,7 +125,7 @@
       </includes>
       <binaries>
         <attachmentClassifier>tests</attachmentClassifier>
-        <outputDirectory>share/hadoop/${hadoop.component}</outputDirectory>
+        <outputDirectory>share/hadoop/${hadoop.component}/test</outputDirectory>
         <includeDependencies>false</includeDependencies>
         <unpack>false</unpack>
       </binaries>

Modified: hadoop/common/branches/HDFS-347/hadoop-client/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-client/pom.xml?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-client/pom.xml (original)
+++ hadoop/common/branches/HDFS-347/hadoop-client/pom.xml Mon Apr  1 16:47:16 2013
@@ -115,6 +115,14 @@
           <groupId>net.java.dev.jets3t</groupId>
           <artifactId>jets3t</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>com.jcraft</groupId>
+          <artifactId>jsch</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>commons-el</groupId>
+          <artifactId>commons-el</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>
 
@@ -131,6 +139,34 @@
           <groupId>org.apache.avro</groupId>
           <artifactId>avro</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>org.mortbay.jetty</groupId>
+          <artifactId>jetty</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.mortbay.jetty</groupId>
+          <artifactId>jetty-util</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.sun.jersey</groupId>
+          <artifactId>jersey-core</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.sun.jersey</groupId>
+          <artifactId>jersey-server</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>javax.servlet</groupId>
+          <artifactId>servlet-api</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>javax.servlet.jsp</groupId>
+          <artifactId>jsp-api</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>tomcat</groupId>
+          <artifactId>jasper-runtime</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>
 
@@ -171,6 +207,10 @@
           <groupId>jline</groupId>
           <artifactId>jline</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>io.netty</groupId>
+          <artifactId>netty</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>
 
@@ -207,6 +247,18 @@
           <groupId>org.apache.avro</groupId>
           <artifactId>avro</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>com.sun.jersey</groupId>
+          <artifactId>jersey-core</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.sun.jersey</groupId>
+          <artifactId>jersey-json</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>io.netty</groupId>
+          <artifactId>netty</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>
 
@@ -247,6 +299,14 @@
           <groupId>com.google.inject.extensions</groupId>
           <artifactId>guice-servlet</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>com.sun.jersey</groupId>
+          <artifactId>jersey-json</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>io.netty</groupId>
+          <artifactId>netty</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>
 
@@ -271,8 +331,25 @@
           <groupId>com.google.inject.extensions</groupId>
           <artifactId>guice-servlet</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>io.netty</groupId>
+          <artifactId>netty</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-annotations</artifactId>
+      <scope>compile</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>jdk.tools</groupId>
+          <artifactId>jdk.tools</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>
+    
   </dependencies>
 
 </project>

Modified: hadoop/common/branches/HDFS-347/hadoop-dist/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-dist/pom.xml?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-dist/pom.xml (original)
+++ hadoop/common/branches/HDFS-347/hadoop-dist/pom.xml Mon Apr  1 16:47:16 2013
@@ -107,7 +107,7 @@
                         fi
                       }
 
-                      ROOT=`cd ${basedir}/..;pwd`
+                      ROOT=`cd ../..;pwd`
                       echo
                       echo "Current directory `pwd`"
                       echo
@@ -151,7 +151,8 @@
                         fi
                       }
 
-                      run tar czf hadoop-${project.version}.tar.gz hadoop-${project.version}
+                      run tar cf hadoop-${project.version}.tar hadoop-${project.version}
+                      run gzip -f hadoop-${project.version}.tar
                       echo
                       echo "Hadoop dist tar available at: ${project.build.directory}/hadoop-${project.version}.tar.gz"
                       echo

Modified: hadoop/common/branches/HDFS-347/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/protoc/ProtocMojo.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/protoc/ProtocMojo.java?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/protoc/ProtocMojo.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/protoc/ProtocMojo.java Mon Apr  1 16:47:16 2013
@@ -37,7 +37,7 @@ public class ProtocMojo extends Abstract
   private MavenProject project;
 
   @Parameter
-  private List<File> imports;
+  private File[] imports;
 
   @Parameter(defaultValue="${project.build.directory}/generated-sources/java")
   private File output;
@@ -83,4 +83,4 @@ public class ProtocMojo extends Abstract
     project.addCompileSourceRoot(output.getAbsolutePath());
   }
 
-}
\ No newline at end of file
+}

Modified: hadoop/common/branches/HDFS-347/hadoop-project-dist/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-project-dist/pom.xml?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-project-dist/pom.xml (original)
+++ hadoop/common/branches/HDFS-347/hadoop-project-dist/pom.xml Mon Apr  1 16:47:16 2013
@@ -335,13 +335,7 @@
                   <target>
                     <!-- Using Unix script to preserve symlinks -->
                     <echo file="${project.build.directory}/dist-copynativelibs.sh">
-
-                      which cygpath 2&gt; /dev/null
-                      if [ $? = 1 ]; then
-                        BUILD_DIR="${project.build.directory}"
-                      else
-                        BUILD_DIR=`cygpath --unix '${project.build.directory}'`
-                      fi
+                      BUILD_DIR="${project.build.directory}"
                       TAR='tar cf -'
                       UNTAR='tar xfBp -'
                       LIB_DIR="${BUILD_DIR}/native/target/usr/local/lib"
@@ -355,6 +349,13 @@
                           $$TAR *snappy* | (cd $${TARGET_DIR}/; $$UNTAR)
                         fi
                       fi
+                      BIN_DIR="${BUILD_DIR}/bin"
+                      if [ -d $${BIN_DIR} ] ; then
+                        TARGET_BIN_DIR="${BUILD_DIR}/${project.artifactId}-${project.version}/bin"
+                        mkdir -p $${TARGET_BIN_DIR}
+                        cd $${BIN_DIR}
+                        $$TAR * | (cd $${TARGET_BIN_DIR}/; $$UNTAR)
+                      fi
                     </echo>
                     <exec executable="sh" dir="${project.build.directory}" failonerror="true">
                       <arg line="./dist-copynativelibs.sh"/>
@@ -372,15 +373,8 @@
                   <target if="tar">
                     <!-- Using Unix script to preserve symlinks -->
                     <echo file="${project.build.directory}/dist-maketar.sh">
-
-                      which cygpath 2&gt; /dev/null
-                      if [ $? = 1 ]; then
-                        BUILD_DIR="${project.build.directory}"
-                      else
-                        BUILD_DIR=`cygpath --unix '${project.build.directory}'`
-                      fi
-                      cd ${BUILD_DIR}
-                      tar czf ${project.artifactId}-${project.version}.tar.gz ${project.artifactId}-${project.version}
+                      cd "${project.build.directory}"
+                      tar cf - ${project.artifactId}-${project.version} | gzip > ${project.artifactId}-${project.version}.tar.gz
                     </echo>
                     <exec executable="sh" dir="${project.build.directory}" failonerror="true">
                       <arg line="./dist-maketar.sh"/>

Modified: hadoop/common/branches/HDFS-347/hadoop-project/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-project/pom.xml?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-project/pom.xml (original)
+++ hadoop/common/branches/HDFS-347/hadoop-project/pom.xml Mon Apr  1 16:47:16 2013
@@ -46,7 +46,7 @@
 
     <hadoop.assemblies.version>${project.version}</hadoop.assemblies.version>
 
-    <commons-daemon.version>1.0.3</commons-daemon.version>
+    <commons-daemon.version>1.0.13</commons-daemon.version>
 
     <test.build.dir>${project.build.directory}/test-dir</test.build.dir>
     <test.build.data>${test.build.dir}</test.build.data>
@@ -391,9 +391,9 @@
       </dependency>
 
       <dependency>
-        <groupId>org.jboss.netty</groupId>
+        <groupId>io.netty</groupId>
         <artifactId>netty</artifactId>
-        <version>3.2.4.Final</version>
+        <version>3.5.11.Final</version>
       </dependency>
 
       <dependency>
@@ -810,6 +810,8 @@
           <forkedProcessTimeoutInSeconds>900</forkedProcessTimeoutInSeconds>
           <argLine>-Xmx1024m -XX:+HeapDumpOnOutOfMemoryError</argLine>
           <environmentVariables>
+            <!-- HADOOP_HOME required for tests on Windows to find winutils -->
+            <HADOOP_HOME>${basedir}/../../hadoop-common-project/hadoop-common/target</HADOOP_HOME>
             <LD_LIBRARY_PATH>${env.LD_LIBRARY_PATH}:${project.build.directory}/native/target/usr/local/lib:${basedir}/../../hadoop-common-project/hadoop-common/target/native/target/usr/local/lib/</LD_LIBRARY_PATH>
             <MALLOC_ARENA_MAX>4</MALLOC_ARENA_MAX>
           </environmentVariables>
@@ -862,15 +864,6 @@
       <properties>
         <build.platform>${os.name}-${os.arch}-${sun.arch.data.model}</build.platform>
       </properties>
-      <dependencies>
-        <dependency>
-          <groupId>jdk.tools</groupId>
-          <artifactId>jdk.tools</artifactId>
-          <version>1.6</version>
-          <scope>system</scope>
-          <systemPath>${java.home}/../lib/tools.jar</systemPath>
-        </dependency>
-      </dependencies>
     </profile>
     <profile>
       <id>os.mac</id>
@@ -884,6 +877,28 @@
       </properties>
     </profile>
     <profile>
+      <id>native-win</id>
+      <activation>
+        <os>
+          <family>Windows</family>
+        </os>
+      </activation>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-surefire-plugin</artifactId>
+            <configuration>
+              <environmentVariables>
+                <!-- Specify where to look for the native DLL on Windows -->
+                <PATH>${env.PATH};${basedir}/../../hadoop-common-project/hadoop-common/target/bin</PATH>
+              </environmentVariables>
+            </configuration>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
+    <profile>
       <id>test-patch</id>
       <activation>
         <activeByDefault>false</activeByDefault>

Modified: hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyListing.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyListing.java?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyListing.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyListing.java Mon Apr  1 16:47:16 2013
@@ -30,6 +30,7 @@ import org.apache.hadoop.tools.util.Dist
 import org.apache.hadoop.security.Credentials;
 
 import java.io.IOException;
+import java.lang.reflect.Constructor;
 
 /**
  * The CopyListing abstraction is responsible for how the list of
@@ -193,14 +194,34 @@ public abstract class CopyListing extend
    * @param credentials Credentials object on which the FS delegation tokens are cached
    * @param options The input Options, to help choose the appropriate CopyListing Implementation.
    * @return An instance of the appropriate CopyListing implementation.
+   * @throws java.io.IOException - Exception if any
    */
   public static CopyListing getCopyListing(Configuration configuration,
                                            Credentials credentials,
-                                           DistCpOptions options) {
-    if (options.getSourceFileListing() == null) {
-      return new GlobbedCopyListing(configuration, credentials);
-    } else {
-      return new FileBasedCopyListing(configuration, credentials);
+                                           DistCpOptions options)
+      throws IOException {
+
+    String copyListingClassName = configuration.get(DistCpConstants.
+        CONF_LABEL_COPY_LISTING_CLASS, "");
+    Class<? extends CopyListing> copyListingClass;
+    try {
+      if (! copyListingClassName.isEmpty()) {
+        copyListingClass = configuration.getClass(DistCpConstants.
+            CONF_LABEL_COPY_LISTING_CLASS, GlobbedCopyListing.class,
+            CopyListing.class);
+      } else {
+        if (options.getSourceFileListing() == null) {
+            copyListingClass = GlobbedCopyListing.class;
+        } else {
+            copyListingClass = FileBasedCopyListing.class;
+        }
+      }
+      copyListingClassName = copyListingClass.getName();
+      Constructor<? extends CopyListing> constructor = copyListingClass.
+          getDeclaredConstructor(Configuration.class, Credentials.class);
+      return constructor.newInstance(configuration, credentials);
+    } catch (Exception e) {
+      throw new IOException("Unable to instantiate " + copyListingClassName, e);
     }
   }
 

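For illustration, a minimal sketch of driving the new distcp.copy.listing.class
override from client code. The class name CopyListingOverrideExample and the
/src and /dst paths are hypothetical, and error handling is elided:

  import java.util.Arrays;

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.security.Credentials;
  import org.apache.hadoop.tools.CopyListing;
  import org.apache.hadoop.tools.DistCpConstants;
  import org.apache.hadoop.tools.DistCpOptions;
  import org.apache.hadoop.tools.GlobbedCopyListing;

  public class CopyListingOverrideExample {
    public static void main(String[] args) throws Exception {
      Configuration conf = new Configuration();
      // Pick the CopyListing implementation explicitly; when this key is
      // unset, getCopyListing() keeps the old Globbed/FileBased selection.
      conf.setClass(DistCpConstants.CONF_LABEL_COPY_LISTING_CLASS,
          GlobbedCopyListing.class, CopyListing.class);
      DistCpOptions options = new DistCpOptions(
          Arrays.asList(new Path("/src")), new Path("/dst"));
      // The factory reflectively invokes the (Configuration, Credentials)
      // constructor of the configured class, wrapping failures in IOException.
      CopyListing listing =
          CopyListing.getCopyListing(conf, new Credentials(), options);
      System.out.println("Using " + listing.getClass().getName());
    }
  }
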
Modified: hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java Mon Apr  1 16:47:16 2013
@@ -319,7 +319,7 @@ public class DistCp extends Configured i
    * @return Returns the path where the copy listing is created
    * @throws IOException - If any
    */
-  private Path createInputFileListing(Job job) throws IOException {
+  protected Path createInputFileListing(Job job) throws IOException {
     Path fileListingPath = getFileListingPath();
     CopyListing copyListing = CopyListing.getCopyListing(job.getConfiguration(),
         job.getCredentials(), inputOptions);
@@ -334,7 +334,7 @@ public class DistCp extends Configured i
    * @return - Path where the copy listing file has to be saved
    * @throws IOException - Exception if any
    */
-  private Path getFileListingPath() throws IOException {
+  protected Path getFileListingPath() throws IOException {
     String fileListPathStr = metaFolder + "/fileList.seq";
     Path path = new Path(fileListPathStr);
     return new Path(path.toUri().normalize().toString());

Modified: hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpConstants.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpConstants.java?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpConstants.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpConstants.java Mon Apr  1 16:47:16 2013
@@ -82,6 +82,9 @@ public class DistCpConstants {
   /* Meta folder where the job's intermediate data is kept */
   public static final String CONF_LABEL_META_FOLDER = "distcp.meta.folder";
 
+  /* DistCp CopyListing class override param */
+  public static final String CONF_LABEL_COPY_LISTING_CLASS = "distcp.copy.listing.class";
+
   /**
    * Conf label for SSL Trust-store location.
    */

Modified: hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java Mon Apr  1 16:47:16 2013
@@ -127,17 +127,20 @@ public class SimpleCopyListing extends C
             if (LOG.isDebugEnabled()) {
               LOG.debug("Recording source-path: " + sourceStatus.getPath() + " for copy.");
             }
-            writeToFileListing(fileListWriter, sourceStatus, sourcePathRoot, localFile);
+            writeToFileListing(fileListWriter, sourceStatus, sourcePathRoot,
+                localFile, options);
 
             if (isDirectoryAndNotEmpty(sourceFS, sourceStatus)) {
               if (LOG.isDebugEnabled()) {
                 LOG.debug("Traversing non-empty source dir: " + sourceStatus.getPath());
               }
-              traverseNonEmptyDirectory(fileListWriter, sourceStatus, sourcePathRoot, localFile);
+              traverseNonEmptyDirectory(fileListWriter, sourceStatus, sourcePathRoot,
+                  localFile, options);
             }
           }
         } else {
-          writeToFileListing(fileListWriter, rootStatus, sourcePathRoot, localFile);
+          writeToFileListing(fileListWriter, rootStatus, sourcePathRoot,
+              localFile, options);
         }
       }
     } finally {
@@ -169,6 +172,17 @@ public class SimpleCopyListing extends C
     }
   }
 
+  /**
+   * Provide an option to skip copying a path. Allows for exclusion of files
+   * such as {@link org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter#SUCCEEDED_FILE_NAME}.
+   * @param path - Path being considered for copy while building the file listing
+   * @param options - Input options passed during DistCp invocation
+   * @return - True if the path should be considered for copy, false otherwise
+   */
+  protected boolean shouldCopy(Path path, DistCpOptions options) {
+    return true;
+  }
+
   /** {@inheritDoc} */
   @Override
   protected long getBytesToCopy() {
@@ -210,7 +224,9 @@ public class SimpleCopyListing extends C
 
   private void traverseNonEmptyDirectory(SequenceFile.Writer fileListWriter,
                                          FileStatus sourceStatus,
-                                         Path sourcePathRoot, boolean localFile)
+                                         Path sourcePathRoot,
+                                         boolean localFile,
+                                         DistCpOptions options)
                                          throws IOException {
     FileSystem sourceFS = sourcePathRoot.getFileSystem(getConf());
     Stack<FileStatus> pathStack = new Stack<FileStatus>();
@@ -221,7 +237,8 @@ public class SimpleCopyListing extends C
         if (LOG.isDebugEnabled())
           LOG.debug("Recording source-path: "
                     + sourceStatus.getPath() + " for copy.");
-        writeToFileListing(fileListWriter, child, sourcePathRoot, localFile);
+        writeToFileListing(fileListWriter, child, sourcePathRoot,
+             localFile, options);
         if (isDirectoryAndNotEmpty(sourceFS, child)) {
           if (LOG.isDebugEnabled())
             LOG.debug("Traversing non-empty source dir: "
@@ -233,8 +250,10 @@ public class SimpleCopyListing extends C
   }
 
   private void writeToFileListing(SequenceFile.Writer fileListWriter,
-                                  FileStatus fileStatus, Path sourcePathRoot,
-                                  boolean localFile) throws IOException {
+                                  FileStatus fileStatus,
+                                  Path sourcePathRoot,
+                                  boolean localFile,
+                                  DistCpOptions options) throws IOException {
     if (fileStatus.getPath().equals(sourcePathRoot) && fileStatus.isDirectory())
       return; // Skip the root-paths.
 
@@ -248,6 +267,10 @@ public class SimpleCopyListing extends C
       status = getFileStatus(fileStatus);
     }
 
+    if (!shouldCopy(fileStatus.getPath(), options)) {
+      return;
+    }
+
     fileListWriter.append(new Text(DistCpUtils.getRelativePath(sourcePathRoot,
         fileStatus.getPath())), status);
     fileListWriter.sync();

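The new shouldCopy() hook is intended for subclasses; the TestCopyListing
change below exercises it with an anonymous subclass. As a standalone sketch
(the class name NoSuccessMarkerCopyListing is hypothetical), a listing that
skips FileOutputCommitter success markers could look like:

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter;
  import org.apache.hadoop.security.Credentials;
  import org.apache.hadoop.tools.DistCpOptions;
  import org.apache.hadoop.tools.SimpleCopyListing;

  // Skip FileOutputCommitter success markers (_SUCCESS) when building the
  // copy listing; every other path is copied as usual.
  public class NoSuccessMarkerCopyListing extends SimpleCopyListing {

    // This (Configuration, Credentials) constructor is the one that
    // CopyListing.getCopyListing() looks up reflectively.
    public NoSuccessMarkerCopyListing(Configuration configuration,
                                      Credentials credentials) {
      super(configuration, credentials);
    }

    @Override
    protected boolean shouldCopy(Path path, DistCpOptions options) {
      return !path.getName().equals(FileOutputCommitter.SUCCEEDED_FILE_NAME);
    }
  }

Such a subclass can then be selected per job via distcp.copy.listing.class,
since getCopyListing() instantiates whatever class that key names.
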
Modified: hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java Mon Apr  1 16:47:16 2013
@@ -124,7 +124,7 @@ public class RetriableFileCopyCommand ex
             tmpTargetPath, true, BUFFER_SIZE,
             getReplicationFactor(fileAttributes, sourceFileStatus, targetFS, tmpTargetPath),
             getBlockSize(fileAttributes, sourceFileStatus, targetFS, tmpTargetPath), context));
-    return copyBytes(sourceFileStatus, outStream, BUFFER_SIZE, true, context);
+    return copyBytes(sourceFileStatus, outStream, BUFFER_SIZE, context);
   }
 
   private void compareFileLengths(FileStatus sourceFileStatus, Path target,
@@ -170,8 +170,8 @@ public class RetriableFileCopyCommand ex
   }
 
   private long copyBytes(FileStatus sourceFileStatus, OutputStream outStream,
-                         int bufferSize, boolean mustCloseStream,
-                         Mapper.Context context) throws IOException {
+                         int bufferSize, Mapper.Context context)
+      throws IOException {
     Path source = sourceFileStatus.getPath();
     byte buf[] = new byte[bufferSize];
     ThrottledInputStream inStream = null;
@@ -187,8 +187,7 @@ public class RetriableFileCopyCommand ex
         bytesRead = inStream.read(buf);
       }
     } finally {
-      if (mustCloseStream)
-        IOUtils.cleanup(LOG, outStream, inStream);
+      IOUtils.cleanup(LOG, outStream, inStream);
     }
 
     return totalBytesRead;

Modified: hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/ThrottledInputStream.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/ThrottledInputStream.java?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/ThrottledInputStream.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/ThrottledInputStream.java Mon Apr  1 16:47:16 2013
@@ -52,6 +52,11 @@ public class ThrottledInputStream extend
     this.maxBytesPerSec = maxBytesPerSec;
   }
 
+  @Override
+  public void close() throws IOException {
+    rawStream.close();
+  }
+
   /** @inheritDoc */
   @Override
   public int read() throws IOException {

Modified: hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestCopyListing.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestCopyListing.java?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestCopyListing.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestCopyListing.java Mon Apr  1 16:47:16 2013
@@ -24,6 +24,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter;
 import org.apache.hadoop.tools.util.TestDistCpUtils;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.security.Credentials;
@@ -79,7 +80,39 @@ public class TestCopyListing extends Sim
     return 0;
   }
 
-  @Test
+  @Test(timeout=10000)
+  public void testSkipCopy() throws Exception {
+    SimpleCopyListing listing = new SimpleCopyListing(getConf(), CREDENTIALS) {
+      @Override
+      protected boolean shouldCopy(Path path, DistCpOptions options) {
+        return !path.getName().equals(FileOutputCommitter.SUCCEEDED_FILE_NAME);
+      }
+    };
+    FileSystem fs = FileSystem.get(getConf());
+    List<Path> srcPaths = new ArrayList<Path>();
+    srcPaths.add(new Path("/tmp/in4/1"));
+    srcPaths.add(new Path("/tmp/in4/2"));
+    Path target = new Path("/tmp/out4/1");
+    TestDistCpUtils.createFile(fs, "/tmp/in4/1/_SUCCESS");
+    TestDistCpUtils.createFile(fs, "/tmp/in4/1/file");
+    TestDistCpUtils.createFile(fs, "/tmp/in4/2");
+    fs.mkdirs(target);
+    DistCpOptions options = new DistCpOptions(srcPaths, target);
+    Path listingFile = new Path("/tmp/list4");
+    listing.buildListing(listingFile, options);
+    Assert.assertEquals(listing.getNumberOfPaths(), 2);
+    SequenceFile.Reader reader = new SequenceFile.Reader(getConf(),
+        SequenceFile.Reader.file(listingFile));
+    FileStatus fileStatus = new FileStatus();
+    Text relativePath = new Text();
+    Assert.assertTrue(reader.next(relativePath, fileStatus));
+    Assert.assertEquals(relativePath.toString(), "/1/file");
+    Assert.assertTrue(reader.next(relativePath, fileStatus));
+    Assert.assertEquals(relativePath.toString(), "/2");
+    Assert.assertFalse(reader.next(relativePath, fileStatus));
+  }
+
+  @Test(timeout=10000)
   public void testMultipleSrcToFile() {
     FileSystem fs = null;
     try {
@@ -124,7 +157,7 @@ public class TestCopyListing extends Sim
     }
   }
 
-  @Test
+  @Test(timeout=10000)
   public void testDuplicates() {
     FileSystem fs = null;
     try {
@@ -150,7 +183,7 @@ public class TestCopyListing extends Sim
     }
   }
 
-  @Test
+  @Test(timeout=10000)
   public void testBuildListing() {
     FileSystem fs = null;
     try {
@@ -206,7 +239,7 @@ public class TestCopyListing extends Sim
     }
   }
 
-  @Test
+  @Test(timeout=10000)
   public void testBuildListingForSingleFile() {
     FileSystem fs = null;
     String testRootString = "/singleFileListing";

Modified: hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestIntegration.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestIntegration.java?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestIntegration.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestIntegration.java Mon Apr  1 16:47:16 2013
@@ -26,6 +26,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapreduce.Cluster;
 import org.apache.hadoop.mapreduce.JobSubmissionFiles;
+import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.tools.util.TestDistCpUtils;
 import org.junit.Assert;
 import org.junit.BeforeClass;
@@ -34,6 +35,7 @@ import org.junit.Test;
 import java.io.IOException;
 import java.io.OutputStream;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.List;
 
 public class TestIntegration {
@@ -68,7 +70,7 @@ public class TestIntegration {
     }
   }
 
-  @Test
+  @Test(timeout=100000)
   public void testSingleFileMissingTarget() {
     caseSingleFileMissingTarget(false);
     caseSingleFileMissingTarget(true);
@@ -91,7 +93,7 @@ public class TestIntegration {
     }
   }
 
-  @Test
+  @Test(timeout=100000)
   public void testSingleFileTargetFile() {
     caseSingleFileTargetFile(false);
     caseSingleFileTargetFile(true);
@@ -101,7 +103,7 @@ public class TestIntegration {
 
     try {
       addEntries(listFile, "singlefile1/file1");
-      createFiles("singlefile1/file1", target.toString());
+      createFiles("singlefile1/file1", "target");
 
       runTest(listFile, target, sync);
 
@@ -114,7 +116,7 @@ public class TestIntegration {
     }
   }
 
-  @Test
+  @Test(timeout=100000)
   public void testSingleFileTargetDir() {
     caseSingleFileTargetDir(false);
     caseSingleFileTargetDir(true);
@@ -138,7 +140,7 @@ public class TestIntegration {
     }
   }
 
-  @Test
+  @Test(timeout=100000)
   public void testSingleDirTargetMissing() {
     caseSingleDirTargetMissing(false);
     caseSingleDirTargetMissing(true);
@@ -161,7 +163,7 @@ public class TestIntegration {
     }
   }
 
-  @Test
+  @Test(timeout=100000)
   public void testSingleDirTargetPresent() {
 
     try {
@@ -180,7 +182,7 @@ public class TestIntegration {
     }
   }
 
-  @Test
+  @Test(timeout=100000)
   public void testUpdateSingleDirTargetPresent() {
 
     try {
@@ -199,7 +201,7 @@ public class TestIntegration {
     }
   }
 
-  @Test
+  @Test(timeout=100000)
   public void testMultiFileTargetPresent() {
     caseMultiFileTargetPresent(false);
     caseMultiFileTargetPresent(true);
@@ -223,7 +225,56 @@ public class TestIntegration {
     }
   }
 
-  @Test
+  @Test(timeout=100000)
+  public void testCustomCopyListing() {
+
+    try {
+      addEntries(listFile, "multifile1/file3", "multifile1/file4", "multifile1/file5");
+      createFiles("multifile1/file3", "multifile1/file4", "multifile1/file5");
+      mkdirs(target.toString());
+
+      Configuration conf = getConf();
+      try {
+        conf.setClass(DistCpConstants.CONF_LABEL_COPY_LISTING_CLASS,
+            CustomCopyListing.class, CopyListing.class);
+        DistCpOptions options = new DistCpOptions(Arrays.
+            asList(new Path(root + "/" + "multifile1")), target);
+        options.setSyncFolder(true);
+        options.setDeleteMissing(false);
+        options.setOverwrite(false);
+        try {
+          new DistCp(conf, options).execute();
+        } catch (Exception e) {
+          LOG.error("Exception encountered ", e);
+          throw new IOException(e);
+        }
+      } finally {
+        conf.unset(DistCpConstants.CONF_LABEL_COPY_LISTING_CLASS);
+      }
+
+      checkResult(target, 2, "file4", "file5");
+    } catch (IOException e) {
+      LOG.error("Exception encountered while testing distcp", e);
+      Assert.fail("distcp failure");
+    } finally {
+      TestDistCpUtils.delete(fs, root);
+    }
+  }
+
+  private static class CustomCopyListing extends SimpleCopyListing {
+
+    public CustomCopyListing(Configuration configuration,
+                             Credentials credentials) {
+      super(configuration, credentials);
+    }
+
+    @Override
+    protected boolean shouldCopy(Path path, DistCpOptions options) {
+      return !path.getName().equals("file3");
+    }
+  }
+
+  @Test(timeout=100000)
   public void testMultiFileTargetMissing() {
     caseMultiFileTargetMissing(false);
     caseMultiFileTargetMissing(true);
@@ -246,7 +297,7 @@ public class TestIntegration {
     }
   }
 
-  @Test
+  @Test(timeout=100000)
   public void testMultiDirTargetPresent() {
 
     try {
@@ -265,7 +316,7 @@ public class TestIntegration {
     }
   }
 
-  @Test
+  @Test(timeout=100000)
   public void testUpdateMultiDirTargetPresent() {
 
     try {
@@ -284,7 +335,7 @@ public class TestIntegration {
     }
   }
 
-  @Test
+  @Test(timeout=100000)
   public void testMultiDirTargetMissing() {
 
     try {
@@ -304,7 +355,7 @@ public class TestIntegration {
     }
   }
 
-  @Test
+  @Test(timeout=100000)
   public void testUpdateMultiDirTargetMissing() {
 
     try {
@@ -323,7 +374,7 @@ public class TestIntegration {
     }
   }
   
-  @Test
+  @Test(timeout=100000)
   public void testDeleteMissingInDestination() {
     
     try {
@@ -343,7 +394,7 @@ public class TestIntegration {
     }
   }
   
-  @Test
+  @Test(timeout=100000)
   public void testOverwrite() {
     byte[] contents1 = "contents1".getBytes();
     byte[] contents2 = "contents2".getBytes();
@@ -375,7 +426,7 @@ public class TestIntegration {
     }
   }
 
-  @Test
+  @Test(timeout=100000)
   public void testGlobTargetMissingSingleLevel() {
 
     try {
@@ -398,7 +449,7 @@ public class TestIntegration {
     }
   }
 
-  @Test
+  @Test(timeout=100000)
   public void testUpdateGlobTargetMissingSingleLevel() {
 
     try {
@@ -420,7 +471,7 @@ public class TestIntegration {
     }
   }
 
-  @Test
+  @Test(timeout=100000)
   public void testGlobTargetMissingMultiLevel() {
 
     try {
@@ -444,7 +495,7 @@ public class TestIntegration {
     }
   }
 
-  @Test
+  @Test(timeout=100000)
   public void testUpdateGlobTargetMissingMultiLevel() {
 
     try {
@@ -468,7 +519,7 @@ public class TestIntegration {
     }
   }
   
-  @Test
+  @Test(timeout=100000)
   public void testCleanup() {
     try {
       Path sourcePath = new Path("noscheme:///file");

Modified: hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestUniformSizeInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestUniformSizeInputFormat.java?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestUniformSizeInputFormat.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestUniformSizeInputFormat.java Mon Apr  1 16:47:16 2013
@@ -33,8 +33,6 @@ import org.apache.hadoop.tools.CopyListi
 import org.apache.hadoop.tools.DistCpOptions;
 import org.apache.hadoop.tools.StubContext;
 import org.apache.hadoop.security.Credentials;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.BeforeClass;
@@ -48,9 +46,6 @@ import java.util.Random;
 
 
 public class TestUniformSizeInputFormat {
-  private static final Log LOG
-                = LogFactory.getLog(TestUniformSizeInputFormat.class);
-
   private static MiniDFSCluster cluster;
   private static final int N_FILES = 20;
   private static final int SIZEOF_EACH_FILE=1024;
@@ -118,12 +113,9 @@ public class TestUniformSizeInputFormat 
     List<InputSplit> splits
             = uniformSizeInputFormat.getSplits(jobContext);
 
-    List<InputSplit> legacySplits = legacyGetSplits(listFile, nMaps);
-
     int sizePerMap = totalFileSize/nMaps;
 
     checkSplits(listFile, splits);
-    checkAgainstLegacy(splits, legacySplits);
 
     int doubleCheckedTotalSize = 0;
     int previousSplitSize = -1;
@@ -155,57 +147,6 @@ public class TestUniformSizeInputFormat 
     Assert.assertEquals(totalFileSize, doubleCheckedTotalSize);
   }
 
-  // From
-  // http://svn.apache.org/repos/asf/hadoop/mapreduce/trunk/src/tools/org/apache/hadoop/tools/DistCp.java
-  private List<InputSplit> legacyGetSplits(Path listFile, int numSplits)
-      throws IOException {
-
-    FileSystem fs = cluster.getFileSystem();
-    FileStatus srcst = fs.getFileStatus(listFile);
-    Configuration conf = fs.getConf();
-
-    ArrayList<InputSplit> splits = new ArrayList<InputSplit>(numSplits);
-    FileStatus value = new FileStatus();
-    Text key = new Text();
-    final long targetsize = totalFileSize / numSplits;
-    long pos = 0L;
-    long last = 0L;
-    long acc = 0L;
-    long cbrem = srcst.getLen();
-    SequenceFile.Reader sl = null;
-
-    LOG.info("Average bytes per map: " + targetsize +
-        ", Number of maps: " + numSplits + ", total size: " + totalFileSize);
-
-    try {
-      sl = new SequenceFile.Reader(conf, SequenceFile.Reader.file(listFile));
-      for (; sl.next(key, value); last = sl.getPosition()) {
-        // if adding this split would put this split past the target size,
-        // cut the last split and put this next file in the next split.
-        if (acc + value.getLen() > targetsize && acc != 0) {
-          long splitsize = last - pos;
-          FileSplit fileSplit = new FileSplit(listFile, pos, splitsize, null);
-          LOG.info ("Creating split : " + fileSplit + ", bytes in split: " + splitsize);
-          splits.add(fileSplit);
-          cbrem -= splitsize;
-          pos = last;
-          acc = 0L;
-        }
-        acc += value.getLen();
-      }
-    }
-    finally {
-      IOUtils.closeStream(sl);
-    }
-    if (cbrem != 0) {
-      FileSplit fileSplit = new FileSplit(listFile, pos, cbrem, null);
-      LOG.info ("Creating split : " + fileSplit + ", bytes in split: " + cbrem);
-      splits.add(fileSplit);
-    }
-
-    return splits;
-  }
-
   private void checkSplits(Path listFile, List<InputSplit> splits) throws IOException {
     long lastEnd = 0;
 
@@ -233,18 +174,6 @@ public class TestUniformSizeInputFormat 
     }
   }
 
-  private void checkAgainstLegacy(List<InputSplit> splits,
-                                  List<InputSplit> legacySplits)
-      throws IOException, InterruptedException {
-
-    Assert.assertEquals(legacySplits.size(), splits.size());
-    for (int index = 0; index < splits.size(); index++) {
-      FileSplit fileSplit = (FileSplit) splits.get(index);
-      FileSplit legacyFileSplit = (FileSplit) legacySplits.get(index);
-      Assert.assertEquals(fileSplit.getStart(), legacyFileSplit.getStart());
-    }
-  }
-
   @Test
   public void testGetSplits() throws Exception {
     testGetSplits(9);

Modified: hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/LoadJob.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/LoadJob.java?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/LoadJob.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/LoadJob.java Mon Apr  1 16:47:16 2013
@@ -37,11 +37,11 @@ import org.apache.hadoop.mapreduce.TaskI
 import org.apache.hadoop.mapreduce.TaskType;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.mapreduce.server.tasktracker.TTConfig;
-import org.apache.hadoop.mapreduce.util.ResourceCalculatorPlugin;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.tools.rumen.JobStory;
 import org.apache.hadoop.tools.rumen.ResourceUsageMetrics;
 import org.apache.hadoop.tools.rumen.TaskInfo;
+import org.apache.hadoop.yarn.util.ResourceCalculatorPlugin;
 
 import java.io.IOException;
 import java.security.PrivilegedExceptionAction;

Modified: hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/emulators/resourceusage/CumulativeCpuUsageEmulatorPlugin.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/emulators/resourceusage/CumulativeCpuUsageEmulatorPlugin.java?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/emulators/resourceusage/CumulativeCpuUsageEmulatorPlugin.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/emulators/resourceusage/CumulativeCpuUsageEmulatorPlugin.java Mon Apr  1 16:47:16 2013
@@ -22,8 +22,8 @@ import java.util.Random;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapred.gridmix.Progressive;
-import org.apache.hadoop.mapreduce.util.ResourceCalculatorPlugin;
 import org.apache.hadoop.tools.rumen.ResourceUsageMetrics;
+import org.apache.hadoop.yarn.util.ResourceCalculatorPlugin;
 
 /**
  * <p>A {@link ResourceUsageEmulatorPlugin} that emulates the cumulative CPU 
@@ -166,7 +166,7 @@ implements ResourceUsageEmulatorPlugin {
      */
     public void calibrate(ResourceCalculatorPlugin monitor, 
                           long totalCpuUsage) {
-      long initTime = monitor.getProcResourceValues().getCumulativeCpuTime();
+      long initTime = monitor.getCumulativeCpuTime();
       
       long defaultLoopSize = 0;
       long finalTime = initTime;
@@ -175,7 +175,7 @@ implements ResourceUsageEmulatorPlugin {
       while (finalTime - initTime < 100) { // 100 ms
         ++defaultLoopSize;
         performUnitComputation(); //perform unit computation
-        finalTime = monitor.getProcResourceValues().getCumulativeCpuTime();
+        finalTime = monitor.getCumulativeCpuTime();
       }
       
       long referenceRuntime = finalTime - initTime;
@@ -230,7 +230,7 @@ implements ResourceUsageEmulatorPlugin {
   }
   
   private synchronized long getCurrentCPUUsage() {
-    return monitor.getProcResourceValues().getCumulativeCpuTime();
+    return monitor.getCumulativeCpuTime();
   }
   
   @Override

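The calibrate() hunk above sizes a unit of busy-work against real CPU time: it runs performUnitComputation() in a loop until roughly 100 ms of CPU time has elapsed, then uses the loop count as the calibrated unit size. A self-contained sketch of the same idea, using the standard-library ThreadMXBean purely as a stand-in for the Hadoop plugin (it reports nanoseconds, and is not what the plugin uses):

  import java.lang.management.ManagementFactory;
  import java.lang.management.ThreadMXBean;

  public class CalibrateDemo {
    private static volatile long sink; // defeat dead-code elimination

    static void performUnitComputation() {
      long v = sink;
      for (int i = 0; i < 10_000; i++) {
        v += i * 31L;
      }
      sink = v;
    }

    public static void main(String[] args) {
      ThreadMXBean mx = ManagementFactory.getThreadMXBean();
      long initTime = mx.getCurrentThreadCpuTime(); // nanoseconds
      long loops = 0;
      long finalTime = initTime;
      // Mirror of the plugin's loop: run units until ~100 ms of CPU
      // time has been consumed, then report the count.
      while (finalTime - initTime < 100_000_000L) {
        ++loops;
        performUnitComputation();
        finalTime = mx.getCurrentThreadCpuTime();
      }
      System.out.println("units per 100 ms of CPU: " + loops);
    }
  }
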
Modified: hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/emulators/resourceusage/ResourceUsageEmulatorPlugin.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/emulators/resourceusage/ResourceUsageEmulatorPlugin.java?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/emulators/resourceusage/ResourceUsageEmulatorPlugin.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/emulators/resourceusage/ResourceUsageEmulatorPlugin.java Mon Apr  1 16:47:16 2013
@@ -20,7 +20,7 @@ package org.apache.hadoop.mapred.gridmix
 import java.io.IOException;
 
 import org.apache.hadoop.mapred.gridmix.Progressive;
-import org.apache.hadoop.mapreduce.util.ResourceCalculatorPlugin;
+import org.apache.hadoop.yarn.util.ResourceCalculatorPlugin;
 import org.apache.hadoop.tools.rumen.ResourceUsageMetrics;
 import org.apache.hadoop.conf.Configuration;
 

Modified: hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/emulators/resourceusage/ResourceUsageMatcher.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/emulators/resourceusage/ResourceUsageMatcher.java?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/emulators/resourceusage/ResourceUsageMatcher.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/emulators/resourceusage/ResourceUsageMatcher.java Mon Apr  1 16:47:16 2013
@@ -23,9 +23,9 @@ import java.util.List;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapred.gridmix.Progressive;
-import org.apache.hadoop.mapreduce.util.ResourceCalculatorPlugin;
 import org.apache.hadoop.tools.rumen.ResourceUsageMetrics;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hadoop.yarn.util.ResourceCalculatorPlugin;
 
 /**
  * <p>This is the driver class for managing all the resource usage emulators.

Modified: hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/emulators/resourceusage/TotalHeapUsageEmulatorPlugin.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/emulators/resourceusage/TotalHeapUsageEmulatorPlugin.java?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/emulators/resourceusage/TotalHeapUsageEmulatorPlugin.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/emulators/resourceusage/TotalHeapUsageEmulatorPlugin.java Mon Apr  1 16:47:16 2013
@@ -21,8 +21,8 @@ import java.io.IOException;
 import java.util.ArrayList;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapred.gridmix.Progressive;
-import org.apache.hadoop.mapreduce.util.ResourceCalculatorPlugin;
 import org.apache.hadoop.tools.rumen.ResourceUsageMetrics;
+import org.apache.hadoop.yarn.util.ResourceCalculatorPlugin;
 
 /**
  * <p>A {@link ResourceUsageEmulatorPlugin} that emulates the total heap 

Modified: hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-gridmix/src/test/java/org/apache/hadoop/mapred/gridmix/TestGridmixMemoryEmulation.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-gridmix/src/test/java/org/apache/hadoop/mapred/gridmix/TestGridmixMemoryEmulation.java?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-gridmix/src/test/java/org/apache/hadoop/mapred/gridmix/TestGridmixMemoryEmulation.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-gridmix/src/test/java/org/apache/hadoop/mapred/gridmix/TestGridmixMemoryEmulation.java Mon Apr  1 16:47:16 2013
@@ -23,7 +23,6 @@ import static org.junit.Assert.*;
 import java.io.IOException;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.mapred.DummyResourceCalculatorPlugin;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.gridmix.DebugJobProducer.MockJob;
 import org.apache.hadoop.mapred.gridmix.TestHighRamJob.DummyGridmixJob;
@@ -32,8 +31,8 @@ import org.apache.hadoop.mapred.gridmix.
 import org.apache.hadoop.mapred.gridmix.emulators.resourceusage.TotalHeapUsageEmulatorPlugin.DefaultHeapUsageEmulator;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.MRJobConfig;
-import org.apache.hadoop.mapreduce.util.ResourceCalculatorPlugin;
 import org.apache.hadoop.tools.rumen.ResourceUsageMetrics;
+import org.apache.hadoop.yarn.util.ResourceCalculatorPlugin;
 
 /**
  * Test Gridmix memory emulation.

Modified: hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-gridmix/src/test/java/org/apache/hadoop/mapred/gridmix/TestGridmixSummary.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-gridmix/src/test/java/org/apache/hadoop/mapred/gridmix/TestGridmixSummary.java?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-gridmix/src/test/java/org/apache/hadoop/mapred/gridmix/TestGridmixSummary.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-gridmix/src/test/java/org/apache/hadoop/mapred/gridmix/TestGridmixSummary.java Mon Apr  1 16:47:16 2013
@@ -257,7 +257,7 @@ public class TestGridmixSummary {
                  qPath.toString(), es.getInputTraceLocation());
     // test expected data size
     assertEquals("Mismatch in expected data size", 
-                 "1.0k", es.getExpectedDataSize());
+                 "1 K", es.getExpectedDataSize());
     // test input data statistics
     assertEquals("Mismatch in input data statistics", 
                  ExecutionSummarizer.stringifyDataStatistics(dataStats), 
@@ -272,7 +272,7 @@ public class TestGridmixSummary {
     es.finalize(factory, testTraceFile.toString(), 1024*1024*1024*10L, resolver,
                 dataStats, conf);
     assertEquals("Mismatch in expected data size", 
-                 "10.0g", es.getExpectedDataSize());
+                 "10 G", es.getExpectedDataSize());
     
     // test trace signature uniqueness
     //  touch the trace file
@@ -389,4 +389,4 @@ public class TestGridmixSummary {
     assertEquals("Cluster summary test failed!", 0, 
                  cs.getNumBlacklistedTrackers());
   }
-}
\ No newline at end of file
+}

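The expected strings above change from "1.0k"/"10.0g" to "1 K"/"10 G", which is consistent with the human-readable size formatter moving to StringUtils.TraditionalBinaryPrefix.long2String; that is an inference on my part, since the commit only shows the test side. A sketch of what such a formatter would produce, assuming a long2String(value, unit, decimalPlaces) signature that drops trailing ".0" on exact multiples:

  import org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix;

  public class DataSizeFormat {
    public static void main(String[] args) {
      // 1024 bytes -> "1 K"; 10 GiB -> "10 G" (exact multiples
      // print without a decimal part).
      System.out.println(TraditionalBinaryPrefix.long2String(1024L, "", 2));
      System.out.println(TraditionalBinaryPrefix.long2String(
          10L * 1024 * 1024 * 1024, "", 2));
    }
  }
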
Modified: hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-gridmix/src/test/java/org/apache/hadoop/mapred/gridmix/TestResourceUsageEmulators.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-gridmix/src/test/java/org/apache/hadoop/mapred/gridmix/TestResourceUsageEmulators.java?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-gridmix/src/test/java/org/apache/hadoop/mapred/gridmix/TestResourceUsageEmulators.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-gridmix/src/test/java/org/apache/hadoop/mapred/gridmix/TestResourceUsageEmulators.java Mon Apr  1 16:47:16 2013
@@ -31,14 +31,13 @@ import org.apache.hadoop.mapreduce.TaskI
 import org.apache.hadoop.mapreduce.TaskType;
 import org.apache.hadoop.mapreduce.server.tasktracker.TTConfig;
 import org.apache.hadoop.mapreduce.task.MapContextImpl;
-import org.apache.hadoop.mapreduce.util.ResourceCalculatorPlugin;
 import org.apache.hadoop.tools.rumen.ResourceUsageMetrics;
-import org.apache.hadoop.mapred.DummyResourceCalculatorPlugin;
 import org.apache.hadoop.mapred.gridmix.LoadJob.ResourceUsageMatcherRunner;
 import org.apache.hadoop.mapred.gridmix.emulators.resourceusage.CumulativeCpuUsageEmulatorPlugin;
 import org.apache.hadoop.mapred.gridmix.emulators.resourceusage.ResourceUsageEmulatorPlugin;
 import org.apache.hadoop.mapred.gridmix.emulators.resourceusage.ResourceUsageMatcher;
 import org.apache.hadoop.mapred.gridmix.emulators.resourceusage.CumulativeCpuUsageEmulatorPlugin.DefaultCpuUsageEmulator;
+import org.apache.hadoop.yarn.util.ResourceCalculatorPlugin;
 
 /**
  * Test Gridmix's resource emulator framework and supported plugins.
@@ -242,16 +241,6 @@ public class TestResourceUsageEmulators 
     public long getCumulativeCpuTime() {
       return core.getCpuUsage();
     }
-
-    /**
-     * Returns a {@link ProcResourceValues} with cumulative cpu usage  
-     * computed using {@link #getCumulativeCpuTime()}.
-     */
-    @Override
-    public ProcResourceValues getProcResourceValues() {
-      long usageValue = getCumulativeCpuTime();
-      return new ProcResourceValues(usageValue, -1, -1);
-    }
   }
   
   /**

Modified: hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-pipes/src/CMakeLists.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-pipes/src/CMakeLists.txt?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-pipes/src/CMakeLists.txt (original)
+++ hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-pipes/src/CMakeLists.txt Mon Apr  1 16:47:16 2013
@@ -21,10 +21,10 @@ find_package(OpenSSL REQUIRED)
 
 set(CMAKE_BUILD_TYPE, Release)
 
-set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -g -Wall -O2")
-set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -g -Wall -O2")
-set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -D_REENTRANT -D_FILE_OFFSET_BITS=64")
-set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -D_REENTRANT -D_FILE_OFFSET_BITS=64")
+set(PIPES_FLAGS "-g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE")
+set(PIPES_FLAGS "${PIPES_FLAGS} -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64")
+set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${PIPES_FLAGS}")
+set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${PIPES_FLAGS}")
 
 include(../../../hadoop-common-project/hadoop-common/src/JNIFlags.cmake NO_POLICY_SCOPE)
 

Modified: hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java Mon Apr  1 16:47:16 2013
@@ -83,6 +83,9 @@ public class JobBuilder {
   private Map<ParsedHost, ParsedHost> allHosts =
       new HashMap<ParsedHost, ParsedHost>();
 
+  private org.apache.hadoop.mapreduce.jobhistory.JhCounters EMPTY_COUNTERS =
+      new org.apache.hadoop.mapreduce.jobhistory.JhCounters();
+
   /**
    * The number of splits a task can have, before we ignore them all.
    */
@@ -459,7 +462,10 @@ public class JobBuilder {
     TaskFailed t = (TaskFailed)(event.getDatum());
     task.putDiagnosticInfo(t.error.toString());
     task.putFailedDueToAttemptId(t.failedDueToAttempt.toString());
-    // No counters in TaskFailedEvent
+    org.apache.hadoop.mapreduce.jobhistory.JhCounters counters =
+        ((TaskFailed) event.getDatum()).counters;
+    task.incorporateCounters(
+        counters == null ? EMPTY_COUNTERS : counters);
   }
 
   private void processTaskAttemptUnsuccessfulCompletionEvent(
@@ -481,7 +487,10 @@ public class JobBuilder {
     }
 
     attempt.setFinishTime(event.getFinishTime());
-
+    org.apache.hadoop.mapreduce.jobhistory.JhCounters counters =
+        ((TaskAttemptUnsuccessfulCompletion) event.getDatum()).counters;
+    attempt.incorporateCounters(
+        counters == null ? EMPTY_COUNTERS : counters);
     attempt.arraySetClockSplits(event.getClockSplits());
     attempt.arraySetCpuUsages(event.getCpuUsages());
     attempt.arraySetVMemKbytes(event.getVMemKbytes());
@@ -489,7 +498,6 @@ public class JobBuilder {
     TaskAttemptUnsuccessfulCompletion t =
         (TaskAttemptUnsuccessfulCompletion) (event.getDatum());
     attempt.putDiagnosticInfo(t.error.toString());
-    // No counters in TaskAttemptUnsuccessfulCompletionEvent
   }
 
   private void processTaskAttemptStartedEvent(TaskAttemptStartedEvent event) {

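The JobBuilder hunks above start folding counters from failed tasks and unsuccessful attempts into the trace, substituting a shared EMPTY_COUNTERS instance when an event carries no counters. A generic sketch of that null-object fallback, using plain collections as stand-ins for the rumen/jobhistory types:

  import java.util.Arrays;
  import java.util.Collections;
  import java.util.List;

  public class NullObjectDemo {
    // Shared immutable "empty" default, analogous to EMPTY_COUNTERS.
    private static final List<String> EMPTY = Collections.emptyList();

    static void incorporate(List<String> counters) {
      // The fallback keeps the call site unconditional, mirroring
      // incorporateCounters(counters == null ? EMPTY_COUNTERS : counters).
      List<String> safe = (counters == null) ? EMPTY : counters;
      System.out.println("incorporating " + safe.size() + " counter(s)");
    }

    public static void main(String[] args) {
      incorporate(null);                                // uses EMPTY
      incorporate(Arrays.asList("CPU_MS", "VMEM_KB"));  // passes through
    }
  }
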
Modified: hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamReduceNone.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamReduceNone.java?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamReduceNone.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamReduceNone.java Mon Apr  1 16:47:16 2013
@@ -68,6 +68,7 @@ public class TestStreamReduceNone
       "-reducer", "org.apache.hadoop.mapred.lib.IdentityReducer",
       "-numReduceTasks", "0",
       "-jobconf", "mapreduce.task.files.preserve.failedtasks=true",
+      "-jobconf", "mapreduce.job.maps=1",
       "-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp")
     };
   }

Modified: hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamXmlRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamXmlRecordReader.java?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamXmlRecordReader.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamXmlRecordReader.java Mon Apr  1 16:47:16 2013
@@ -54,6 +54,8 @@ public class TestStreamXmlRecordReader e
   protected String[] genArgs() {
     args.add("-inputreader");
     args.add("StreamXmlRecordReader,begin=<xmltag>,end=</xmltag>");
+    args.add("-jobconf");
+    args.add("mapreduce.job.maps=1");
     return super.genArgs();
   }
 }

Modified: hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingTaskLog.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingTaskLog.java?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingTaskLog.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingTaskLog.java Mon Apr  1 16:47:16 2013
@@ -83,7 +83,7 @@ public class TestStreamingTaskLog {
    *  (b) hadoop.tasklog.totalLogFileSize
    * for the children of java tasks in streaming jobs.
    */
-  @Test
+  @Test (timeout = 30000)
   public void testStreamingTaskLogWithHadoopCmd() {
     try {
       final int numSlaves = 1;
@@ -124,8 +124,8 @@ public class TestStreamingTaskLog {
               "echo $HADOOP_ROOT_LOGGER $HADOOP_CLIENT_OPTS").getBytes());
     in.close();
     
-    Shell.execCommand(new String[]{"chmod", "+x",
-                                   scriptFile.getAbsolutePath()});
+    Shell.execCommand(Shell.getSetPermissionCommand("+x", false,
+        scriptFile.getAbsolutePath()));
     return scriptFile;
   }
   

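The last hunk above replaces a hard-coded chmod argv with Shell.getSetPermissionCommand, which builds a platform-appropriate command (chmod on Unix, the winutils equivalent on Windows). A short sketch of the call as it appears in the diff; the three-argument overload (perm, recursive, file) is taken directly from the hunk, while the temp-file scaffolding around it is illustrative only:

  import java.io.File;
  import org.apache.hadoop.util.Shell;

  public class MakeExecutable {
    public static void main(String[] args) throws Exception {
      File script = File.createTempFile("demo", ".sh");
      // getSetPermissionCommand builds the argv for this OS;
      // execCommand runs it and returns the command's stdout.
      Shell.execCommand(Shell.getSetPermissionCommand(
          "+x", false, script.getAbsolutePath()));
      System.out.println(script + " executable? " + script.canExecute());
    }
  }
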
Modified: hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestSymLink.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestSymLink.java?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestSymLink.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestSymLink.java Mon Apr  1 16:47:16 2013
@@ -53,7 +53,7 @@ public class TestSymLink
   String cacheString = "This is just the cache string";
   StreamJob job;
 
-  @Test
+  @Test (timeout = 60000)
   public void testSymLink() throws Exception
   {
     boolean mayExit = false;


