hadoop-common-commits mailing list archives

From: cmcc...@apache.org
Subject: svn commit: r1619012 [1/3] - in /hadoop/common/branches/HADOOP-10388: ./ hadoop-assemblies/src/main/resources/assemblies/ hadoop-client/ hadoop-dist/ hadoop-maven-plugins/ hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/protoc/ hadoop...
Date: Tue, 19 Aug 2014 23:50:07 GMT
Author: cmccabe
Date: Tue Aug 19 23:49:39 2014
New Revision: 1619012

URL: http://svn.apache.org/r1619012
Log:
merge trunk into HADOOP-10388 branch

Added:
    hadoop/common/branches/HADOOP-10388/hadoop-assemblies/src/main/resources/assemblies/hadoop-kms-dist.xml
      - copied unchanged from r1619000, hadoop/common/trunk/hadoop-assemblies/src/main/resources/assemblies/hadoop-kms-dist.xml
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-azure/   (props changed)
      - copied from r1619000, hadoop/common/trunk/hadoop-tools/hadoop-azure/
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyListingFileStatus.java
      - copied unchanged from r1619000, hadoop/common/trunk/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyListingFileStatus.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpWithAcls.java
      - copied unchanged from r1619000, hadoop/common/trunk/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpWithAcls.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpWithXAttrs.java
      - copied unchanged from r1619000, hadoop/common/trunk/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpWithXAttrs.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/contract/
      - copied from r1619000, hadoop/common/trunk/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/contract/
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-openstack/src/test/resources/contract/
      - copied from r1619000, hadoop/common/trunk/hadoop-tools/hadoop-openstack/src/test/resources/contract/
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/appmaster/
      - copied from r1619000, hadoop/common/trunk/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/appmaster/
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/nodemanager/
      - copied from r1619000, hadoop/common/trunk/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/nodemanager/
Removed:
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/site/fml/
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/site/pdf.xml
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/site/xdoc/
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/exceptions/SwiftNotDirectoryException.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/exceptions/SwiftPathExistsException.java
Modified:
    hadoop/common/branches/HADOOP-10388/   (props changed)
    hadoop/common/branches/HADOOP-10388/.gitignore
    hadoop/common/branches/HADOOP-10388/BUILDING.txt
    hadoop/common/branches/HADOOP-10388/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml
    hadoop/common/branches/HADOOP-10388/hadoop-client/pom.xml
    hadoop/common/branches/HADOOP-10388/hadoop-dist/pom.xml
    hadoop/common/branches/HADOOP-10388/hadoop-maven-plugins/pom.xml
    hadoop/common/branches/HADOOP-10388/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/protoc/ProtocMojo.java
    hadoop/common/branches/HADOOP-10388/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/util/Exec.java
    hadoop/common/branches/HADOOP-10388/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/util/FileSetUtils.java
    hadoop/common/branches/HADOOP-10388/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/versioninfo/VersionInfoMojo.java
    hadoop/common/branches/HADOOP-10388/hadoop-native-core/src/main/native/GenerateProtobufs.cmake
    hadoop/common/branches/HADOOP-10388/hadoop-project-dist/pom.xml
    hadoop/common/branches/HADOOP-10388/hadoop-project/pom.xml
    hadoop/common/branches/HADOOP-10388/hadoop-project/src/site/site.xml
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-archives/src/main/java/org/apache/hadoop/tools/HadoopArchives.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/pom.xml
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyListing.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpConstants.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptionSwitch.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptions.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/OptionsParser.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyCommitter.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/UniformSizeInputFormat.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/lib/DynamicInputChunk.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/lib/DynamicInputFormat.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/ThrottledInputStream.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/resources/distcp-default.xml
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/StubContext.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestCopyListing.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestFileBasedCopyListing.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestGlobbedCopyListing.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestOptionsParser.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestCopyMapper.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestRetriableFileCopyCommand.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestUniformSizeInputFormat.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/lib/TestDynamicInputFormat.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/util/TestDistCpUtils.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/test/resources/sslConfig.xml
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-gridmix/src/test/java/org/apache/hadoop/mapred/gridmix/TestGridMixClasses.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-gridmix/src/test/java/org/apache/hadoop/mapred/gridmix/TestGridmixRecord.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-openstack/pom.xml
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/snative/StrictBufferedFSInputStream.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/snative/SwiftNativeFileSystem.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/snative/SwiftNativeFileSystemStore.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/snative/SwiftNativeInputStream.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/snative/SwiftNativeOutputStream.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftFileSystemBasicOps.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftFileSystemContract.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftFileSystemRename.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/hdfs2/TestV2LsOperations.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/RumenToSLSConverter.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/SLSRunner.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/appmaster/AMSimulator.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/appmaster/MRAMSimulator.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/conf/SLSConfiguration.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/nodemanager/NMSimulator.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/nodemanager/NodeInfo.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/scheduler/CapacitySchedulerMetrics.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/scheduler/ContainerSimulator.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/scheduler/FairSchedulerMetrics.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/scheduler/FifoSchedulerMetrics.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/scheduler/NodeUpdateSchedulerEventWrapper.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/scheduler/RMNodeWrapper.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/scheduler/ResourceSchedulerWrapper.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/scheduler/SLSCapacityScheduler.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/scheduler/SchedulerMetrics.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/scheduler/SchedulerWrapper.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/scheduler/TaskRunner.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/utils/SLSUtils.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/web/SLSWebApp.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-sls/src/site/apt/SchedulerLoadSimulator.apt.vm
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/TestSLSRunner.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/utils/TestSLSUtils.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/web/TestSLSWebApp.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/io/TestKeyOnlyTextOutputReader.java
    hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-tools-dist/pom.xml
    hadoop/common/branches/HADOOP-10388/hadoop-tools/pom.xml

Propchange: hadoop/common/branches/HADOOP-10388/
------------------------------------------------------------------------------
  Merged /hadoop/common/branches/branch-2.5:r1607590,1607618,1609091
  Merged /hadoop/common/branches/branch-2:r1600970,1606534,1607440,1616481
  Merged /hadoop/common/trunk:r1588388-1619000

Modified: hadoop/common/branches/HADOOP-10388/.gitignore
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/.gitignore?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/.gitignore (original)
+++ hadoop/common/branches/HADOOP-10388/.gitignore Tue Aug 19 23:49:39 2014
@@ -1,11 +1,16 @@
 *.iml
 *.ipr
 *.iws
+*.orig
+*.rej
 .idea
 .svn
 .classpath
 .project
 .settings
 target
+hadoop-common-project/hadoop-kms/downloads/
 hadoop-hdfs-project/hadoop-hdfs/downloads
 hadoop-hdfs-project/hadoop-hdfs-httpfs/downloads
+hadoop-common-project/hadoop-common/src/test/resources/contract-test-options.xml
+hadoop-tools/hadoop-openstack/src/test/resources/contract-test-options.xml

Modified: hadoop/common/branches/HADOOP-10388/BUILDING.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/BUILDING.txt?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/BUILDING.txt (original)
+++ hadoop/common/branches/HADOOP-10388/BUILDING.txt Tue Aug 19 23:49:39 2014
@@ -189,6 +189,7 @@ Requirements:
 * Maven 3.0 or later
 * Findbugs 1.3.9 (if running findbugs)
 * ProtocolBuffer 2.5.0
+* CMake 2.6 or newer
 * Windows SDK or Visual Studio 2010 Professional
 * Unix command-line tools from GnuWin32 or Cygwin: sh, mkdir, rm, cp, tar, gzip
 * zlib headers (if building native code bindings for zlib)

Modified: hadoop/common/branches/HADOOP-10388/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml Tue Aug 19 23:49:39 2014
@@ -29,6 +29,7 @@
         <exclude>*-config.cmd</exclude>
         <exclude>start-*.cmd</exclude>
         <exclude>stop-*.cmd</exclude>
+        <exclude>hadoop-layout.sh.example</exclude>
       </excludes>
       <fileMode>0755</fileMode>
     </fileSet>
@@ -42,6 +43,8 @@
       <includes>
         <include>*-config.sh</include>
         <include>*-config.cmd</include>
+        <include>*-functions.sh</include>
+        <include>hadoop-layout.sh.example</include>
       </includes>
       <fileMode>0755</fileMode>
     </fileSet>
@@ -57,6 +60,10 @@
         <exclude>hadoop.cmd</exclude>
         <exclude>hdfs.cmd</exclude>
         <exclude>hadoop-config.cmd</exclude>
+        <exclude>hadoop-functions.sh</exclude>
+        <exclude>hadoop-layout.sh.example</exclude>
+        <exclude>hdfs-config.cmd</exclude>
+        <exclude>hdfs-config.sh</exclude>
       </excludes>
       <fileMode>0755</fileMode>
     </fileSet>

Modified: hadoop/common/branches/HADOOP-10388/hadoop-client/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-client/pom.xml?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-client/pom.xml (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-client/pom.xml Tue Aug 19 23:49:39 2014
@@ -40,22 +40,10 @@
       <scope>compile</scope>
       <exclusions>
         <exclusion>
-          <groupId>tomcat</groupId>
-          <artifactId>jasper-compiler</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>tomcat</groupId>
-          <artifactId>jasper-runtime</artifactId>
-        </exclusion>
-        <exclusion>
           <groupId>javax.servlet</groupId>
           <artifactId>servlet-api</artifactId>
         </exclusion>
         <exclusion>
-          <groupId>javax.servlet.jsp</groupId>
-          <artifactId>jsp-api</artifactId>
-        </exclusion>
-        <exclusion>
           <groupId>commons-logging</groupId>
           <artifactId>commons-logging-api</artifactId>
         </exclusion>
@@ -73,10 +61,6 @@
         </exclusion>
         <exclusion>
           <groupId>org.mortbay.jetty</groupId>
-          <artifactId>jsp-api-2.1</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.mortbay.jetty</groupId>
           <artifactId>servlet-api-2.5</artifactId>
         </exclusion>
         <exclusion>
@@ -111,10 +95,6 @@
           <groupId>com.jcraft</groupId>
           <artifactId>jsch</artifactId>
         </exclusion>
-        <exclusion>
-          <groupId>commons-el</groupId>
-          <artifactId>commons-el</artifactId>
-        </exclusion>
       </exclusions>
     </dependency>
 
@@ -147,14 +127,6 @@
           <groupId>javax.servlet</groupId>
           <artifactId>servlet-api</artifactId>
         </exclusion>
-        <exclusion>
-          <groupId>javax.servlet.jsp</groupId>
-          <artifactId>jsp-api</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>tomcat</groupId>
-          <artifactId>jasper-runtime</artifactId>
-        </exclusion>
       </exclusions>
     </dependency>
 

Modified: hadoop/common/branches/HADOOP-10388/hadoop-dist/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-dist/pom.xml?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-dist/pom.xml (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-dist/pom.xml Tue Aug 19 23:49:39 2014
@@ -123,6 +123,7 @@
                       run cp -r $ROOT/hadoop-common-project/hadoop-nfs/target/hadoop-nfs-${project.version}/* .
                       run cp -r $ROOT/hadoop-hdfs-project/hadoop-hdfs/target/hadoop-hdfs-${project.version}/* .
                       run cp -r $ROOT/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/hadoop-hdfs-httpfs-${project.version}/* .
+                      run cp -r $ROOT/hadoop-common-project/hadoop-kms/target/hadoop-kms-${project.version}/* .
                       run cp -r $ROOT/hadoop-hdfs-project/hadoop-hdfs-nfs/target/hadoop-hdfs-nfs-${project.version}/* .
                       run cp -r $ROOT/hadoop-yarn-project/target/hadoop-yarn-project-${project.version}/* .
                       run cp -r $ROOT/hadoop-mapreduce-project/target/hadoop-mapreduce-${project.version}/* .

Modified: hadoop/common/branches/HADOOP-10388/hadoop-maven-plugins/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-maven-plugins/pom.xml?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-maven-plugins/pom.xml (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-maven-plugins/pom.xml Tue Aug 19 23:49:39 2014
@@ -49,7 +49,6 @@
     <dependency>
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>
-      <version>3.8.1</version>
       <scope>test</scope>
     </dependency>
   </dependencies>

Modified: hadoop/common/branches/HADOOP-10388/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/protoc/ProtocMojo.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/protoc/ProtocMojo.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/protoc/ProtocMojo.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/protoc/ProtocMojo.java Tue Aug 19 23:49:39 2014
@@ -1,6 +1,4 @@
 /*
- * Copyright 2012 The Apache Software Foundation.
- *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at

Modified: hadoop/common/branches/HADOOP-10388/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/util/Exec.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/util/Exec.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/util/Exec.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/util/Exec.java Tue Aug 19 23:49:39 2014
@@ -1,6 +1,4 @@
 /*
- * Copyright 2012 The Apache Software Foundation.
- *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at

Modified: hadoop/common/branches/HADOOP-10388/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/util/FileSetUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/util/FileSetUtils.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/util/FileSetUtils.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/util/FileSetUtils.java Tue Aug 19 23:49:39 2014
@@ -1,6 +1,4 @@
 /*
- * Copyright 2012 The Apache Software Foundation.
- *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at

Modified: hadoop/common/branches/HADOOP-10388/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/versioninfo/VersionInfoMojo.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/versioninfo/VersionInfoMojo.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/versioninfo/VersionInfoMojo.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/versioninfo/VersionInfoMojo.java Tue Aug 19 23:49:39 2014
@@ -1,6 +1,4 @@
 /*
- * Copyright 2012 The Apache Software Foundation.
- *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at

Modified: hadoop/common/branches/HADOOP-10388/hadoop-native-core/src/main/native/GenerateProtobufs.cmake
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-native-core/src/main/native/GenerateProtobufs.cmake?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-native-core/src/main/native/GenerateProtobufs.cmake (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-native-core/src/main/native/GenerateProtobufs.cmake Tue Aug 19 23:49:39 2014
@@ -91,4 +91,5 @@ DECLARE_PROTOS(
     ${R}/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/JournalProtocol.proto
     ${R}/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/NamenodeProtocol.proto
     ${R}/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/ClientNamenodeProtocol.proto
+    ${R}/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/xattr.proto
 )

Modified: hadoop/common/branches/HADOOP-10388/hadoop-project-dist/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-project-dist/pom.xml?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-project-dist/pom.xml (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-project-dist/pom.xml Tue Aug 19 23:49:39 2014
@@ -100,6 +100,8 @@
         <artifactId>findbugs-maven-plugin</artifactId>
         <configuration>
           <excludeFilterFile>${basedir}/dev-support/findbugsExcludeFile.xml</excludeFilterFile>
+          <fork>true</fork>
+          <maxHeap>2048</maxHeap>
         </configuration>
       </plugin>
       <plugin>

Modified: hadoop/common/branches/HADOOP-10388/hadoop-project/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-project/pom.xml?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-project/pom.xml (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-project/pom.xml Tue Aug 19 23:49:39 2014
@@ -65,6 +65,8 @@
     <!-- define the protobuf JAR version                               -->
     <protobuf.version>2.5.0</protobuf.version>
     <protoc.path>${env.HADOOP_PROTOC_PATH}</protoc.path>
+
+    <zookeeper.version>3.4.6</zookeeper.version>
   </properties>
 
   <dependencyManagement>
@@ -102,6 +104,12 @@
       </dependency>
       <dependency>
         <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-auth</artifactId>
+        <version>${project.version}</version>
+        <type>test-jar</type>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
         <artifactId>hadoop-nfs</artifactId>
         <version>${project.version}</version>
       </dependency>
@@ -312,6 +320,11 @@
         <artifactId>hadoop-native-core</artifactId>
         <version>${project.version}</version>
       </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-azure</artifactId>
+        <version>${project.version}</version>
+      </dependency>
 
       <dependency>
         <groupId>com.google.guava</groupId>
@@ -389,6 +402,11 @@
         <artifactId>jetty-util</artifactId>
         <version>6.1.26</version>
       </dependency>
+      <dependency>
+        <groupId>javax.servlet.jsp</groupId>
+        <artifactId>jsp-api</artifactId>
+        <version>2.1</version>
+      </dependency>
 
       <dependency>
         <groupId>org.glassfish</groupId>
@@ -488,36 +506,6 @@
         <version>6.1.26</version>
       </dependency>
       <dependency>
-        <groupId>tomcat</groupId>
-        <artifactId>jasper-compiler</artifactId>
-        <version>5.5.23</version>
-        <exclusions>
-          <exclusion>
-            <groupId>javax.servlet</groupId>
-            <artifactId>jsp-api</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>ant</groupId>
-            <artifactId>ant</artifactId>
-          </exclusion>
-        </exclusions>
-      </dependency>
-      <dependency>
-        <groupId>tomcat</groupId>
-        <artifactId>jasper-runtime</artifactId>
-        <version>5.5.23</version>
-      </dependency>
-      <dependency>
-        <groupId>javax.servlet.jsp</groupId>
-        <artifactId>jsp-api</artifactId>
-        <version>2.1</version>
-      </dependency>
-      <dependency>
-        <groupId>commons-el</groupId>
-        <artifactId>commons-el</artifactId>
-        <version>1.0</version>
-      </dependency>
-      <dependency>
         <groupId>commons-logging</groupId>
         <artifactId>commons-logging</artifactId>
         <version>1.1.3</version>
@@ -596,7 +584,7 @@
       <dependency>
         <groupId>junit</groupId>
         <artifactId>junit</artifactId>
-        <version>4.10</version>
+        <version>4.11</version>
       </dependency>
       <dependency>
         <groupId>commons-lang</groupId>
@@ -624,6 +612,11 @@
         <version>1.7.5</version>
       </dependency>
       <dependency>
+        <groupId>org.slf4j</groupId>
+        <artifactId>jul-to-slf4j</artifactId>
+        <version>1.7.5</version>
+      </dependency>
+      <dependency>
         <groupId>org.eclipse.jdt</groupId>
         <artifactId>core</artifactId>
         <version>3.1.1</version>
@@ -702,7 +695,7 @@
       <dependency>
         <groupId>org.apache.zookeeper</groupId>
         <artifactId>zookeeper</artifactId>
-        <version>3.4.5</version>
+        <version>${zookeeper.version}</version>
         <exclusions>
           <exclusion>
             <!-- otherwise seems to drag in junit 3.8.1 via jline -->
@@ -726,7 +719,7 @@
       <dependency>
         <groupId>org.apache.zookeeper</groupId>
         <artifactId>zookeeper</artifactId>
-        <version>3.4.5</version>
+        <version>${zookeeper.version}</version>
         <type>test-jar</type>
         <scope>test</scope>
         <exclusions>
@@ -739,7 +732,7 @@
       <dependency>
         <groupId>org.apache.bookkeeper</groupId>
         <artifactId>bookkeeper-server</artifactId>
-        <version>4.0.0</version>
+        <version>4.2.3</version>
         <scope>compile</scope>
       </dependency>
       <dependency>
@@ -750,7 +743,7 @@
       <dependency>
         <groupId>com.codahale.metrics</groupId>
         <artifactId>metrics-core</artifactId>
-        <version>3.0.0</version>
+        <version>3.0.1</version>
       </dependency>
       <dependency>
         <groupId>org.apache.hadoop</groupId>
@@ -795,6 +788,25 @@
         <artifactId>leveldbjni-all</artifactId>
         <version>1.8</version>
       </dependency>
+
+      <dependency>
+        <groupId>org.apache.directory.server</groupId>
+        <artifactId>apacheds-kerberos-codec</artifactId>
+        <version>2.0.0-M15</version>
+      </dependency>
+
+      <dependency>
+        <groupId>com.microsoft.windowsazure.storage</groupId>
+        <artifactId>microsoft-windowsazure-storage-sdk</artifactId>
+        <version>0.6.0</version>
+     </dependency>
+
+     <dependency>
+       <groupId>xerces</groupId>
+       <artifactId>xercesImpl</artifactId>
+       <version>2.9.1</version>
+     </dependency>
+      
     </dependencies>
   </dependencyManagement>
 
@@ -880,11 +892,6 @@
           <version>${avro.version}</version>
         </plugin>
         <plugin>
-          <groupId>org.codehaus.mojo.jspc</groupId>
-          <artifactId>jspc-maven-plugin</artifactId>
-          <version>2.0-alpha-3</version>
-        </plugin>
-        <plugin>
           <groupId>org.apache.maven.plugins</groupId>
           <artifactId>maven-project-info-reports-plugin</artifactId>
           <version>2.4</version>

Modified: hadoop/common/branches/HADOOP-10388/hadoop-project/src/site/site.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-project/src/site/site.xml?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-project/src/site/site.xml (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-project/src/site/site.xml Tue Aug 19 23:49:39 2014
@@ -51,8 +51,10 @@
       <item name="Single Node Setup" href="hadoop-project-dist/hadoop-common/SingleCluster.html"/>
       <item name="Cluster Setup" href="hadoop-project-dist/hadoop-common/ClusterSetup.html"/>
       <item name="Hadoop Commands Reference" href="hadoop-project-dist/hadoop-common/CommandsManual.html"/>
-      <item name="File System Shell" href="hadoop-project-dist/hadoop-common/FileSystemShell.html"/>
+      <item name="FileSystem Shell" href="hadoop-project-dist/hadoop-common/FileSystemShell.html"/>
       <item name="Hadoop Compatibility" href="hadoop-project-dist/hadoop-common/Compatibility.html"/>
+      <item name="FileSystem Specification"
+        href="hadoop-project-dist/hadoop-common/filesystem/index.html"/>
     </menu>
 
     <menu name="Common" inherit="top">
@@ -62,10 +64,12 @@
       <item name="Secure Mode" href="hadoop-project-dist/hadoop-common/SecureMode.html"/>
       <item name="Service Level Authorization" href="hadoop-project-dist/hadoop-common/ServiceLevelAuth.html"/>
       <item name="HTTP Authentication" href="hadoop-project-dist/hadoop-common/HttpAuthentication.html"/>
+      <item name="Hadoop KMS" href="hadoop-kms/index.html"/>
     </menu>
     
     <menu name="HDFS" inherit="top">
       <item name="HDFS User Guide" href="hadoop-project-dist/hadoop-hdfs/HdfsUserGuide.html"/>
+      <item name="HDFS Commands Reference" href="hadoop-project-dist/hadoop-hdfs/HDFSCommands.html"/>
       <item name="High Availability With QJM" href="hadoop-project-dist/hadoop-hdfs/HDFSHighAvailabilityWithQJM.html"/>
       <item name="High Availability With NFS" href="hadoop-project-dist/hadoop-hdfs/HDFSHighAvailabilityWithNFS.html"/>
       <item name="Federation" href="hadoop-project-dist/hadoop-hdfs/Federation.html"/>
@@ -85,13 +89,25 @@
       <item name="Centralized Cache Management" href="hadoop-project-dist/hadoop-hdfs/CentralizedCacheManagement.html"/>
       <item name="HDFS NFS Gateway" href="hadoop-project-dist/hadoop-hdfs/HdfsNfsGateway.html"/>
       <item name="HDFS Rolling Upgrade" href="hadoop-project-dist/hadoop-hdfs/HdfsRollingUpgrade.html"/>
+      <item name="Extended Attributes" href="hadoop-project-dist/hadoop-hdfs/ExtendedAttributes.html"/>
+      <item name="HDFS Support for Multihoming" href="hadoop-project-dist/hadoop-hdfs/HdfsMultihoming.html"/>
     </menu>
 
     <menu name="MapReduce" inherit="top">
+      <item name="MapReduce Tutorial" href="hadoop-mapreduce-client/hadoop-mapreduce-client-core/MapReduceTutorial.html"/>
+      <item name="MapReduce Commands Reference" href="hadoop-mapreduce-client/hadoop-mapreduce-client-core/MapredCommands.html"/>
       <item name="Compatibilty between Hadoop 1.x and Hadoop 2.x" href="hadoop-mapreduce-client/hadoop-mapreduce-client-core/MapReduce_Compatibility_Hadoop1_Hadoop2.html"/>
       <item name="Encrypted Shuffle" href="hadoop-mapreduce-client/hadoop-mapreduce-client-core/EncryptedShuffle.html"/>
       <item name="Pluggable Shuffle/Sort" href="hadoop-mapreduce-client/hadoop-mapreduce-client-core/PluggableShuffleAndPluggableSort.html"/>
       <item name="Distributed Cache Deploy" href="hadoop-mapreduce-client/hadoop-mapreduce-client-core/DistributedCacheDeploy.html"/>
+      <item name="Hadoop Streaming" href="hadoop-mapreduce-client/hadoop-mapreduce-client-core/HadoopStreaming.html"/>
+      <item name="Hadoop Archives" href="hadoop-mapreduce-client/hadoop-mapreduce-client-core/HadoopArchives.html"/>
+      <item name="DistCp" href="hadoop-mapreduce-client/hadoop-mapreduce-client-core/DistCp.html"/>
+    </menu>
+
+    <menu name="MapReduce REST APIs" inherit="top">
+      <item name="MR Application Master" href="hadoop-mapreduce-client/hadoop-mapreduce-client-core/MapredAppMasterRest.html"/>
+      <item name="MR History Server" href="hadoop-mapreduce-client/hadoop-mapreduce-client-hs/HistoryServerRest.html"/>
     </menu>
 
     <menu name="YARN" inherit="top">
@@ -99,6 +115,7 @@
       <item name="Capacity Scheduler" href="hadoop-yarn/hadoop-yarn-site/CapacityScheduler.html"/>
       <item name="Fair Scheduler" href="hadoop-yarn/hadoop-yarn-site/FairScheduler.html"/>
       <item name="ResourceManager Restart" href="hadoop-yarn/hadoop-yarn-site/ResourceManagerRestart.html"/>
+      <item name="ResourceManager HA" href="hadoop-yarn/hadoop-yarn-site/ResourceManagerHA.html"/>
       <item name="Web Application Proxy" href="hadoop-yarn/hadoop-yarn-site/WebApplicationProxy.html"/>
       <item name="YARN Timeline Server" href="hadoop-yarn/hadoop-yarn-site/TimelineServer.html"/>
       <item name="Writing YARN Applications" href="hadoop-yarn/hadoop-yarn-site/WritingYarnApplications.html"/>
@@ -110,8 +127,6 @@
       <item name="Introduction" href="hadoop-yarn/hadoop-yarn-site/WebServicesIntro.html"/>
       <item name="Resource Manager" href="hadoop-yarn/hadoop-yarn-site/ResourceManagerRest.html"/>
       <item name="Node Manager" href="hadoop-yarn/hadoop-yarn-site/NodeManagerRest.html"/>
-      <item name="MR Application Master" href="hadoop-yarn/hadoop-yarn-site/MapredAppMasterRest.html"/>
-      <item name="History Server" href="hadoop-yarn/hadoop-yarn-site/HistoryServerRest.html"/>
     </menu>
     
     <menu name="Auth" inherit="top">
@@ -127,6 +142,8 @@
       <item name="Common CHANGES.txt" href="hadoop-project-dist/hadoop-common/CHANGES.txt"/>
       <item name="HDFS CHANGES.txt" href="hadoop-project-dist/hadoop-hdfs/CHANGES.txt"/>
       <item name="MapReduce CHANGES.txt" href="hadoop-project-dist/hadoop-mapreduce/CHANGES.txt"/>
+      <item name="YARN CHANGES.txt" href="hadoop-project-dist/hadoop-yarn/CHANGES.txt"/>
+      <item name="Metrics" href="hadoop-project-dist/hadoop-common/Metrics.html"/>
     </menu>
     
     <menu name="Configuration" inherit="top">

Modified: hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-archives/src/main/java/org/apache/hadoop/tools/HadoopArchives.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-archives/src/main/java/org/apache/hadoop/tools/HadoopArchives.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-archives/src/main/java/org/apache/hadoop/tools/HadoopArchives.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-archives/src/main/java/org/apache/hadoop/tools/HadoopArchives.java Tue Aug 19 23:49:39 2014
@@ -338,7 +338,7 @@ public class HadoopArchives implements T
    * directories to
    * @param paths the source paths provided by the user. They
    * are glob free and have full path (not relative paths)
-   * @param parentPath the parent path that you wnat the archives
+   * @param parentPath the parent path that you want the archives
    * to be relative to. example - /home/user/dir1 can be archived with
    * parent as /home or /home/user.
    * @throws IOException

Propchange: hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-azure/
------------------------------------------------------------------------------
--- svn:ignore (added)
+++ svn:ignore Tue Aug 19 23:49:39 2014
@@ -0,0 +1,4 @@
+.classpath
+.project
+.settings
+target

Modified: hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/pom.xml?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/pom.xml (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/pom.xml Tue Aug 19 23:49:39 2014
@@ -192,21 +192,6 @@
           </execution>
         </executions>
       </plugin>
-      <!-- Disable generation of pdf using maven-pdf-plugin until v.1.2 is released.
-           See Hadoop 8064 for details. -->
-      <!--plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-pdf-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>pdf</id>
-            <phase>package</phase>
-            <goals>
-              <goal>pdf</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin-->
     </plugins>
   </build>
 </project>

Modified: hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyListing.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyListing.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyListing.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyListing.java Tue Aug 19 23:49:39 2014
@@ -22,7 +22,6 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.IOUtils;
@@ -31,11 +30,15 @@ import org.apache.hadoop.security.Creden
 
 import java.io.IOException;
 import java.lang.reflect.Constructor;
+import java.net.URI;
+import java.util.Set;
+
+import com.google.common.collect.Sets;
 
 /**
  * The CopyListing abstraction is responsible for how the list of
  * sources and targets is constructed, for DistCp's copy function.
- * The copy-listing should be a SequenceFile<Text, FileStatus>,
+ * The copy-listing should be a SequenceFile<Text, CopyListingFileStatus>,
  * located at the path specified to buildListing(),
  * each entry being a pair of (Source relative path, source file status),
  * all the paths being fully qualified.
@@ -85,7 +88,7 @@ public abstract class CopyListing extend
     config.setLong(DistCpConstants.CONF_LABEL_TOTAL_BYTES_TO_BE_COPIED, getBytesToCopy());
     config.setLong(DistCpConstants.CONF_LABEL_TOTAL_NUMBER_OF_RECORDS, getNumberOfPaths());
 
-    checkForDuplicates(pathToListFile);
+    validateFinalListing(pathToListFile, options);
   }
 
   /**
@@ -124,13 +127,16 @@ public abstract class CopyListing extend
   protected abstract long getNumberOfPaths();
 
   /**
-   * Validate the final resulting path listing to see if there are any duplicate entries
+   * Validate the final resulting path listing.  Checks if there are duplicate
+   * entries.  If preserving ACLs, checks that file system can support ACLs.
+   * If preserving XAttrs, checks that file system can support XAttrs.
    *
    * @param pathToListFile - path listing build by doBuildListing
+   * @param options - Input options to distcp
    * @throws IOException - Any issues while checking for duplicates and throws
    * @throws DuplicateFileException - if there are duplicates
    */
-  private void checkForDuplicates(Path pathToListFile)
+  private void validateFinalListing(Path pathToListFile, DistCpOptions options)
       throws DuplicateFileException, IOException {
 
     Configuration config = getConf();
@@ -142,17 +148,35 @@ public abstract class CopyListing extend
                           config, SequenceFile.Reader.file(sortedList));
     try {
       Text lastKey = new Text("*"); //source relative path can never hold *
-      FileStatus lastFileStatus = new FileStatus();
+      CopyListingFileStatus lastFileStatus = new CopyListingFileStatus();
 
       Text currentKey = new Text();
+      Set<URI> aclSupportCheckFsSet = Sets.newHashSet();
+      Set<URI> xAttrSupportCheckFsSet = Sets.newHashSet();
       while (reader.next(currentKey)) {
         if (currentKey.equals(lastKey)) {
-          FileStatus currentFileStatus = new FileStatus();
+          CopyListingFileStatus currentFileStatus = new CopyListingFileStatus();
           reader.getCurrentValue(currentFileStatus);
           throw new DuplicateFileException("File " + lastFileStatus.getPath() + " and " +
               currentFileStatus.getPath() + " would cause duplicates. Aborting");
         }
         reader.getCurrentValue(lastFileStatus);
+        if (options.shouldPreserve(DistCpOptions.FileAttribute.ACL)) {
+          FileSystem lastFs = lastFileStatus.getPath().getFileSystem(config);
+          URI lastFsUri = lastFs.getUri();
+          if (!aclSupportCheckFsSet.contains(lastFsUri)) {
+            DistCpUtils.checkFileSystemAclSupport(lastFs);
+            aclSupportCheckFsSet.add(lastFsUri);
+          }
+        }
+        if (options.shouldPreserve(DistCpOptions.FileAttribute.XATTR)) {
+          FileSystem lastFs = lastFileStatus.getPath().getFileSystem(config);
+          URI lastFsUri = lastFs.getUri();
+          if (!xAttrSupportCheckFsSet.contains(lastFsUri)) {
+            DistCpUtils.checkFileSystemXAttrSupport(lastFs);
+            xAttrSupportCheckFsSet.add(lastFsUri);
+          }
+        }
         lastKey.set(currentKey);
       }
     } finally {
@@ -236,4 +260,16 @@ public abstract class CopyListing extend
       super(message);
     }
   }
+
+  public static class AclsNotSupportedException extends RuntimeException {
+    public AclsNotSupportedException(String message) {
+      super(message);
+    }
+  }
+  
+  public static class XAttrsNotSupportedException extends RuntimeException {
+    public XAttrsNotSupportedException(String message) {
+      super(message);
+    }
+  }
 }

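[Editor's note] The copy listing validated above is a sorted SequenceFile of (source-relative path, CopyListingFileStatus) pairs, so duplicate detection reduces to comparing consecutive keys. The following stand-alone sketch walks such a listing with the same reader API used in validateFinalListing(); the class name and listing path are hypothetical.

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.io.IOUtils;
  import org.apache.hadoop.io.SequenceFile;
  import org.apache.hadoop.io.Text;
  import org.apache.hadoop.tools.CopyListingFileStatus;

  public class ListingDumpSketch {
    public static void main(String[] args) throws Exception {
      Configuration conf = new Configuration();
      // Hypothetical location; in DistCp this is the path handed to buildListing().
      Path listing = new Path("hdfs://nn:8020/tmp/distcp-listing.seq");

      SequenceFile.Reader reader = new SequenceFile.Reader(conf,
          SequenceFile.Reader.file(listing));
      try {
        Text relPath = new Text();                                   // key: source-relative path
        CopyListingFileStatus status = new CopyListingFileStatus();  // value: fully qualified file status
        Text previous = new Text("*");                               // '*' can never be a relative path
        while (reader.next(relPath)) {
          reader.getCurrentValue(status);
          if (relPath.equals(previous)) {
            // validateFinalListing() treats this case as a DuplicateFileException
            System.err.println("duplicate entry: " + status.getPath());
          }
          System.out.println(relPath + " -> " + status.getPath());
          previous.set(relPath);
        }
      } finally {
        IOUtils.closeStream(reader);
      }
    }
  }
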
Modified: hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java Tue Aug 19 23:49:39 2014
@@ -125,6 +125,12 @@ public class DistCp extends Configured i
     } catch (DuplicateFileException e) {
       LOG.error("Duplicate files in input path: ", e);
       return DistCpConstants.DUPLICATE_INPUT;
+    } catch (AclsNotSupportedException e) {
+      LOG.error("ACLs not supported on at least one file system: ", e);
+      return DistCpConstants.ACLS_NOT_SUPPORTED;
+    } catch (XAttrsNotSupportedException e) {
+      LOG.error("XAttrs not supported on at least one file system: ", e);
+      return DistCpConstants.XATTRS_NOT_SUPPORTED;
     } catch (Exception e) {
       LOG.error("Exception encountered ", e);
       return DistCpConstants.UNKNOWN_ERROR;
@@ -298,7 +304,12 @@ public class DistCp extends Configured i
     FileSystem targetFS = targetPath.getFileSystem(configuration);
     targetPath = targetPath.makeQualified(targetFS.getUri(),
                                           targetFS.getWorkingDirectory());
-
+    if (inputOptions.shouldPreserve(DistCpOptions.FileAttribute.ACL)) {
+      DistCpUtils.checkFileSystemAclSupport(targetFS);
+    }
+    if (inputOptions.shouldPreserve(DistCpOptions.FileAttribute.XATTR)) {
+      DistCpUtils.checkFileSystemXAttrSupport(targetFS);
+    }
     if (inputOptions.shouldAtomicCommit()) {
       Path workDir = inputOptions.getAtomicWorkPath();
       if (workDir == null) {

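[Editor's note] The bodies of DistCpUtils.checkFileSystemAclSupport and checkFileSystemXAttrSupport are not part of this hunk. A hypothetical sketch of the kind of probe they could perform follows; the helper names are invented here, and the assumption is that file systems without ACL/XAttr support throw (e.g. UnsupportedOperationException) from getAclStatus/getXAttrs.

  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.tools.CopyListing.AclsNotSupportedException;
  import org.apache.hadoop.tools.CopyListing.XAttrsNotSupportedException;

  public class FsFeatureProbeSketch {
    // Hypothetical probe: ask for the ACL of the root path and treat any failure
    // as "ACLs not supported" on this file system.
    static void checkAclSupport(FileSystem fs) {
      try {
        fs.getAclStatus(new Path(Path.SEPARATOR));
      } catch (Exception e) {
        throw new AclsNotSupportedException(
            "ACLs not supported for file system: " + fs.getUri());
      }
    }

    // Same idea for extended attributes.
    static void checkXAttrSupport(FileSystem fs) {
      try {
        fs.getXAttrs(new Path(Path.SEPARATOR));
      } catch (Exception e) {
        throw new XAttrsNotSupportedException(
            "XAttrs not supported for file system: " + fs.getUri());
      }
    }
  }
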
Modified: hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpConstants.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpConstants.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpConstants.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpConstants.java Tue Aug 19 23:49:39 2014
@@ -50,8 +50,18 @@ public class DistCpConstants {
   public static final String CONF_LABEL_COPY_STRATEGY = "distcp.copy.strategy";
   public static final String CONF_LABEL_SKIP_CRC = "distcp.skip.crc";
   public static final String CONF_LABEL_OVERWRITE = "distcp.copy.overwrite";
+  public static final String CONF_LABEL_APPEND = "distcp.copy.append";
   public static final String CONF_LABEL_BANDWIDTH_MB = "distcp.map.bandwidth.mb";
-
+  
+  public static final String CONF_LABEL_MAX_CHUNKS_TOLERABLE =
+      "distcp.dynamic.max.chunks.tolerable";
+  public static final String CONF_LABEL_MAX_CHUNKS_IDEAL =
+      "distcp.dynamic.max.chunks.ideal";
+  public static final String CONF_LABEL_MIN_RECORDS_PER_CHUNK =
+      "distcp.dynamic.min.records_per_chunk";
+  public static final String CONF_LABEL_SPLIT_RATIO =
+      "distcp.dynamic.split.ratio";
+  
   /* Total bytes to be copied. Updated by copylisting. Unfiltered count */
   public static final String CONF_LABEL_TOTAL_BYTES_TO_BE_COPIED = "mapred.total.bytes.expected";
 
@@ -106,5 +116,16 @@ public class DistCpConstants {
   public static final int SUCCESS = 0;
   public static final int INVALID_ARGUMENT = -1;
   public static final int DUPLICATE_INPUT = -2;
+  public static final int ACLS_NOT_SUPPORTED = -3;
+  public static final int XATTRS_NOT_SUPPORTED = -4;
   public static final int UNKNOWN_ERROR = -999;
+  
+  /**
+   * Constants for DistCp default values of configurable values
+   */
+  public static final int MAX_CHUNKS_TOLERABLE_DEFAULT = 400;
+  public static final int MAX_CHUNKS_IDEAL_DEFAULT     = 100;
+  public static final int MIN_RECORDS_PER_CHUNK_DEFAULT = 5;
+  public static final int SPLIT_RATIO_DEFAULT  = 2;
+
 }

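[Editor's note] The new distcp.dynamic.* labels are ordinary Configuration keys, with the defaults above applying when they are unset; they appear to be consumed by the dynamic copy strategy (DynamicInputFormat/DynamicInputChunk in the modified list). A minimal sketch of overriding them programmatically, with values chosen only for illustration:

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.tools.DistCpConstants;

  public class DynamicChunkTuningSketch {
    public static void main(String[] args) {
      Configuration conf = new Configuration();
      // Override the dynamic-strategy defaults declared above (400, 100, 5, 2 respectively).
      conf.setInt(DistCpConstants.CONF_LABEL_MAX_CHUNKS_TOLERABLE, 800);
      conf.setInt(DistCpConstants.CONF_LABEL_MAX_CHUNKS_IDEAL, 200);
      conf.setInt(DistCpConstants.CONF_LABEL_MIN_RECORDS_PER_CHUNK, 5);
      conf.setInt(DistCpConstants.CONF_LABEL_SPLIT_RATIO, 3);
    }
  }
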
Modified: hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptionSwitch.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptionSwitch.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptionSwitch.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptionSwitch.java Tue Aug 19 23:49:39 2014
@@ -45,8 +45,10 @@ public enum DistCpOptionSwitch {
    *
    */
   PRESERVE_STATUS(DistCpConstants.CONF_LABEL_PRESERVE_STATUS,
-      new Option("p", true, "preserve status (rbugpc)" +
-          "(replication, block-size, user, group, permission, checksum-type)")),
+      new Option("p", true, "preserve status (rbugpcax)(replication, " +
+          "block-size, user, group, permission, checksum-type, ACL, XATTR).  " +
+          "If -p is specified with no <arg>, then preserves replication, " +
+          "block size, user, group, permission and checksum type.")),
 
   /**
    * Update target location by copying only files that are missing
@@ -136,6 +138,10 @@ public enum DistCpOptionSwitch {
       new Option("overwrite", false, "Choose to overwrite target files " +
           "unconditionally, even if they exist.")),
 
+  APPEND(DistCpConstants.CONF_LABEL_APPEND,
+      new Option("append", false,
+          "Reuse existing data in target files and append new data to them if possible")),
+
   /**
    * Should DisctpExecution be blocking
    */
@@ -156,6 +162,7 @@ public enum DistCpOptionSwitch {
   BANDWIDTH(DistCpConstants.CONF_LABEL_BANDWIDTH_MB,
       new Option("bandwidth", true, "Specify bandwidth per map in MB"));
 
+  static final String PRESERVE_STATUS_DEFAULT = "-prbugpc";
   private final String confLabel;
   private final Option option;
 

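[Editor's note] A small sketch of the new -p semantics described above: bare -p falls back to PRESERVE_STATUS_DEFAULT, while the new 'a' and 'x' letters switch on ACL and XAttr preservation. The class name and HDFS URIs are hypothetical; the sketch assumes OptionsParser.parse accepts options followed by source and target paths.

  import org.apache.hadoop.tools.DistCpOptions;
  import org.apache.hadoop.tools.DistCpOptions.FileAttribute;
  import org.apache.hadoop.tools.OptionsParser;

  public class PreserveFlagSketch {
    public static void main(String[] args) {
      // Bare -p expands to PRESERVE_STATUS_DEFAULT ("-prbugpc"): replication, block size,
      // user, group, permission and checksum type, but neither ACLs nor XAttrs.
      DistCpOptions bareP = OptionsParser.parse(new String[] {
          "-p", "hdfs://nn:8020/src", "hdfs://nn:8020/dst"});
      System.out.println(bareP.shouldPreserve(FileAttribute.PERMISSION)); // true
      System.out.println(bareP.shouldPreserve(FileAttribute.ACL));        // false

      // -pax preserves only ACLs and XAttrs.
      DistCpOptions acls = OptionsParser.parse(new String[] {
          "-pax", "hdfs://nn:8020/src", "hdfs://nn:8020/dst"});
      System.out.println(acls.shouldPreserve(FileAttribute.ACL));   // true
      System.out.println(acls.shouldPreserve(FileAttribute.XATTR)); // true
    }
  }
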
Modified: hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptions.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptions.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptions.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptions.java Tue Aug 19 23:49:39 2014
@@ -39,6 +39,7 @@ public class DistCpOptions {
   private boolean deleteMissing = false;
   private boolean ignoreFailures = false;
   private boolean overwrite = false;
+  private boolean append = false;
   private boolean skipCRC = false;
   private boolean blocking = true;
 
@@ -65,7 +66,7 @@ public class DistCpOptions {
   private boolean targetPathExists = true;
   
   public static enum FileAttribute{
-    REPLICATION, BLOCKSIZE, USER, GROUP, PERMISSION, CHECKSUMTYPE;
+    REPLICATION, BLOCKSIZE, USER, GROUP, PERMISSION, CHECKSUMTYPE, ACL, XATTR;
 
     public static FileAttribute getAttribute(char symbol) {
       for (FileAttribute attribute : values()) {
@@ -245,6 +246,22 @@ public class DistCpOptions {
   }
 
   /**
+   * @return whether we can append new data to target files
+   */
+  public boolean shouldAppend() {
+    return append;
+  }
+
+  /**
+   * Set if we want to append new data to target files. This is valid only with
+   * update option and CRC is not skipped.
+   */
+  public void setAppend(boolean append) {
+    validate(DistCpOptionSwitch.APPEND, append);
+    this.append = append;
+  }
+
+  /**
    * Should CRC/checksum check be skipped while checking files are identical
    *
    * @return true if checksum check should be skipped while checking files are
@@ -472,6 +489,7 @@ public class DistCpOptions {
         value : this.atomicCommit);
     boolean skipCRC = (option == DistCpOptionSwitch.SKIP_CRC ?
         value : this.skipCRC);
+    boolean append = (option == DistCpOptionSwitch.APPEND ? value : this.append);
 
     if (syncFolder && atomicCommit) {
       throw new IllegalArgumentException("Atomic commit can't be used with " +
@@ -492,6 +510,14 @@ public class DistCpOptions {
       throw new IllegalArgumentException("Skip CRC is valid only with update options");
     }
 
+    if (!syncFolder && append) {
+      throw new IllegalArgumentException(
+          "Append is valid only with update options");
+    }
+    if (skipCRC && append) {
+      throw new IllegalArgumentException(
+          "Append is disallowed when skipping CRC");
+    }
   }
 
   /**
@@ -510,6 +536,8 @@ public class DistCpOptions {
         String.valueOf(deleteMissing));
     DistCpOptionSwitch.addToConf(conf, DistCpOptionSwitch.OVERWRITE,
         String.valueOf(overwrite));
+    DistCpOptionSwitch.addToConf(conf, DistCpOptionSwitch.APPEND,
+        String.valueOf(append));
     DistCpOptionSwitch.addToConf(conf, DistCpOptionSwitch.SKIP_CRC,
         String.valueOf(skipCRC));
     DistCpOptionSwitch.addToConf(conf, DistCpOptionSwitch.BANDWIDTH,

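Taken together, the DistCpOptions hunks add an append flag with a shouldAppend()/setAppend() pair and two validation rules: -append is only meaningful with -update (syncFolder), and it cannot be combined with skipping the CRC check. A minimal standalone sketch of just those rules, using plain booleans instead of the real DistCpOptionSwitch plumbing:

    public class AppendValidationDemo {
      // Mirrors the constraints added to DistCpOptions.validate(): append needs
      // the update (sync-folder) mode and is incompatible with skipping CRC.
      static void validateAppend(boolean syncFolder, boolean skipCRC,
          boolean append) {
        if (append && !syncFolder) {
          throw new IllegalArgumentException(
              "Append is valid only with update options");
        }
        if (append && skipCRC) {
          throw new IllegalArgumentException(
              "Append is disallowed when skipping CRC");
        }
      }

      public static void main(String[] args) {
        validateAppend(true, false, true);    // ok: -update -append
        try {
          validateAppend(false, false, true); // rejected: -append without -update
        } catch (IllegalArgumentException e) {
          System.out.println("rejected: " + e.getMessage());
        }
      }
    }

Because setAppend() funnels through the shared validate() method, an inconsistent combination is rejected no matter which option happens to be set last.
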
Modified: hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/OptionsParser.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/OptionsParser.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/OptionsParser.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/OptionsParser.java Tue Aug 19 23:49:39 2014
@@ -50,7 +50,7 @@ public class OptionsParser {
     protected String[] flatten(Options options, String[] arguments, boolean stopAtNonOption) {
       for (int index = 0; index < arguments.length; index++) {
         if (arguments[index].equals("-" + DistCpOptionSwitch.PRESERVE_STATUS.getSwitch())) {
-          arguments[index] = "-prbugpc";
+          arguments[index] = DistCpOptionSwitch.PRESERVE_STATUS_DEFAULT;
         }
       }
       return super.flatten(options, arguments, stopAtNonOption);
@@ -140,6 +140,10 @@ public class OptionsParser {
       option.setOverwrite(true);
     }
 
+    if (command.hasOption(DistCpOptionSwitch.APPEND.getSwitch())) {
+      option.setAppend(true);
+    }
+
     if (command.hasOption(DistCpOptionSwitch.DELETE_MISSING.getSwitch())) {
       option.setDeleteMissing(true);
     }

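In OptionsParser, the flatten() override now substitutes PRESERVE_STATUS_DEFAULT for a bare -p before commons-cli parses the arguments, and the new hasOption branch maps -append onto setAppend(true). A rough sketch of just the pre-processing step, with plain string handling standing in for the commons-cli flatten() hook:

    import java.util.Arrays;

    public class FlattenDemo {
      static final String PRESERVE_STATUS_DEFAULT = "-prbugpc";

      // Replace a bare "-p" with the full default before option parsing, so the
      // single-character switch still carries its list of attribute symbols.
      static String[] expandBarePreserve(String[] arguments) {
        String[] out = Arrays.copyOf(arguments, arguments.length);
        for (int i = 0; i < out.length; i++) {
          if (out[i].equals("-p")) {
            out[i] = PRESERVE_STATUS_DEFAULT;
          }
        }
        return out;
      }

      public static void main(String[] args) {
        System.out.println(Arrays.toString(expandBarePreserve(
            new String[] {"-update", "-p", "/src", "/dst"})));
        // -> [-update, -prbugpc, /src, /dst]
      }
    }

Rewriting the argument array up front lets a bare -p behave as if the full symbol list had been typed, without special-casing it after parsing.
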
Modified: hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java Tue Aug 19 23:49:39 2014
@@ -27,7 +27,7 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.DataInputBuffer;
+import org.apache.hadoop.tools.DistCpOptions.FileAttribute;
 import org.apache.hadoop.tools.util.DistCpUtils;
 import org.apache.hadoop.mapreduce.security.TokenCache;
 import org.apache.hadoop.security.Credentials;
@@ -121,7 +121,7 @@ public class SimpleCopyListing extends C
    *     the source root is a directory, then the source root entry is not 
    *     written to the sequence file, because only the contents of the source
    *     directory need to be copied in this case.
-   * See {@link org.apache.hadoop.tools.util.DistCpUtils.getRelativePath} for
+   * See {@link org.apache.hadoop.tools.util.DistCpUtils#getRelativePath} for
    *     how relative path is computed.
    * See computeSourceRootPath method for how the root path of the source is
    *     computed.
@@ -139,28 +139,36 @@ public class SimpleCopyListing extends C
 
         FileStatus rootStatus = sourceFS.getFileStatus(path);
         Path sourcePathRoot = computeSourceRootPath(rootStatus, options);
-        boolean localFile = (rootStatus.getClass() != FileStatus.class);
 
         FileStatus[] sourceFiles = sourceFS.listStatus(path);
         boolean explore = (sourceFiles != null && sourceFiles.length > 0);
         if (!explore || rootStatus.isDirectory()) {
-          writeToFileListingRoot(fileListWriter, rootStatus, sourcePathRoot,
-              localFile, options);
+          CopyListingFileStatus rootCopyListingStatus =
+            DistCpUtils.toCopyListingFileStatus(sourceFS, rootStatus,
+              options.shouldPreserve(FileAttribute.ACL), 
+              options.shouldPreserve(FileAttribute.XATTR));
+          writeToFileListingRoot(fileListWriter, rootCopyListingStatus,
+              sourcePathRoot, options);
         }
         if (explore) {
           for (FileStatus sourceStatus: sourceFiles) {
             if (LOG.isDebugEnabled()) {
               LOG.debug("Recording source-path: " + sourceStatus.getPath() + " for copy.");
             }
-            writeToFileListing(fileListWriter, sourceStatus, sourcePathRoot,
-                localFile, options);
+            CopyListingFileStatus sourceCopyListingStatus =
+              DistCpUtils.toCopyListingFileStatus(sourceFS, sourceStatus,
+                options.shouldPreserve(FileAttribute.ACL) &&
+                sourceStatus.isDirectory(), options.shouldPreserve(
+                    FileAttribute.XATTR) && sourceStatus.isDirectory());
+            writeToFileListing(fileListWriter, sourceCopyListingStatus,
+                sourcePathRoot, options);
 
             if (isDirectoryAndNotEmpty(sourceFS, sourceStatus)) {
               if (LOG.isDebugEnabled()) {
                 LOG.debug("Traversing non-empty source dir: " + sourceStatus.getPath());
               }
               traverseNonEmptyDirectory(fileListWriter, sourceStatus, sourcePathRoot,
-                  localFile, options);
+                  options);
             }
           }
         }
@@ -233,7 +241,7 @@ public class SimpleCopyListing extends C
     return SequenceFile.createWriter(getConf(),
             SequenceFile.Writer.file(pathToListFile),
             SequenceFile.Writer.keyClass(Text.class),
-            SequenceFile.Writer.valueClass(FileStatus.class),
+            SequenceFile.Writer.valueClass(CopyListingFileStatus.class),
             SequenceFile.Writer.compression(SequenceFile.CompressionType.NONE));
   }
 
@@ -250,7 +258,6 @@ public class SimpleCopyListing extends C
   private void traverseNonEmptyDirectory(SequenceFile.Writer fileListWriter,
                                          FileStatus sourceStatus,
                                          Path sourcePathRoot,
-                                         boolean localFile,
                                          DistCpOptions options)
                                          throws IOException {
     FileSystem sourceFS = sourcePathRoot.getFileSystem(getConf());
@@ -262,8 +269,12 @@ public class SimpleCopyListing extends C
         if (LOG.isDebugEnabled())
           LOG.debug("Recording source-path: "
                     + sourceStatus.getPath() + " for copy.");
-        writeToFileListing(fileListWriter, child, sourcePathRoot,
-             localFile, options);
+        CopyListingFileStatus childCopyListingStatus =
+          DistCpUtils.toCopyListingFileStatus(sourceFS, child,
+            options.shouldPreserve(FileAttribute.ACL) && child.isDirectory(), 
+            options.shouldPreserve(FileAttribute.XATTR) && child.isDirectory());
+        writeToFileListing(fileListWriter, childCopyListingStatus,
+             sourcePathRoot, options);
         if (isDirectoryAndNotEmpty(sourceFS, child)) {
           if (LOG.isDebugEnabled())
             LOG.debug("Traversing non-empty source dir: "
@@ -275,8 +286,7 @@ public class SimpleCopyListing extends C
   }
   
   private void writeToFileListingRoot(SequenceFile.Writer fileListWriter,
-      FileStatus fileStatus, Path sourcePathRoot,
-      boolean localFile,
+      CopyListingFileStatus fileStatus, Path sourcePathRoot,
       DistCpOptions options) throws IOException {
     boolean syncOrOverwrite = options.shouldSyncFolder() ||
         options.shouldOverwrite();
@@ -288,14 +298,12 @@ public class SimpleCopyListing extends C
       }      
       return;
     }
-    writeToFileListing(fileListWriter, fileStatus, sourcePathRoot, localFile,
-        options);
+    writeToFileListing(fileListWriter, fileStatus, sourcePathRoot, options);
   }
 
   private void writeToFileListing(SequenceFile.Writer fileListWriter,
-                                  FileStatus fileStatus,
+                                  CopyListingFileStatus fileStatus,
                                   Path sourcePathRoot,
-                                  boolean localFile,
                                   DistCpOptions options) throws IOException {
     if (LOG.isDebugEnabled()) {
       LOG.debug("REL PATH: " + DistCpUtils.getRelativePath(sourcePathRoot,
@@ -303,9 +311,6 @@ public class SimpleCopyListing extends C
     }
 
     FileStatus status = fileStatus;
-    if (localFile) {
-      status = getFileStatus(fileStatus);
-    }
 
     if (!shouldCopy(fileStatus.getPath(), options)) {
       return;
@@ -320,19 +325,4 @@ public class SimpleCopyListing extends C
     }
     totalPaths++;
   }
-
-  private static final ByteArrayOutputStream buffer = new ByteArrayOutputStream(64);
-  private DataInputBuffer in = new DataInputBuffer();
-  
-  private FileStatus getFileStatus(FileStatus fileStatus) throws IOException {
-    FileStatus status = new FileStatus();
-
-    buffer.reset();
-    DataOutputStream out = new DataOutputStream(buffer);
-    fileStatus.write(out);
-
-    in.reset(buffer.toByteArray(), 0, buffer.size());
-    status.readFields(in);
-    return status;
-  }
 }

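With these SimpleCopyListing changes the listing sequence file stores CopyListingFileStatus values, which can carry ACLs and XAttrs gathered at listing time, and the old buffer-based FileStatus round trip for local files is gone. A rough sketch of reading such a listing back, assuming a Hadoop 2.x client on the classpath; the listing path taken from the command line is purely illustrative:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.SequenceFile;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.tools.CopyListingFileStatus;

    public class ListingReaderDemo {
      // Iterate over a DistCp copy listing; each value is a CopyListingFileStatus,
      // so any ACL/XAttr data captured at listing time travels with the path.
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Path listing = new Path(args[0]);   // e.g. the sequence file DistCp produced
        try (SequenceFile.Reader reader = new SequenceFile.Reader(conf,
            SequenceFile.Reader.file(listing))) {
          Text relPath = new Text();
          CopyListingFileStatus status = new CopyListingFileStatus();
          while (reader.next(relPath, status)) {
            System.out.println(relPath + " -> " + status.getPath()
                + " len=" + status.getLen());
          }
        }
      }
    }
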
Modified: hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyCommitter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyCommitter.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyCommitter.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyCommitter.java Tue Aug 19 23:49:39 2014
@@ -178,7 +178,7 @@ public class CopyCommitter extends FileO
 
     long preservedEntries = 0;
     try {
-      FileStatus srcFileStatus = new FileStatus();
+      CopyListingFileStatus srcFileStatus = new CopyListingFileStatus();
       Text srcRelPath = new Text();
 
       // Iterate over every source path that was copied.
@@ -246,9 +246,9 @@ public class CopyCommitter extends FileO
     // Delete all from target that doesn't also exist on source.
     long deletedEntries = 0;
     try {
-      FileStatus srcFileStatus = new FileStatus();
+      CopyListingFileStatus srcFileStatus = new CopyListingFileStatus();
       Text srcRelPath = new Text();
-      FileStatus trgtFileStatus = new FileStatus();
+      CopyListingFileStatus trgtFileStatus = new CopyListingFileStatus();
       Text trgtRelPath = new Text();
 
       FileSystem targetFS = targetFinalPath.getFileSystem(conf);

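CopyCommitter's preserve and delete-missing passes now deserialize CopyListingFileStatus records from the listings, but the surrounding logic is unchanged: per its comment, everything on the target that does not also exist on the source gets deleted. Conceptually that is a merge-style walk over two sorted relative-path listings, sketched here over plain lists rather than the committer's actual SequenceFile readers:

    import java.util.Arrays;
    import java.util.Iterator;
    import java.util.List;

    public class DeleteMissingDemo {
      // Walk two sorted relative-path listings; anything present in the target
      // listing but absent from the source listing is a deletion candidate.
      static void deleteMissing(List<String> sortedSource, List<String> sortedTarget) {
        Iterator<String> src = sortedSource.iterator();
        String s = src.hasNext() ? src.next() : null;
        for (String t : sortedTarget) {
          while (s != null && s.compareTo(t) < 0) {
            s = src.hasNext() ? src.next() : null;
          }
          if (s == null || !s.equals(t)) {
            System.out.println("would delete from target: " + t);
          }
        }
      }

      public static void main(String[] args) {
        deleteMissing(Arrays.asList("/a", "/c"),
            Arrays.asList("/a", "/b", "/c", "/d"));
        // -> would delete /b and /d
      }
    }
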
Modified: hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java Tue Aug 19 23:49:39 2014
@@ -18,15 +18,24 @@
 
 package org.apache.hadoop.tools.mapred;
 
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.Arrays;
+import java.util.EnumSet;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileChecksum;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.tools.CopyListingFileStatus;
 import org.apache.hadoop.tools.DistCpConstants;
 import org.apache.hadoop.tools.DistCpOptionSwitch;
 import org.apache.hadoop.tools.DistCpOptions;
@@ -34,15 +43,11 @@ import org.apache.hadoop.tools.DistCpOpt
 import org.apache.hadoop.tools.util.DistCpUtils;
 import org.apache.hadoop.util.StringUtils;
 
-import java.io.*;
-import java.util.EnumSet;
-import java.util.Arrays;
-
 /**
  * Mapper class that executes the DistCp copy operation.
  * Implements the o.a.h.mapreduce.Mapper<> interface.
  */
-public class CopyMapper extends Mapper<Text, FileStatus, Text, Text> {
+public class CopyMapper extends Mapper<Text, CopyListingFileStatus, Text, Text> {
 
   /**
    * Hadoop counters for the DistCp CopyMapper.
@@ -59,6 +64,15 @@ public class CopyMapper extends Mapper<T
     BYTESSKIPPED, // Number of bytes that were skipped from copy.
   }
 
+  /**
+   * Indicates the action to take for each file.
+   */
+  static enum FileAction {
+    SKIP,         // Skip copying the file since it's already in the target FS
+    APPEND,       // Only need to append new data to the file in the target FS 
+    OVERWRITE,    // Overwrite the whole file
+  }
+
   private static Log LOG = LogFactory.getLog(CopyMapper.class);
 
   private Configuration conf;
@@ -67,6 +81,7 @@ public class CopyMapper extends Mapper<T
   private boolean ignoreFailures = false;
   private boolean skipCrc = false;
   private boolean overWrite = false;
+  private boolean append = false;
   private EnumSet<FileAttribute> preserve = EnumSet.noneOf(FileAttribute.class);
 
   private FileSystem targetFS = null;
@@ -87,6 +102,7 @@ public class CopyMapper extends Mapper<T
     ignoreFailures = conf.getBoolean(DistCpOptionSwitch.IGNORE_FAILURES.getConfigLabel(), false);
     skipCrc = conf.getBoolean(DistCpOptionSwitch.SKIP_CRC.getConfigLabel(), false);
     overWrite = conf.getBoolean(DistCpOptionSwitch.OVERWRITE.getConfigLabel(), false);
+    append = conf.getBoolean(DistCpOptionSwitch.APPEND.getConfigLabel(), false);
     preserve = DistCpUtils.unpackAttributes(conf.get(DistCpOptionSwitch.
         PRESERVE_STATUS.getConfigLabel()));
 
@@ -172,8 +188,8 @@ public class CopyMapper extends Mapper<T
    * @throws IOException
    */
   @Override
-  public void map(Text relPath, FileStatus sourceFileStatus, Context context)
-          throws IOException, InterruptedException {
+  public void map(Text relPath, CopyListingFileStatus sourceFileStatus,
+          Context context) throws IOException, InterruptedException {
     Path sourcePath = sourceFileStatus.getPath();
 
     if (LOG.isDebugEnabled())
@@ -191,11 +207,14 @@ public class CopyMapper extends Mapper<T
     LOG.info(description);
 
     try {
-      FileStatus sourceCurrStatus;
+      CopyListingFileStatus sourceCurrStatus;
       FileSystem sourceFS;
       try {
         sourceFS = sourcePath.getFileSystem(conf);
-        sourceCurrStatus = sourceFS.getFileStatus(sourcePath);
+        sourceCurrStatus = DistCpUtils.toCopyListingFileStatus(sourceFS,
+          sourceFS.getFileStatus(sourcePath),
+          fileAttributes.contains(FileAttribute.ACL), 
+          fileAttributes.contains(FileAttribute.XATTR));
       } catch (FileNotFoundException e) {
         throw new IOException(new RetriableFileCopyCommand.CopyReadException(e));
       }
@@ -219,20 +238,19 @@ public class CopyMapper extends Mapper<T
         return;
       }
 
-      if (skipFile(sourceFS, sourceCurrStatus, target)) {
+      FileAction action = checkUpdate(sourceFS, sourceCurrStatus, target);
+      if (action == FileAction.SKIP) {
         LOG.info("Skipping copy of " + sourceCurrStatus.getPath()
                  + " to " + target);
         updateSkipCounters(context, sourceCurrStatus);
         context.write(null, new Text("SKIP: " + sourceCurrStatus.getPath()));
-      }
-      else {
+      } else {
         copyFileWithRetry(description, sourceCurrStatus, target, context,
-                          fileAttributes);
+            action, fileAttributes);
       }
 
       DistCpUtils.preserve(target.getFileSystem(conf), target,
                            sourceCurrStatus, fileAttributes);
-
     } catch (IOException exception) {
       handleFailures(exception, sourceFileStatus, target, context);
     }
@@ -249,14 +267,14 @@ public class CopyMapper extends Mapper<T
     return DistCpUtils.unpackAttributes(attributeString);
   }
 
-  private void copyFileWithRetry(String description, FileStatus sourceFileStatus,
-               Path target, Context context,
-               EnumSet<DistCpOptions.FileAttribute> fileAttributes) throws IOException {
-
+  private void copyFileWithRetry(String description,
+      FileStatus sourceFileStatus, Path target, Context context,
+      FileAction action, EnumSet<DistCpOptions.FileAttribute> fileAttributes)
+      throws IOException {
     long bytesCopied;
     try {
-      bytesCopied = (Long)new RetriableFileCopyCommand(skipCrc, description)
-                       .execute(sourceFileStatus, target, context, fileAttributes);
+      bytesCopied = (Long) new RetriableFileCopyCommand(skipCrc, description,
+          action).execute(sourceFileStatus, target, context, fileAttributes);
     } catch (Exception e) {
       context.setStatus("Copy Failure: " + sourceFileStatus.getPath());
       throw new IOException("File copy failed: " + sourceFileStatus.getPath() +
@@ -306,25 +324,48 @@ public class CopyMapper extends Mapper<T
     context.getCounter(counter).increment(value);
   }
 
-  private boolean skipFile(FileSystem sourceFS, FileStatus source, Path target)
-                                          throws IOException {
-    return     targetFS.exists(target)
-            && !overWrite
-            && !mustUpdate(sourceFS, source, target);
-  }
-
-  private boolean mustUpdate(FileSystem sourceFS, FileStatus source, Path target)
-                                    throws IOException {
-    final FileStatus targetFileStatus = targetFS.getFileStatus(target);
-
-    return     syncFolders
-            && (
-                   targetFileStatus.getLen() != source.getLen()
-                || (!skipCrc &&
-                       !DistCpUtils.checksumsAreEqual(sourceFS,
-                          source.getPath(), null, targetFS, target))
-                || (source.getBlockSize() != targetFileStatus.getBlockSize() &&
-                      preserve.contains(FileAttribute.BLOCKSIZE))
-               );
+  private FileAction checkUpdate(FileSystem sourceFS, FileStatus source,
+      Path target) throws IOException {
+    final FileStatus targetFileStatus;
+    try {
+      targetFileStatus = targetFS.getFileStatus(target);
+    } catch (FileNotFoundException e) {
+      return FileAction.OVERWRITE;
+    }
+    if (targetFileStatus != null && !overWrite) {
+      if (canSkip(sourceFS, source, targetFileStatus)) {
+        return FileAction.SKIP;
+      } else if (append) {
+        long targetLen = targetFileStatus.getLen();
+        if (targetLen < source.getLen()) {
+          FileChecksum sourceChecksum = sourceFS.getFileChecksum(
+              source.getPath(), targetLen);
+          if (sourceChecksum != null
+              && sourceChecksum.equals(targetFS.getFileChecksum(target))) {
+            // We require that the checksum is not null. Thus currently only
+            // DistributedFileSystem is supported
+            return FileAction.APPEND;
+          }
+        }
+      }
+    }
+    return FileAction.OVERWRITE;
+  }
+
+  private boolean canSkip(FileSystem sourceFS, FileStatus source, 
+      FileStatus target) throws IOException {
+    if (!syncFolders) {
+      return true;
+    }
+    boolean sameLength = target.getLen() == source.getLen();
+    boolean sameBlockSize = source.getBlockSize() == target.getBlockSize()
+        || !preserve.contains(FileAttribute.BLOCKSIZE);
+    if (sameLength && sameBlockSize) {
+      return skipCrc ||
+          DistCpUtils.checksumsAreEqual(sourceFS, source.getPath(), null,
+              targetFS, target.getPath());
+    } else {
+      return false;
+    }
   }
 }

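The CopyMapper rewrite replaces the skipFile/mustUpdate booleans with a three-way FileAction: a missing target means OVERWRITE; an existing, unmodified target (same length, compatible block size, matching checksums unless skipped) means SKIP; and with -append, a target that is shorter than the source and whose checksum equals the source's checksum over the same leading bytes means APPEND. Everything else falls back to OVERWRITE. A distilled sketch of that decision as a pure function; the length/checksum comparisons arrive as booleans here because the real code obtains them from the source and target FileSystems:

    public class FileActionDemo {
      enum FileAction { SKIP, APPEND, OVERWRITE }

      // Distilled form of CopyMapper.checkUpdate()/canSkip(); block-size and
      // skip-CRC details are folded into the targetMatchesSource flag.
      static FileAction decide(boolean targetExists, boolean overwrite,
          boolean update, boolean append, long srcLen, long targetLen,
          boolean targetMatchesSource, boolean targetIsPrefixOfSource) {
        if (!targetExists) {
          return FileAction.OVERWRITE;
        }
        if (!overwrite) {
          // Without -update an existing target is always skipped; with -update
          // it is skipped only when it already matches the source.
          if (!update || targetMatchesSource) {
            return FileAction.SKIP;
          }
          if (append && targetLen < srcLen && targetIsPrefixOfSource) {
            return FileAction.APPEND;
          }
        }
        return FileAction.OVERWRITE;
      }

      public static void main(String[] args) {
        // -update -append, 40 of 100 bytes already copied and verified: APPEND.
        System.out.println(decide(true, false, true, true, 100L, 40L, false, true));
      }
    }
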
Modified: hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java Tue Aug 19 23:49:39 2014
@@ -18,10 +18,8 @@
 
 package org.apache.hadoop.tools.mapred;
 
-import java.io.BufferedInputStream;
 import java.io.BufferedOutputStream;
 import java.io.IOException;
-import java.io.InputStream;
 import java.io.OutputStream;
 import java.util.EnumSet;
 
@@ -29,6 +27,8 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CreateFlag;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileChecksum;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -39,6 +39,7 @@ import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.tools.DistCpConstants;
 import org.apache.hadoop.tools.DistCpOptions.FileAttribute;
+import org.apache.hadoop.tools.mapred.CopyMapper.FileAction;
 import org.apache.hadoop.tools.util.DistCpUtils;
 import org.apache.hadoop.tools.util.RetriableCommand;
 import org.apache.hadoop.tools.util.ThrottledInputStream;
@@ -54,13 +55,15 @@ public class RetriableFileCopyCommand ex
   private static Log LOG = LogFactory.getLog(RetriableFileCopyCommand.class);
   private static int BUFFER_SIZE = 8 * 1024;
   private boolean skipCrc = false;
+  private FileAction action;
 
   /**
    * Constructor, taking a description of the action.
    * @param description Verbose description of the copy operation.
    */
-  public RetriableFileCopyCommand(String description) {
+  public RetriableFileCopyCommand(String description, FileAction action) {
     super(description);
+    this.action = action;
   }
 
   /**
@@ -68,9 +71,11 @@ public class RetriableFileCopyCommand ex
    *
    * @param skipCrc Whether to skip the crc check.
    * @param description A verbose description of the copy operation.
+   * @param action Whether to overwrite the target file or append new data to it.
    */
-  public RetriableFileCopyCommand(boolean skipCrc, String description) {
-    this(description);
+  public RetriableFileCopyCommand(boolean skipCrc, String description,
+      FileAction action) {
+    this(description, action);
     this.skipCrc = skipCrc;
   }
 
@@ -96,18 +101,17 @@ public class RetriableFileCopyCommand ex
   }
 
   private long doCopy(FileStatus sourceFileStatus, Path target,
-                      Mapper.Context context,
-                      EnumSet<FileAttribute> fileAttributes)
-          throws IOException {
-
-    Path tmpTargetPath = getTmpFile(target, context);
+      Mapper.Context context, EnumSet<FileAttribute> fileAttributes)
+      throws IOException {
+    final boolean toAppend = action == FileAction.APPEND;
+    Path targetPath = toAppend ? target : getTmpFile(target, context);
     final Configuration configuration = context.getConfiguration();
     FileSystem targetFS = target.getFileSystem(configuration);
 
     try {
       if (LOG.isDebugEnabled()) {
         LOG.debug("Copying " + sourceFileStatus.getPath() + " to " + target);
-        LOG.debug("Tmp-file path: " + tmpTargetPath);
+        LOG.debug("Target file path: " + targetPath);
       }
       final Path sourcePath = sourceFileStatus.getPath();
       final FileSystem sourceFS = sourcePath.getFileSystem(configuration);
@@ -115,22 +119,31 @@ public class RetriableFileCopyCommand ex
           .contains(FileAttribute.CHECKSUMTYPE) ? sourceFS
           .getFileChecksum(sourcePath) : null;
 
-      long bytesRead = copyToTmpFile(tmpTargetPath, targetFS, sourceFileStatus,
-          context, fileAttributes, sourceChecksum);
+      final long offset = action == FileAction.APPEND ? targetFS.getFileStatus(
+          target).getLen() : 0;
+      long bytesRead = copyToFile(targetPath, targetFS, sourceFileStatus,
+          offset, context, fileAttributes, sourceChecksum);
 
-      compareFileLengths(sourceFileStatus, tmpTargetPath, configuration,
-          bytesRead);
+      compareFileLengths(sourceFileStatus, targetPath, configuration, bytesRead
+          + offset);
       //At this point, src&dest lengths are same. if length==0, we skip checksum
       if ((bytesRead != 0) && (!skipCrc)) {
         compareCheckSums(sourceFS, sourceFileStatus.getPath(), sourceChecksum,
-            targetFS, tmpTargetPath);
+            targetFS, targetPath);
+      }
+      // not the append case: we first write to a temporary file and then
+      // rename it to the target path.
+      if (!toAppend) {
+        promoteTmpToTarget(targetPath, target, targetFS);
       }
-      promoteTmpToTarget(tmpTargetPath, target, targetFS);
       return bytesRead;
-
     } finally {
-      if (targetFS.exists(tmpTargetPath))
-        targetFS.delete(tmpTargetPath, false);
+      // note that in the append case we may have appended partial data and
+      // then failed. On the next retry we either reuse the partially appended
+      // data if it is still good, or overwrite the whole file.
+      if (!toAppend && targetFS.exists(targetPath)) {
+        targetFS.delete(targetPath, false);
+      }
     }
   }
 
@@ -147,29 +160,37 @@ public class RetriableFileCopyCommand ex
     return null;
   }
 
-  private long copyToTmpFile(Path tmpTargetPath, FileSystem targetFS,
-      FileStatus sourceFileStatus, Mapper.Context context,
+  private long copyToFile(Path targetPath, FileSystem targetFS,
+      FileStatus sourceFileStatus, long sourceOffset, Mapper.Context context,
       EnumSet<FileAttribute> fileAttributes, final FileChecksum sourceChecksum)
       throws IOException {
     FsPermission permission = FsPermission.getFileDefault().applyUMask(
         FsPermission.getUMask(targetFS.getConf()));
-    OutputStream outStream = new BufferedOutputStream(
-        targetFS.create(tmpTargetPath, permission,
-            EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE), BUFFER_SIZE,
-            getReplicationFactor(fileAttributes, sourceFileStatus, targetFS,
-                tmpTargetPath),
-            getBlockSize(fileAttributes, sourceFileStatus, targetFS,
-                tmpTargetPath),
-            context, getChecksumOpt(fileAttributes, sourceChecksum)));
-    return copyBytes(sourceFileStatus, outStream, BUFFER_SIZE, context);
+    final OutputStream outStream;
+    if (action == FileAction.OVERWRITE) {
+      final short repl = getReplicationFactor(fileAttributes, sourceFileStatus,
+          targetFS, targetPath);
+      final long blockSize = getBlockSize(fileAttributes, sourceFileStatus,
+          targetFS, targetPath);
+      FSDataOutputStream out = targetFS.create(targetPath, permission,
+          EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE),
+          BUFFER_SIZE, repl, blockSize, context,
+          getChecksumOpt(fileAttributes, sourceChecksum));
+      outStream = new BufferedOutputStream(out);
+    } else {
+      outStream = new BufferedOutputStream(targetFS.append(targetPath,
+          BUFFER_SIZE));
+    }
+    return copyBytes(sourceFileStatus, sourceOffset, outStream, BUFFER_SIZE,
+        context);
   }
 
   private void compareFileLengths(FileStatus sourceFileStatus, Path target,
-                                  Configuration configuration, long bytesRead)
+                                  Configuration configuration, long targetLen)
                                   throws IOException {
     final Path sourcePath = sourceFileStatus.getPath();
     FileSystem fs = sourcePath.getFileSystem(configuration);
-    if (fs.getFileStatus(sourcePath).getLen() != bytesRead)
+    if (fs.getFileStatus(sourcePath).getLen() != targetLen)
       throw new IOException("Mismatch in length of source:" + sourcePath
                 + " and target:" + target);
   }
@@ -215,8 +236,8 @@ public class RetriableFileCopyCommand ex
   }
 
   @VisibleForTesting
-  long copyBytes(FileStatus sourceFileStatus, OutputStream outStream,
-                         int bufferSize, Mapper.Context context)
+  long copyBytes(FileStatus sourceFileStatus, long sourceOffset,
+      OutputStream outStream, int bufferSize, Mapper.Context context)
       throws IOException {
     Path source = sourceFileStatus.getPath();
     byte buf[] = new byte[bufferSize];
@@ -225,19 +246,21 @@ public class RetriableFileCopyCommand ex
 
     try {
       inStream = getInputStream(source, context.getConfiguration());
-      int bytesRead = readBytes(inStream, buf);
+      int bytesRead = readBytes(inStream, buf, sourceOffset);
       while (bytesRead >= 0) {
         totalBytesRead += bytesRead;
+        if (action == FileAction.APPEND) {
+          sourceOffset += bytesRead;
+        }
         outStream.write(buf, 0, bytesRead);
         updateContextStatus(totalBytesRead, context, sourceFileStatus);
-        bytesRead = inStream.read(buf);
+        bytesRead = readBytes(inStream, buf, sourceOffset);
       }
       outStream.close();
       outStream = null;
     } finally {
       IOUtils.cleanup(LOG, outStream, inStream);
     }
-
     return totalBytesRead;
   }
 
@@ -254,24 +277,27 @@ public class RetriableFileCopyCommand ex
     context.setStatus(message.toString());
   }
 
-  private static int readBytes(InputStream inStream, byte buf[])
-          throws IOException {
+  private static int readBytes(ThrottledInputStream inStream, byte buf[],
+      long position) throws IOException {
     try {
-      return inStream.read(buf);
-    }
-    catch (IOException e) {
+      if (position == 0) {
+        return inStream.read(buf);
+      } else {
+        return inStream.read(position, buf, 0, buf.length);
+      }
+    } catch (IOException e) {
       throw new CopyReadException(e);
     }
   }
 
-  private static ThrottledInputStream getInputStream(Path path, Configuration conf)
-          throws IOException {
+  private static ThrottledInputStream getInputStream(Path path,
+      Configuration conf) throws IOException {
     try {
       FileSystem fs = path.getFileSystem(conf);
       long bandwidthMB = conf.getInt(DistCpConstants.CONF_LABEL_BANDWIDTH_MB,
               DistCpConstants.DEFAULT_BANDWIDTH_MB);
-      return new ThrottledInputStream(new BufferedInputStream(fs.open(path)),
-              bandwidthMB * 1024 * 1024);
+      FSDataInputStream in = fs.open(path);
+      return new ThrottledInputStream(in, bandwidthMB * 1024 * 1024);
     }
     catch (IOException e) {
       throw new CopyReadException(e);


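In RetriableFileCopyCommand the overwrite path still writes to a temporary file and renames it into place, while the append path writes straight to the target via targetFS.append(), starting the source read at the target's current length with positioned reads. A local-filesystem analogue of that resume-from-offset loop, with RandomAccessFile standing in for the throttled FSDataInputStream and an appending FileOutputStream standing in for targetFS.append():

    import java.io.File;
    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.io.OutputStream;
    import java.io.RandomAccessFile;

    public class AppendCopyDemo {
      // Resume a copy by appending only the bytes the target does not yet have,
      // instead of rewriting the whole file through a temporary path.
      static long appendRemainder(File source, File target) throws IOException {
        long offset = target.length();        // bytes already present on the target
        long copied = 0;
        byte[] buf = new byte[8 * 1024];
        try (RandomAccessFile in = new RandomAccessFile(source, "r");
             OutputStream out = new FileOutputStream(target, true /* append */)) {
          in.seek(offset);                    // positioned read, as in the new readBytes()
          int n;
          while ((n = in.read(buf)) > 0) {
            out.write(buf, 0, n);
            copied += n;
          }
        }
        return copied;
      }

      public static void main(String[] args) throws IOException {
        long appended = appendRemainder(new File(args[0]), new File(args[1]));
        System.out.println("appended " + appended + " bytes");
      }
    }

If a retry follows a partially failed append, checkUpdate() re-verifies the prefix checksum first, so the partial data is either reused or the file falls back to a full overwrite, as the new comment in the finally block notes.
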
