hadoop-common-commits mailing list archives

From: a..@apache.org
Subject: svn commit: r1446832 [1/2] - in /hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common: ./ dev-support/ src/main/bin/ src/main/docs/ src/main/java/ src/main/java/org/apache/hadoop/fs/ src/main/java/org/apache/hadoop/fs/shell/ src/main/jav...
Date: Sat, 16 Feb 2013 01:12:12 GMT
Author: atm
Date: Sat Feb 16 01:12:07 2013
New Revision: 1446832

URL: http://svn.apache.org/r1446832
Log:
Merge trunk into HDFS-347 branch.

Added:
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ConnectTimeoutException.java
      - copied unchanged from r1446830, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ConnectTimeoutException.java
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/test-untar.tar
      - copied unchanged from r1446830, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/test-untar.tar
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/test-untar.tgz
      - copied unchanged from r1446830, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/test-untar.tgz
Removed:
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/test/ddl/buffer.jr
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/test/ddl/int.jr
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/test/ddl/string.jr
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/test/ddl/test.jr
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/FromCpp.java
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/RecordBench.java
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/TestBuffer.java
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/TestRecordIO.java
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/TestRecordVersioning.java
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/ToCpp.java
Modified:
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/CHANGES.txt   (contents, props changed)
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/pom.xml
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/bin/hadoop-daemon.sh
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/docs/   (props changed)
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/   (props changed)
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/FsUsage.java
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Ls.java
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BoundedByteArrayOutputStream.java
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SortedMapWritable.java
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/Errno.java
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LineReader.java
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/errno_enum.c
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/site/apt/ClusterSetup.apt.vm
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/site/apt/CommandsManual.apt.vm
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/test/core/   (props changed)
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestListFiles.java
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBoundedByteArrayOutputStream.java
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSortedMapWritable.java
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/tools/GetGroupsTestBase.java
    hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestStringUtils.java

Modified: hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1446832&r1=1446831&r2=1446832&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/CHANGES.txt Sat Feb 16 01:12:07 2013
@@ -146,8 +146,9 @@ Trunk (Unreleased)
     HADOOP-9162. Add utility to check native library availability.
     (Binglin Chang via suresh)
 
-    HADOOP-8924. Add maven plugin alternative to shell script to save
-    package-info.java. (Chris Nauroth via suresh)
+    HADOOP-9277. Improve javadoc for FileContext. (Andrew Wang via suresh)
+
+    HADOOP-9218 Document the Rpc-wrappers used internally (sanjay Radia)
 
   BUG FIXES
 
@@ -319,24 +320,60 @@ Trunk (Unreleased)
     HADOOP-9202. test-patch.sh fails during mvn eclipse:eclipse if patch adds
     a new module to the build (Chris Nauroth via bobby)
 
-    HADOOP-9245. mvn clean without running mvn install before fails.
-    (Karthik Kambatla via suresh)
-
     HADOOP-9249. hadoop-maven-plugins version-info goal causes build failure
     when running with Clover. (Chris Nauroth via suresh)
 
+    HADOOP-9264. Port change to use Java untar API on Windows from 
+    branch-1-win to trunk. (Chris Nauroth via suresh)
+
   OPTIMIZATIONS
 
     HADOOP-7761. Improve the performance of raw comparisons. (todd)
 
     HADOOP-8589 ViewFs tests fail when tests and home dirs are nested (sanjay Radia)
 
+    HADOOP-9190. packaging docs is broken. (Andy Isaacson via atm)
+
+Release 2.0.4-beta - UNRELEASED
+
+  INCOMPATIBLE CHANGES
+
+  NEW FEATURES
+
+    HADOOP-9283. Add support for running the Hadoop client on AIX. (atm)
+
+  IMPROVEMENTS
+
+    HADOOP-9253. Capture ulimit info in the logs at service start time.
+    (Arpit Gupta via suresh)
+
+    HADOOP-8924. Add maven plugin alternative to shell script to save
+    package-info.java. (Chris Nauroth via suresh)
+
+    HADOOP-9117. replace protoc ant plugin exec with a maven plugin. (tucu)
+
+  OPTIMIZATIONS
+
+  BUG FIXES
+
+    HADOOP-9294. GetGroupsTestBase fails on Windows. (Chris Nauroth via suresh)
+
+    HADOOP-9305. Add support for running the Hadoop client on 64-bit AIX. (atm)
+
+    HADOOP-9245. mvn clean without running mvn install before fails.
+    (Karthik Kambatla via suresh)
+
     HADOOP-9246 Execution phase for hadoop-maven-plugin should be
     process-resources (Karthik Kambatla and Chris Nauroth via jlowe)
 
-    HADOOP-9190. packaging docs is broken. (Andy Isaacson via atm)
+    HADOOP-9297. remove old record IO generation and tests. (tucu)
 
-Release 2.0.3-alpha - Unreleased 
+    HADOOP-9154. SortedMapWritable#putAll() doesn't add key/value classes to
+    the map. (Karthik Kambatla via tomwhite)
+
+    HADOOP-9304. remove addition of avro genreated-sources dirs to build. (tucu)
+
+Release 2.0.3-alpha - 2013-02-06 
 
   INCOMPATIBLE CHANGES
 
@@ -464,6 +501,9 @@ Release 2.0.3-alpha - Unreleased 
     HADOOP-9231. Parametrize staging URL for the uniformity of
     distributionManagement. (Konstantin Boudnik via suresh)
 
+    HADOOP-9276. Allow BoundedByteArrayOutputStream to be resettable.
+    (Arun Murthy via hitesh)
+
   OPTIMIZATIONS
 
     HADOOP-8866. SampleQuantiles#query is O(N^2) instead of O(N). (Andrew Wang
@@ -588,6 +628,24 @@ Release 2.0.3-alpha - Unreleased 
 
     HADOOP-9221. Convert remaining xdocs to APT. (Andy Isaacson via atm)
 
+    HADOOP-8981. TestMetricsSystemImpl fails on Windows. (Xuan Gong via suresh)
+    
+    HADOOP-9124. SortedMapWritable violates contract of Map interface for
+    equals() and hashCode(). (Surenkumar Nihalani via tomwhite)
+
+    HADOOP-9252. In StringUtils, humanReadableInt(..) has a race condition and
+    the synchronization of limitDecimalTo2(double) can be avoided.  (szetszwo)
+
+    HADOOP-9260. Hadoop version may be not correct when starting name node or
+    data node. (Chris Nauroth via jlowe)
+
+    HADOOP-9278. Fix the file handle leak in HarMetaData.parseMetaData() in
+    HarFileSystem. (Chris Nauroth via szetszwo)
+
+    HADOOP-9289. FsShell rm -f fails for non-matching globs. (Daryn Sharp via
+    suresh)
+
+
 Release 2.0.2-alpha - 2012-09-07 
 
   INCOMPATIBLE CHANGES
@@ -1289,10 +1347,19 @@ Release 0.23.7 - UNRELEASED
     HADOOP-8849. FileUtil#fullyDelete should grant the target directories +rwx
     permissions (Ivan A. Veselovsky via bobby)
 
+    HADOOP-9067. provide test for LocalFileSystem.reportChecksumFailure
+    (Ivan A. Veselovsky via bobby) 
+
   OPTIMIZATIONS
 
   BUG FIXES
 
+    HADOOP-9302. HDFS docs not linked from top level (Andy Isaacson via
+    tgraves)
+
+    HADOOP-9303. command manual dfsadmin missing entry for restoreFailedStorage
+    option (Andy Isaacson via tgraves)
+
 Release 0.23.6 - UNRELEASED
 
   INCOMPATIBLE CHANGES

Propchange: hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/CHANGES.txt
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:r1440578-1446830

Modified: hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml?rev=1446832&r1=1446831&r2=1446832&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml (original)
+++ hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml Sat Feb 16 01:12:07 2013
@@ -286,6 +286,10 @@
       <!-- protobuf generated code -->
       <Class name="~org\.apache\.hadoop\.security\.proto\.SecurityProtos.*"/>
     </Match>
+    <Match>
+      <!-- protobuf generated code -->
+      <Class name="~org\.apache\.hadoop\.ipc\.protobuf\.TestProtos.*"/>
+    </Match>
 
     <!--
        Manually checked, misses child thread manually syncing on parent's intrinsic lock.

Modified: hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/pom.xml?rev=1446832&r1=1446831&r2=1446832&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/pom.xml (original)
+++ hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/pom.xml Sat Feb 16 01:12:07 2013
@@ -241,6 +241,11 @@
       <type>test-jar</type>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-compress</artifactId>
+      <version>1.4</version>
+    </dependency>
   </dependencies>
 
   <build>
@@ -288,6 +293,51 @@
               </source>
             </configuration>
           </execution>
+          <execution>
+            <id>compile-protoc</id>
+            <phase>generate-sources</phase>
+            <goals>
+              <goal>protoc</goal>
+            </goals>
+            <configuration>
+              <imports>
+                <param>${basedir}/src/main/proto</param>
+              </imports>
+              <source>
+                <directory>${basedir}/src/main/proto</directory>
+                <includes>
+                  <include>HAServiceProtocol.proto</include>
+                  <include>IpcConnectionContext.proto</include>
+                  <include>ProtocolInfo.proto</include>
+                  <include>RpcHeader.proto</include>
+                  <include>ZKFCProtocol.proto</include>
+                  <include>ProtobufRpcEngine.proto</include>
+                  <include>Security.proto</include>
+                </includes>
+              </source>
+              <output>${project.build.directory}/generated-sources/java</output>
+            </configuration>
+          </execution>
+          <execution>
+            <id>compile-test-protoc</id>
+            <phase>generate-test-sources</phase>
+            <goals>
+              <goal>protoc</goal>
+            </goals>
+            <configuration>
+              <imports>
+                <param>${basedir}/src/test/proto</param>
+              </imports>
+              <source>
+                <directory>${basedir}/src/test/proto</directory>
+                <includes>
+                  <include>test.proto</include>
+                  <include>test_rpc_service.proto</include>
+                </includes>
+              </source>
+              <output>${project.build.directory}/generated-test-sources/java</output>
+            </configuration>
+          </execution>
         </executions>
       </plugin>
       <plugin>
@@ -327,39 +377,6 @@
         <artifactId>maven-antrun-plugin</artifactId>
         <executions>
           <execution>
-            <id>create-protobuf-generated-sources-directory</id>
-            <phase>initialize</phase>
-            <goals>
-              <goal>run</goal>
-            </goals>
-            <configuration>
-              <target>
-                <mkdir dir="target/generated-sources/java" />
-                <mkdir dir="target/generated-test-sources/java" />
-              </target>
-            </configuration>
-          </execution>
-          <execution>
-            <id>generate-test-sources</id>
-            <phase>generate-test-sources</phase>
-            <goals>
-              <goal>run</goal>
-            </goals>
-            <configuration>
-              <target>
-
-                <mkdir dir="${project.build.directory}/generated-test-sources/java"/>
-
-                <taskdef name="recordcc" classname="org.apache.hadoop.record.compiler.ant.RccTask">
-                  <classpath refid="maven.compile.classpath"/>
-                </taskdef>
-                <recordcc destdir="${project.build.directory}/generated-test-sources/java">
-                  <fileset dir="${basedir}/src/test/ddl" includes="**/*.jr"/>
-                </recordcc>
-              </target>
-            </configuration>
-          </execution>
-          <execution>
             <id>create-log-dir</id>
             <phase>process-test-resources</phase>
             <goals>
@@ -382,88 +399,32 @@
             </configuration>
           </execution>
           <execution>
-            <phase>pre-site</phase>
+            <id>copy-test-tarballs</id>
+            <phase>process-test-resources</phase>
             <goals>
               <goal>run</goal>
             </goals>
             <configuration>
-              <tasks>
-                <copy file="src/main/resources/core-default.xml" todir="src/site/resources"/>
-                <copy file="src/main/xsl/configuration.xsl" todir="src/site/resources"/>
-              </tasks>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <groupId>org.codehaus.mojo</groupId>
-        <artifactId>exec-maven-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>compile-proto</id>
-            <phase>generate-sources</phase>
-            <goals>
-              <goal>exec</goal>
-            </goals>
-            <configuration>
-              <executable>protoc</executable>
-              <arguments>
-                <argument>-Isrc/main/proto/</argument>
-                <argument>--java_out=target/generated-sources/java</argument>
-                <argument>src/main/proto/HAServiceProtocol.proto</argument>
-                <argument>src/main/proto/IpcConnectionContext.proto</argument>
-                <argument>src/main/proto/ProtocolInfo.proto</argument>
-                <argument>src/main/proto/RpcHeader.proto</argument>
-                <argument>src/main/proto/ZKFCProtocol.proto</argument>
-                <argument>src/main/proto/ProtobufRpcEngine.proto</argument>
-                <argument>src/main/proto/Security.proto</argument>
-              </arguments>
-            </configuration>
-          </execution>
-          <execution>
-            <id>compile-test-proto</id>
-            <phase>generate-test-sources</phase>
-            <goals>
-              <goal>exec</goal>
-            </goals>
-            <configuration>
-              <executable>protoc</executable>
-              <arguments>
-                <argument>-Isrc/test/proto/</argument>
-                <argument>--java_out=target/generated-test-sources/java</argument>
-                <argument>src/test/proto/test.proto</argument>
-                <argument>src/test/proto/test_rpc_service.proto</argument>
-              </arguments>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <groupId>org.codehaus.mojo</groupId>
-        <artifactId>build-helper-maven-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>add-source</id>
-            <phase>generate-sources</phase>
-            <goals>
-              <goal>add-source</goal>
-            </goals>
-            <configuration>
-              <sources>
-                <source>${project.build.directory}/generated-sources/java</source>
-              </sources>
+              <target>
+                <copy toDir="${test.cache.data}">
+                  <fileset dir="${basedir}/src/test/java/org/apache/hadoop/fs">
+                    <include name="test-untar.tar"/>
+                    <include name="test-untar.tgz"/>
+                  </fileset>
+                </copy>
+              </target>
             </configuration>
           </execution>
           <execution>
-            <id>add-test-source</id>
-            <phase>generate-sources</phase>
+            <phase>pre-site</phase>
             <goals>
-              <goal>add-test-source</goal>
+              <goal>run</goal>
             </goals>
             <configuration>
-              <sources>
-                <source>${project.build.directory}/generated-test-sources/java</source>
-              </sources>
+              <tasks>
+                <copy file="src/main/resources/core-default.xml" todir="src/site/resources"/>
+                <copy file="src/main/xsl/configuration.xsl" todir="src/site/resources"/>
+              </tasks>
             </configuration>
           </execution>
         </executions>
@@ -485,6 +446,7 @@
             <exclude>src/test/all-tests</exclude>
             <exclude>src/test/resources/kdc/ldif/users.ldif</exclude>
             <exclude>src/main/native/src/org/apache/hadoop/io/compress/lz4/lz4.c</exclude>
+            <exclude>src/test/java/org/apache/hadoop/fs/test-untar.tgz</exclude>
           </excludes>
         </configuration>
       </plugin>

Modified: hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/bin/hadoop-daemon.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/bin/hadoop-daemon.sh?rev=1446832&r1=1446831&r2=1446832&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/bin/hadoop-daemon.sh (original)
+++ hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/bin/hadoop-daemon.sh Sat Feb 16 01:12:07 2013
@@ -83,7 +83,8 @@ fi
 if [ "$command" == "datanode" ] && [ "$EUID" -eq 0 ] && [ -n "$HADOOP_SECURE_DN_USER" ]; then
   export HADOOP_PID_DIR=$HADOOP_SECURE_DN_PID_DIR
   export HADOOP_LOG_DIR=$HADOOP_SECURE_DN_LOG_DIR
-  export HADOOP_IDENT_STRING=$HADOOP_SECURE_DN_USER   
+  export HADOOP_IDENT_STRING=$HADOOP_SECURE_DN_USER
+  starting_secure_dn="true"
 fi
 
 if [ "$HADOOP_IDENT_STRING" = "" ]; then
@@ -154,7 +155,17 @@ case $startStop in
       ;;
     esac
     echo $! > $pid
-    sleep 1; head "$log"
+    sleep 1
+    # capture the ulimit output
+    if [ "true" = "$starting_secure_dn" ]; then
+      echo "ulimit -a for secure datanode user $HADOOP_SECURE_DN_USER" >> $log
+      # capture the ulimit info for the appropriate user
+      su --shell=/bin/bash $HADOOP_SECURE_DN_USER -c 'ulimit -a' >> $log 2>&1
+    else
+      echo "ulimit -a for user $USER" >> $log
+      ulimit -a >> $log 2>&1
+    fi
+    head -30 "$log"
     sleep 3;
     if ! ps -p $! > /dev/null ; then
       exit 1

Propchange: hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/docs/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/docs:r1440578-1446830

Propchange: hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:r1440578-1446830

Modified: hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java?rev=1446832&r1=1446831&r2=1446832&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java Sat Feb 16 01:12:07 2013
@@ -57,70 +57,60 @@ import org.apache.hadoop.security.token.
 import org.apache.hadoop.util.ShutdownHookManager;
 
 /**
- * The FileContext class provides an interface to the application writer for
- * using the Hadoop file system.
- * It provides a set of methods for the usual operation: create, open, 
- * list, etc 
+ * The FileContext class provides an interface for users of the Hadoop
+ * file system. It exposes a number of file system operations, e.g. create,
+ * open, list.
  * 
- * <p>
- * <b> *** Path Names *** </b>
- * <p>
+ * <h2>Path Names</h2>
  * 
- * The Hadoop file system supports a URI name space and URI names.
- * It offers a forest of file systems that can be referenced using fully
- * qualified URIs.
- * Two common Hadoop file systems implementations are
+ * The Hadoop file system supports a URI namespace and URI names. This enables
+ * multiple types of file systems to be referenced using fully-qualified URIs.
+ * Two common Hadoop file system implementations are
  * <ul>
- * <li> the local file system: file:///path
- * <li> the hdfs file system hdfs://nnAddress:nnPort/path
+ * <li>the local file system: file:///path
+ * <li>the HDFS file system: hdfs://nnAddress:nnPort/path
  * </ul>
  * 
- * While URI names are very flexible, it requires knowing the name or address
- * of the server. For convenience one often wants to access the default system
- * in one's environment without knowing its name/address. This has an
- * additional benefit that it allows one to change one's default fs
- *  (e.g. admin moves application from cluster1 to cluster2).
+ * The Hadoop file system also supports additional naming schemes besides URIs.
+ * Hadoop has the concept of a <i>default file system</i>, which implies a
+ * default URI scheme and authority. This enables <i>slash-relative names</i>
+ * relative to the default FS, which are more convenient for users and
+ * application writers. The default FS is typically set by the user's
+ * environment, though it can also be manually specified.
  * <p>
  * 
- * To facilitate this, Hadoop supports a notion of a default file system.
- * The user can set his default file system, although this is
- * typically set up for you in your environment via your default config.
- * A default file system implies a default scheme and authority; slash-relative
- * names (such as /for/bar) are resolved relative to that default FS.
- * Similarly a user can also have working-directory-relative names (i.e. names
- * not starting with a slash). While the working directory is generally in the
- * same default FS, the wd can be in a different FS.
+ * Hadoop also supports <i>working-directory-relative</i> names, which are paths
+ * relative to the current working directory (similar to Unix). The working
+ * directory can be in a different file system than the default FS.
  * <p>
- *  Hence Hadoop path names can be one of:
- *  <ul>
- *  <li> fully qualified URI: scheme://authority/path
- *  <li> slash relative names: /path relative to the default file system
- *  <li> wd-relative names: path  relative to the working dir
- *  </ul>   
+ * Thus, Hadoop path names can be specified as one of the following:
+ * <ul>
+ * <li>a fully-qualified URI: scheme://authority/path (e.g.
+ * hdfs://nnAddress:nnPort/foo/bar)
+ * <li>a slash-relative name: path relative to the default file system (e.g.
+ * /foo/bar)
+ * <li>a working-directory-relative name: path relative to the working dir (e.g.
+ * foo/bar)
+ * </ul>
  *  Relative paths with scheme (scheme:foo/bar) are illegal.
  *  
- *  <p>
- *  <b>****The Role of the FileContext and configuration defaults****</b>
- *  <p>
- *  The FileContext provides file namespace context for resolving file names;
- *  it also contains the umask for permissions, In that sense it is like the
- *  per-process file-related state in Unix system.
- *  These two properties
- *  <ul> 
- *  <li> default file system i.e your slash)
- *  <li> umask
- *  </ul>
- *  in general, are obtained from the default configuration file
- *  in your environment,  (@see {@link Configuration}).
- *  
- *  No other configuration parameters are obtained from the default config as 
- *  far as the file context layer is concerned. All file system instances
- *  (i.e. deployments of file systems) have default properties; we call these
- *  server side (SS) defaults. Operation like create allow one to select many 
- *  properties: either pass them in as explicit parameters or use
- *  the SS properties.
- *  <p>
- *  The file system related SS defaults are
+ * <h2>Role of FileContext and Configuration Defaults</h2>
+ *
+ * The FileContext is the analogue of per-process file-related state in Unix. It
+ * contains two properties:
+ * 
+ * <ul>
+ * <li>the default file system (for resolving slash-relative names)
+ * <li>the umask (for file permissions)
+ * </ul>
+ * In general, these properties are obtained from the default configuration file
+ * in the user's environment (see {@link Configuration}).
+ * 
+ * Further file system properties are specified on the server-side. File system
+ * operations default to using these server-side defaults unless otherwise
+ * specified.
+ * <p>
+ * The file system related server-side defaults are:
  *  <ul>
  *  <li> the home directory (default is "/user/userName")
  *  <li> the initial wd (only for local fs)
@@ -131,34 +121,34 @@ import org.apache.hadoop.util.ShutdownHo
  *  <li> checksum option. (checksumType and  bytesPerChecksum)
  *  </ul>
  *
- * <p>
- * <b> *** Usage Model for the FileContext class *** </b>
- * <p>
+ * <h2>Example Usage</h2>
+ *
  * Example 1: use the default config read from the $HADOOP_CONFIG/core.xml.
  *   Unspecified values come from core-defaults.xml in the release jar.
  *  <ul>  
  *  <li> myFContext = FileContext.getFileContext(); // uses the default config
  *                                                // which has your default FS 
  *  <li>  myFContext.create(path, ...);
- *  <li>  myFContext.setWorkingDir(path)
+ *  <li>  myFContext.setWorkingDir(path);
  *  <li>  myFContext.open (path, ...);  
+ *  <li>...
  *  </ul>  
  * Example 2: Get a FileContext with a specific URI as the default FS
  *  <ul>  
- *  <li> myFContext = FileContext.getFileContext(URI)
+ *  <li> myFContext = FileContext.getFileContext(URI);
  *  <li> myFContext.create(path, ...);
- *   ...
- * </ul> 
+ *  <li>...
+ * </ul>
  * Example 3: FileContext with local file system as the default
  *  <ul> 
- *  <li> myFContext = FileContext.getLocalFSFileContext()
+ *  <li> myFContext = FileContext.getLocalFSFileContext();
  *  <li> myFContext.create(path, ...);
  *  <li> ...
  *  </ul> 
  * Example 4: Use a specific config, ignoring $HADOOP_CONFIG
  *  Generally you should not need use a config unless you are doing
  *   <ul> 
- *   <li> configX = someConfigSomeOnePassedToYou.
+ *   <li> configX = someConfigSomeOnePassedToYou;
  *   <li> myFContext = getFileContext(configX); // configX is not changed,
  *                                              // is passed down 
  *   <li> myFContext.create(path, ...);

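The rewritten FileContext javadoc above describes fully-qualified, slash-relative and working-directory-relative path names. For illustration only (not part of this commit; the hdfs://nnAddress:8020 authority is a placeholder), a minimal sketch of how those names resolve through a FileContext:

import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.Path;

public class FileContextPathsExample {
  public static void main(String[] args) throws Exception {
    // Default FS and umask come from the default Configuration,
    // as described in the javadoc above.
    FileContext fc = FileContext.getFileContext();

    Path fullyQualified = new Path("hdfs://nnAddress:8020/foo/bar"); // placeholder authority
    Path slashRelative  = new Path("/foo/bar"); // resolved against the default FS
    Path wdRelative     = new Path("foo/bar");  // resolved against the working directory

    // makeQualified() shows how each flavour resolves in this context.
    System.out.println(fc.makeQualified(fullyQualified));
    System.out.println(fc.makeQualified(slashRelative));
    System.out.println(fc.makeQualified(wdRelative));
    System.out.println("working dir: " + fc.getWorkingDirectory());
  }
}
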
Modified: hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java?rev=1446832&r1=1446831&r2=1446832&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java Sat Feb 16 01:12:07 2013
@@ -21,9 +21,12 @@ package org.apache.hadoop.fs;
 import java.io.*;
 import java.util.Arrays;
 import java.util.Enumeration;
+import java.util.zip.GZIPInputStream;
 import java.util.zip.ZipEntry;
 import java.util.zip.ZipFile;
 
+import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
+import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -624,14 +627,28 @@ public class FileUtil {
    * @throws IOException
    */
   public static void unTar(File inFile, File untarDir) throws IOException {
-    if (!untarDir.mkdirs()) {           
+    if (!untarDir.mkdirs()) {
       if (!untarDir.isDirectory()) {
         throw new IOException("Mkdirs failed to create " + untarDir);
       }
     }
 
-    StringBuilder untarCommand = new StringBuilder();
     boolean gzipped = inFile.toString().endsWith("gz");
+    if(Shell.WINDOWS) {
+      // Tar is not native to Windows. Use simple Java based implementation for 
+      // tests and simple tar archives
+      unTarUsingJava(inFile, untarDir, gzipped);
+    }
+    else {
+      // spawn tar utility to untar archive for full fledged unix behavior such 
+      // as resolving symlinks in tar archives
+      unTarUsingTar(inFile, untarDir, gzipped);
+    }
+  }
+  
+  private static void unTarUsingTar(File inFile, File untarDir,
+      boolean gzipped) throws IOException {
+    StringBuffer untarCommand = new StringBuffer();
     if (gzipped) {
       untarCommand.append(" gzip -dc '");
       untarCommand.append(FileUtil.makeShellPath(inFile));
@@ -656,7 +673,62 @@ public class FileUtil {
                   ". Tar process exited with exit code " + exitcode);
     }
   }
+  
+  private static void unTarUsingJava(File inFile, File untarDir,
+      boolean gzipped) throws IOException {
+    InputStream inputStream = null;
+    if (gzipped) {
+      inputStream = new BufferedInputStream(new GZIPInputStream(
+          new FileInputStream(inFile)));
+    } else {
+      inputStream = new BufferedInputStream(new FileInputStream(inFile));
+    }
+
+    TarArchiveInputStream tis = new TarArchiveInputStream(inputStream);
+
+    for (TarArchiveEntry entry = tis.getNextTarEntry(); entry != null;) {
+      unpackEntries(tis, entry, untarDir);
+      entry = tis.getNextTarEntry();
+    }
+  }
+  
+  private static void unpackEntries(TarArchiveInputStream tis,
+      TarArchiveEntry entry, File outputDir) throws IOException {
+    if (entry.isDirectory()) {
+      File subDir = new File(outputDir, entry.getName());
+      if (!subDir.mkdir() && !subDir.isDirectory()) {
+        throw new IOException("Mkdirs failed to create tar internal dir "
+            + outputDir);
+      }
+
+      for (TarArchiveEntry e : entry.getDirectoryEntries()) {
+        unpackEntries(tis, e, subDir);
+      }
 
+      return;
+    }
+
+    File outputFile = new File(outputDir, entry.getName());
+    if (!outputDir.exists()) {
+      if (!outputDir.mkdirs()) {
+        throw new IOException("Mkdirs failed to create tar internal dir "
+            + outputDir);
+      }
+    }
+
+    int count;
+    byte data[] = new byte[2048];
+    BufferedOutputStream outputStream = new BufferedOutputStream(
+        new FileOutputStream(outputFile));
+
+    while ((count = tis.read(data)) != -1) {
+      outputStream.write(data, 0, count);
+    }
+
+    outputStream.flush();
+    outputStream.close();
+  }
+  
   /**
    * Class for creating hardlinks.
    * Supports Unix, Cygwin, WindXP.

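The FileUtil change above adds a pure-Java untar path (via commons-compress) that is used on Windows, while other platforms keep spawning the tar utility. A small usage sketch, with hypothetical file names:

import java.io.File;
import org.apache.hadoop.fs.FileUtil;

public class UnTarExample {
  public static void main(String[] args) throws Exception {
    File archive  = new File("test-untar.tgz"); // hypothetical archive name
    File untarDir = new File("untar-output");   // hypothetical target directory
    // On Windows this dispatches to the Java/commons-compress implementation;
    // elsewhere it shells out to tar (which also resolves symlinks).
    FileUtil.unTar(archive, untarDir);
  }
}
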
Modified: hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java?rev=1446832&r1=1446831&r2=1446832&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java Sat Feb 16 01:12:07 2013
@@ -30,8 +30,11 @@ import java.util.TreeMap;
 import java.util.HashMap;
 import java.util.concurrent.ConcurrentHashMap;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.util.LineReader;
 import org.apache.hadoop.util.Progressable;
@@ -50,6 +53,9 @@ import org.apache.hadoop.util.Progressab
  */
 
 public class HarFileSystem extends FilterFileSystem {
+
+  private static final Log LOG = LogFactory.getLog(HarFileSystem.class);
+
   public static final int VERSION = 3;
 
   private static final Map<URI, HarMetaData> harMetaCache =
@@ -1025,68 +1031,69 @@ public class HarFileSystem extends Filte
     }
 
     private void parseMetaData() throws IOException {
-      FSDataInputStream in = fs.open(masterIndexPath);
-      FileStatus masterStat = fs.getFileStatus(masterIndexPath);
-      masterIndexTimestamp = masterStat.getModificationTime();
-      LineReader lin = new LineReader(in, getConf());
-      Text line = new Text();
-      long read = lin.readLine(line);
-
-     // the first line contains the version of the index file
-      String versionLine = line.toString();
-      String[] arr = versionLine.split(" ");
-      version = Integer.parseInt(arr[0]);
-      // make it always backwards-compatible
-      if (this.version > HarFileSystem.VERSION) {
-        throw new IOException("Invalid version " + 
-            this.version + " expected " + HarFileSystem.VERSION);
-      }
-
-      // each line contains a hashcode range and the index file name
-      String[] readStr = null;
-      while(read < masterStat.getLen()) {
-        int b = lin.readLine(line);
-        read += b;
-        readStr = line.toString().split(" ");
-        int startHash = Integer.parseInt(readStr[0]);
-        int endHash  = Integer.parseInt(readStr[1]);
-        stores.add(new Store(Long.parseLong(readStr[2]), 
-            Long.parseLong(readStr[3]), startHash,
-            endHash));
-        line.clear();
-      }
+      Text line;
+      long read;
+      FSDataInputStream in = null;
+      LineReader lin = null;
+
       try {
-        // close the master index
-        lin.close();
-      } catch(IOException io){
-        // do nothing just a read.
-      }
+        in = fs.open(masterIndexPath);
+        FileStatus masterStat = fs.getFileStatus(masterIndexPath);
+        masterIndexTimestamp = masterStat.getModificationTime();
+        lin = new LineReader(in, getConf());
+        line = new Text();
+        read = lin.readLine(line);
+
+        // the first line contains the version of the index file
+        String versionLine = line.toString();
+        String[] arr = versionLine.split(" ");
+        version = Integer.parseInt(arr[0]);
+        // make it always backwards-compatible
+        if (this.version > HarFileSystem.VERSION) {
+          throw new IOException("Invalid version " + 
+              this.version + " expected " + HarFileSystem.VERSION);
+        }
 
-      FSDataInputStream aIn = fs.open(archiveIndexPath);
-      FileStatus archiveStat = fs.getFileStatus(archiveIndexPath);
-      archiveIndexTimestamp = archiveStat.getModificationTime();
-      LineReader aLin;
-
-      // now start reading the real index file
-      for (Store s: stores) {
-        read = 0;
-        aIn.seek(s.begin);
-        aLin = new LineReader(aIn, getConf());
-        while (read + s.begin < s.end) {
-          int tmp = aLin.readLine(line);
-          read += tmp;
-          String lineFeed = line.toString();
-          String[] parsed = lineFeed.split(" ");
-          parsed[0] = decodeFileName(parsed[0]);
-          archive.put(new Path(parsed[0]), new HarStatus(lineFeed));
+        // each line contains a hashcode range and the index file name
+        String[] readStr = null;
+        while(read < masterStat.getLen()) {
+          int b = lin.readLine(line);
+          read += b;
+          readStr = line.toString().split(" ");
+          int startHash = Integer.parseInt(readStr[0]);
+          int endHash  = Integer.parseInt(readStr[1]);
+          stores.add(new Store(Long.parseLong(readStr[2]), 
+              Long.parseLong(readStr[3]), startHash,
+              endHash));
           line.clear();
         }
+      } finally {
+        IOUtils.cleanup(LOG, lin, in);
       }
+
+      FSDataInputStream aIn = fs.open(archiveIndexPath);
       try {
-        // close the archive index
-        aIn.close();
-      } catch(IOException io) {
-        // do nothing just a read.
+        FileStatus archiveStat = fs.getFileStatus(archiveIndexPath);
+        archiveIndexTimestamp = archiveStat.getModificationTime();
+        LineReader aLin;
+
+        // now start reading the real index file
+        for (Store s: stores) {
+          read = 0;
+          aIn.seek(s.begin);
+          aLin = new LineReader(aIn, getConf());
+          while (read + s.begin < s.end) {
+            int tmp = aLin.readLine(line);
+            read += tmp;
+            String lineFeed = line.toString();
+            String[] parsed = lineFeed.split(" ");
+            parsed[0] = decodeFileName(parsed[0]);
+            archive.put(new Path(parsed[0]), new HarStatus(lineFeed));
+            line.clear();
+          }
+        }
+      } finally {
+        IOUtils.cleanup(LOG, aIn);
       }
     }
   }

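The HarFileSystem fix above (HADOOP-9278) moves the index-stream handling into try/finally blocks that call IOUtils.cleanup(), so the master and archive index streams cannot leak. A minimal sketch of the same pattern, using an assumed local path:

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class CleanupPatternExample {
  private static final Log LOG = LogFactory.getLog(CleanupPatternExample.class);

  public static void main(String[] args) throws Exception {
    FileSystem fs = FileSystem.getLocal(new Configuration());
    FSDataInputStream in = null;
    try {
      in = fs.open(new Path("/tmp/example.txt")); // assumed path
      System.out.println("first byte: " + in.read());
    } finally {
      // Closes the stream even if open()/read() threw; close() failures are
      // logged rather than masking the original exception.
      IOUtils.cleanup(LOG, in);
    }
  }
}
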
Modified: hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java?rev=1446832&r1=1446831&r2=1446832&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java Sat Feb 16 01:12:07 2013
@@ -21,6 +21,7 @@ package org.apache.hadoop.fs.shell;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.util.LinkedList;
+import java.util.List;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -28,6 +29,7 @@ import org.apache.hadoop.fs.PathIOExcept
 import org.apache.hadoop.fs.PathIsDirectoryException;
 import org.apache.hadoop.fs.PathIsNotDirectoryException;
 import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException;
+import org.apache.hadoop.fs.PathNotFoundException;
 import org.apache.hadoop.fs.Trash;
 
 /**
@@ -72,6 +74,19 @@ class Delete {
     }
 
     @Override
+    protected List<PathData> expandArgument(String arg) throws IOException {
+      try {
+        return super.expandArgument(arg);
+      } catch (PathNotFoundException e) {
+        if (!ignoreFNF) {
+          throw e;
+        }
+        // prevent -f on a non-existent glob from failing
+        return new LinkedList<PathData>();
+      }
+    }
+
+    @Override
     protected void processNonexistentPath(PathData item) throws IOException {
       if (!ignoreFNF) super.processNonexistentPath(item);
     }

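The Delete change above (HADOOP-9289) makes "rm -f" succeed when a glob matches nothing, by returning an empty expansion instead of propagating PathNotFoundException. A sketch of how that behaviour can be observed programmatically (the glob is a made-up example):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FsShell;
import org.apache.hadoop.util.ToolRunner;

public class RmForceGlobExample {
  public static void main(String[] args) throws Exception {
    // Equivalent to: hadoop fs -rm -f '/tmp/no-such-prefix-*'
    int rc = ToolRunner.run(new FsShell(new Configuration()),
        new String[] { "-rm", "-f", "/tmp/no-such-prefix-*" });
    // With this fix the non-matching glob is ignored and rc should be 0.
    System.out.println("exit code: " + rc);
  }
}
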
Modified: hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/FsUsage.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/FsUsage.java?rev=1446832&r1=1446831&r2=1446832&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/FsUsage.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/FsUsage.java Sat Feb 16 01:12:07 2013
@@ -48,7 +48,7 @@ class FsUsage extends FsCommand {
   
   protected String formatSize(long size) {
     return humanReadable
-        ? StringUtils.humanReadableInt(size)
+        ? StringUtils.TraditionalBinaryPrefix.long2String(size, "", 1)
         : String.valueOf(size);
   }
 

Modified: hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Ls.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Ls.java?rev=1446832&r1=1446831&r2=1446832&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Ls.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Ls.java Sat Feb 16 01:12:07 2013
@@ -67,7 +67,7 @@ class Ls extends FsCommand {
   protected boolean humanReadable = false;
   protected String formatSize(long size) {
     return humanReadable
-      ? StringUtils.humanReadableInt(size)
+      ? StringUtils.TraditionalBinaryPrefix.long2String(size, "", 1)
       : String.valueOf(size);
   }
 

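Both FsUsage and Ls above switch from StringUtils.humanReadableInt(..), which HADOOP-9252 flags as racy, to StringUtils.TraditionalBinaryPrefix.long2String(size, "", 1). A small sketch of the replacement call:

import org.apache.hadoop.util.StringUtils;

public class HumanReadableSizeExample {
  public static void main(String[] args) {
    long size = 1536L;
    // Arguments: value, unit suffix ("" here), number of decimal places.
    String human = StringUtils.TraditionalBinaryPrefix.long2String(size, "", 1);
    String raw   = String.valueOf(size);
    System.out.println(human + " vs " + raw); // e.g. "1.5 K vs 1536"
  }
}
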
Modified: hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java?rev=1446832&r1=1446831&r2=1446832&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java Sat Feb 16 01:12:07 2013
@@ -29,6 +29,8 @@ import org.apache.hadoop.classification.
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 
+import com.google.common.annotations.VisibleForTesting;
+
 /**
  * Abstract base class for MapWritable and SortedMapWritable
  * 
@@ -45,10 +47,12 @@ public abstract class AbstractMapWritabl
   private AtomicReference<Configuration> conf;
   
   /* Class to id mappings */
-  private Map<Class, Byte> classToIdMap = new ConcurrentHashMap<Class, Byte>();
+  @VisibleForTesting
+  Map<Class, Byte> classToIdMap = new ConcurrentHashMap<Class, Byte>();
   
   /* Id to Class mappings */
-  private Map<Byte, Class> idToClassMap = new ConcurrentHashMap<Byte, Class>();
+  @VisibleForTesting
+  Map<Byte, Class> idToClassMap = new ConcurrentHashMap<Byte, Class>();
   
   /* The number of new classes (those not established by the constructor) */
   private volatile byte newClasses = 0;

Modified: hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BoundedByteArrayOutputStream.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BoundedByteArrayOutputStream.java?rev=1446832&r1=1446831&r2=1446832&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BoundedByteArrayOutputStream.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BoundedByteArrayOutputStream.java Sat Feb 16 01:12:07 2013
@@ -32,9 +32,10 @@ import org.apache.hadoop.classification.
 @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
 @InterfaceStability.Unstable
 public class BoundedByteArrayOutputStream extends OutputStream {
-  private final byte[] buffer;
+  private byte[] buffer;
+  private int startOffset;
   private int limit;
-  private int count;
+  private int currentPointer;
 
   /**
    * Create a BoundedByteArrayOutputStream with the specified
@@ -52,20 +53,30 @@ public class BoundedByteArrayOutputStrea
    * @param limit The maximum limit upto which data can be written
    */
   public BoundedByteArrayOutputStream(int capacity, int limit) {
+    this(new byte[capacity], 0, limit);
+  }
+
+  protected BoundedByteArrayOutputStream(byte[] buf, int offset, int limit) {
+    resetBuffer(buf, offset, limit);
+  }
+  
+  protected void resetBuffer(byte[] buf, int offset, int limit) {
+    int capacity = buf.length - offset;
     if ((capacity < limit) || (capacity | limit) < 0) {
       throw new IllegalArgumentException("Invalid capacity/limit");
     }
-    this.buffer = new byte[capacity];
-    this.limit = limit;
-    this.count = 0;
+    this.buffer = buf;
+    this.startOffset = offset;
+    this.currentPointer = offset;
+    this.limit = offset + limit;
   }
-
+  
   @Override
   public void write(int b) throws IOException {
-    if (count >= limit) {
+    if (currentPointer >= limit) {
       throw new EOFException("Reaching the limit of the buffer.");
     }
-    buffer[count++] = (byte) b;
+    buffer[currentPointer++] = (byte) b;
   }
 
   @Override
@@ -77,12 +88,12 @@ public class BoundedByteArrayOutputStrea
       return;
     }
 
-    if (count + len > limit) {
+    if (currentPointer + len > limit) {
       throw new EOFException("Reach the limit of the buffer");
     }
 
-    System.arraycopy(b, off, buffer, count, len);
-    count += len;
+    System.arraycopy(b, off, buffer, currentPointer, len);
+    currentPointer += len;
   }
 
   /**
@@ -90,17 +101,17 @@ public class BoundedByteArrayOutputStrea
    * @param newlim New Limit
    */
   public void reset(int newlim) {
-    if (newlim > buffer.length) {
+    if (newlim > (buffer.length - startOffset)) {
       throw new IndexOutOfBoundsException("Limit exceeds buffer size");
     }
     this.limit = newlim;
-    this.count = 0;
+    this.currentPointer = startOffset;
   }
 
   /** Reset the buffer */
   public void reset() {
-    this.limit = buffer.length;
-    this.count = 0;
+    this.limit = buffer.length - startOffset;
+    this.currentPointer = startOffset;
   }
 
   /** Return the current limit */
@@ -119,6 +130,10 @@ public class BoundedByteArrayOutputStrea
    * currently in the buffer.
    */
   public int size() {
-    return count;
+    return currentPointer - startOffset;
+  }
+  
+  public int available() {
+    return limit - currentPointer;
   }
 }

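HADOOP-9276 above makes BoundedByteArrayOutputStream resettable over a caller-supplied buffer region (the protected constructor and resetBuffer()) and adds available(). The public behaviour can be sketched as:

import org.apache.hadoop.io.BoundedByteArrayOutputStream;

public class BoundedBufferExample {
  public static void main(String[] args) throws Exception {
    // 64-byte backing buffer, writes initially capped at 16 bytes.
    BoundedByteArrayOutputStream out = new BoundedByteArrayOutputStream(64, 16);
    out.write(new byte[10]);
    System.out.println("size=" + out.size() + ", available=" + out.available());

    out.reset();    // rewind; limit goes back to the full capacity
    out.reset(32);  // rewind again with an explicit new limit
    System.out.println("limit after reset(32): " + out.getLimit());
  }
}
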
Modified: hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SortedMapWritable.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SortedMapWritable.java?rev=1446832&r1=1446831&r2=1446832&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SortedMapWritable.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SortedMapWritable.java Sat Feb 16 01:12:07 2013
@@ -141,7 +141,7 @@ public class SortedMapWritable extends A
     for (Map.Entry<? extends WritableComparable, ? extends Writable> e:
       t.entrySet()) {
       
-      instance.put(e.getKey(), e.getValue());
+      put(e.getKey(), e.getValue());
     }
   }
 
@@ -203,4 +203,27 @@ public class SortedMapWritable extends A
       e.getValue().write(out);
     }
   }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (this == obj) {
+      return true;
+    }
+
+    if (obj instanceof SortedMapWritable) {
+      Map map = (Map) obj;
+      if (size() != map.size()) {
+        return false;
+      }
+
+      return entrySet().equals(map.entrySet());
+    }
+
+    return false;
+  }
+
+  @Override
+  public int hashCode() {
+    return instance.hashCode();
+  }
 }

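The SortedMapWritable changes above fix putAll() so it registers key/value classes (HADOOP-9154) and add equals()/hashCode() that honour the Map contract (HADOOP-9124). A short sketch of the now-expected behaviour:

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.SortedMapWritable;
import org.apache.hadoop.io.Text;

public class SortedMapWritableExample {
  public static void main(String[] args) {
    SortedMapWritable a = new SortedMapWritable();
    a.put(new Text("key"), new IntWritable(1));

    SortedMapWritable b = new SortedMapWritable();
    b.putAll(a); // also registers the key/value classes after HADOOP-9154

    // equals()/hashCode() now follow the java.util.Map contract.
    System.out.println(a.equals(b));
    System.out.println(a.hashCode() == b.hashCode());
  }
}
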
Modified: hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/Errno.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/Errno.java?rev=1446832&r1=1446831&r2=1446832&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/Errno.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/Errno.java Sat Feb 16 01:12:07 2013
@@ -55,6 +55,9 @@ public enum Errno {
   EPIPE,
   EDOM,
   ERANGE,
+  ELOOP,
+  ENAMETOOLONG,
+  ENOTEMPTY,
 
   UNKNOWN;
 }

Modified: hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java?rev=1446832&r1=1446831&r2=1446832&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java Sat Feb 16 01:12:07 2013
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.io.nativeio;
 
+import java.io.File;
 import java.io.FileDescriptor;
 import java.io.IOException;
 import java.util.Map;
@@ -293,4 +294,35 @@ public class NativeIO {
     stat.group = getName(IdCache.GROUP, stat.groupId);
     return stat;
   }
+  
+  /**
+   * A version of renameTo that throws a descriptive exception when it fails.
+   *
+   * @param src                  The source path
+   * @param dst                  The destination path
+   * 
+   * @throws NativeIOException   On failure.
+   */
+  public static void renameTo(File src, File dst)
+      throws IOException {
+    if (!nativeLoaded) {
+      if (!src.renameTo(dst)) {
+        throw new IOException("renameTo(src=" + src + ", dst=" +
+          dst + ") failed.");
+      }
+    } else {
+      renameTo0(src.getAbsolutePath(), dst.getAbsolutePath());
+    }
+  }
+
+  /**
+   * A version of renameTo that throws a descriptive exception when it fails.
+   *
+   * @param src                  The source path
+   * @param dst                  The destination path
+   * 
+   * @throws NativeIOException   On failure.
+   */
+  private static native void renameTo0(String src, String dst)
+      throws NativeIOException;
 }

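The new NativeIO.renameTo(File, File) above falls back to File#renameTo when libhadoop is not loaded and otherwise calls the native rename, throwing a descriptive exception on failure. A usage sketch with made-up paths:

import java.io.File;
import org.apache.hadoop.io.nativeio.NativeIO;

public class RenameToExample {
  public static void main(String[] args) throws Exception {
    File src = new File("/tmp/source-file");      // made-up path
    File dst = new File("/tmp/destination-file"); // made-up path
    // Throws an IOException (a NativeIOException with errno details when the
    // native library is loaded) instead of returning a bare boolean.
    NativeIO.renameTo(src, dst);
  }
}
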
Modified: hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java?rev=1446832&r1=1446831&r2=1446832&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java Sat Feb 16 01:12:07 2013
@@ -35,6 +35,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.ipc.StandbyException;
+import org.apache.hadoop.net.ConnectTimeoutException;
 
 /**
  * <p>
@@ -543,6 +544,7 @@ public class RetryPolicies {
           e instanceof NoRouteToHostException ||
           e instanceof UnknownHostException ||
           e instanceof StandbyException ||
+          e instanceof ConnectTimeoutException ||
           isWrappedStandbyException(e)) {
         return new RetryAction(
             RetryAction.RetryDecision.FAILOVER_AND_RETRY,
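
The added clause makes connect-phase timeouts eligible for fail-over like the other network errors listed above. A hedged sketch of where such an exception originates (illustrative only; the address and timeout are placeholders, and the connection may fail with other IOExceptions depending on the network):

import java.net.InetSocketAddress;
import java.net.Socket;
import org.apache.hadoop.net.ConnectTimeoutException;
import org.apache.hadoop.net.NetUtils;

public class ConnectTimeoutDemo {
  public static void main(String[] args) throws Exception {
    Socket socket = new Socket();
    try {
      // NetUtils.connect now rethrows connect-phase SocketTimeoutExceptions
      // as ConnectTimeoutException (see the NetUtils.java change later in this patch).
      NetUtils.connect(socket, new InetSocketAddress("10.255.255.1", 8020), 20000);
    } catch (ConnectTimeoutException cte) {
      // Under the updated policy this now maps to FAILOVER_AND_RETRY.
      System.out.println("connect timed out: " + cte.getMessage());
    } finally {
      socket.close();
    }
  }
}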

Modified: hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java?rev=1446832&r1=1446831&r2=1446832&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java Sat Feb 16 01:12:07 2013
@@ -67,6 +67,7 @@ import org.apache.hadoop.ipc.protobuf.Rp
 import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto;
 import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto;
 import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto;
+import org.apache.hadoop.net.ConnectTimeoutException;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.KerberosInfo;
 import org.apache.hadoop.security.SaslRpcClient;
@@ -511,14 +512,14 @@ public class Client {
           }
           this.socket.setSoTimeout(pingInterval);
           return;
-        } catch (SocketTimeoutException toe) {
+        } catch (ConnectTimeoutException toe) {
           /* Check for an address change and update the local reference.
            * Reset the failure counter if the address was changed
            */
           if (updateAddress()) {
             timeoutFailures = ioFailures = 0;
           }
-          handleConnectionFailure(timeoutFailures++,
+          handleConnectionTimeout(timeoutFailures++,
               maxRetriesOnSocketTimeouts, toe);
         } catch (IOException ie) {
           if (updateAddress()) {
@@ -680,7 +681,7 @@ public class Client {
       socket = null;
     }
 
-    /* Handle connection failures
+    /* Handle connection failures due to timeout on connect
      *
      * If the current number of retries is equal to the max number of retries,
      * stop retrying and throw the exception; Otherwise backoff 1 second and
@@ -694,7 +695,7 @@ public class Client {
      * @param ioe failure reason
      * @throws IOException if max number of retries is reached
      */
-    private void handleConnectionFailure(
+    private void handleConnectionTimeout(
         int curRetries, int maxRetries, IOException ioe) throws IOException {
 
       closeConnection();

Modified: hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java?rev=1446832&r1=1446831&r2=1446832&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java Sat Feb 16 01:12:07 2013
@@ -62,7 +62,7 @@ public class ProtobufRpcEngine implement
   
   static { // Register the rpcRequest deserializer for WritableRpcEngine 
     org.apache.hadoop.ipc.Server.registerProtocolEngine(
-        RPC.RpcKind.RPC_PROTOCOL_BUFFER, RpcRequestWritable.class,
+        RPC.RpcKind.RPC_PROTOCOL_BUFFER, RpcRequestWrapper.class,
         new Server.ProtoBufRpcInvoker());
   }
 
@@ -122,7 +122,7 @@ public class ProtobufRpcEngine implement
     public Invoker(Class<?> protocol, Client.ConnectionId connId,
         Configuration conf, SocketFactory factory) {
       this.remoteId = connId;
-      this.client = CLIENTS.getClient(conf, factory, RpcResponseWritable.class);
+      this.client = CLIENTS.getClient(conf, factory, RpcResponseWrapper.class);
       this.protocolName = RPC.getProtocolName(protocol);
       this.clientProtocolVersion = RPC
           .getProtocolVersion(protocol);
@@ -191,7 +191,7 @@ public class ProtobufRpcEngine implement
       }
 
       RequestProto rpcRequest = constructRpcRequest(method, args);
-      RpcResponseWritable val = null;
+      RpcResponseWrapper val = null;
       
       if (LOG.isTraceEnabled()) {
         LOG.trace(Thread.currentThread().getId() + ": Call -> " +
@@ -199,8 +199,8 @@ public class ProtobufRpcEngine implement
             " {" + TextFormat.shortDebugString((Message) args[1]) + "}");
       }
       try {
-        val = (RpcResponseWritable) client.call(RPC.RpcKind.RPC_PROTOCOL_BUFFER,
-            new RpcRequestWritable(rpcRequest), remoteId);
+        val = (RpcResponseWrapper) client.call(RPC.RpcKind.RPC_PROTOCOL_BUFFER,
+            new RpcRequestWrapper(rpcRequest), remoteId);
 
       } catch (Throwable e) {
         if (LOG.isTraceEnabled()) {
@@ -268,16 +268,20 @@ public class ProtobufRpcEngine implement
   }
 
   /**
-   * Writable Wrapper for Protocol Buffer Requests
+   * Wrapper for Protocol Buffer Requests
+   * 
+   * Note that while this wrapper is writable, the request on the wire is in
+   * Protobuf. Several methods on {@link org.apache.hadoop.ipc.Server} and {@link RPC}
+   * use type Writable as a wrapper to work across multiple RpcEngine kinds.
    */
-  private static class RpcRequestWritable implements Writable {
+  private static class RpcRequestWrapper implements Writable {
     RequestProto message;
 
     @SuppressWarnings("unused")
-    public RpcRequestWritable() {
+    public RpcRequestWrapper() {
     }
 
-    RpcRequestWritable(RequestProto message) {
+    RpcRequestWrapper(RequestProto message) {
       this.message = message;
     }
 
@@ -303,16 +307,20 @@ public class ProtobufRpcEngine implement
   }
 
   /**
-   * Writable Wrapper for Protocol Buffer Responses
+   * Wrapper for Protocol Buffer Responses
+   * 
+   * Note that while this wrapper is writable, the response on the wire is in
+   * Protobuf. Several methods on {@link org.apache.hadoop.ipc.Server} and {@link RPC}
+   * use type Writable as a wrapper to work across multiple RpcEngine kinds.
    */
-  private static class RpcResponseWritable implements Writable {
+  private static class RpcResponseWrapper implements Writable {
     byte[] responseMessage;
 
     @SuppressWarnings("unused")
-    public RpcResponseWritable() {
+    public RpcResponseWrapper() {
     }
 
-    public RpcResponseWritable(Message message) {
+    public RpcResponseWrapper(Message message) {
       this.responseMessage = message.toByteArray();
     }
 
@@ -336,7 +344,7 @@ public class ProtobufRpcEngine implement
   @InterfaceStability.Unstable
   static Client getClient(Configuration conf) {
     return CLIENTS.getClient(conf, SocketFactory.getDefault(),
-        RpcResponseWritable.class);
+        RpcResponseWrapper.class);
   }
   
  
@@ -425,7 +433,7 @@ public class ProtobufRpcEngine implement
        */
       public Writable call(RPC.Server server, String connectionProtocolName,
           Writable writableRequest, long receiveTime) throws Exception {
-        RpcRequestWritable request = (RpcRequestWritable) writableRequest;
+        RpcRequestWrapper request = (RpcRequestWrapper) writableRequest;
         RequestProto rpcRequest = request.message;
         String methodName = rpcRequest.getMethodName();
         
@@ -487,7 +495,7 @@ public class ProtobufRpcEngine implement
         } catch (Exception e) {
           throw e;
         }
-        return new RpcResponseWritable(result);
+        return new RpcResponseWrapper(result);
       }
     }
   }
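
The renames above are cosmetic, but the pattern they describe is worth spelling out: a Writable shell whose payload is a serialized protobuf message, so Writable-based RPC plumbing can carry protobuf requests and responses. A simplified sketch of that pattern (illustrative only; the real wrappers may use different framing):

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.apache.hadoop.io.Writable;

class ProtoPayloadWrapper implements Writable {
  private byte[] payload; // e.g. the result of Message.toByteArray()

  ProtoPayloadWrapper() {} // no-arg constructor for reflective instantiation
  ProtoPayloadWrapper(byte[] payload) { this.payload = payload; }

  @Override
  public void write(DataOutput out) throws IOException {
    out.writeInt(payload.length); // simple length-prefixed framing (assumption)
    out.write(payload);
  }

  @Override
  public void readFields(DataInput in) throws IOException {
    payload = new byte[in.readInt()];
    in.readFully(payload);
  }
}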

Modified: hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java?rev=1446832&r1=1446831&r2=1446832&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java Sat Feb 16 01:12:07 2013
@@ -20,6 +20,7 @@ package org.apache.hadoop.net;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
+import java.lang.reflect.Constructor;
 import java.net.BindException;
 import java.net.InetAddress;
 import java.net.InetSocketAddress;
@@ -517,11 +518,15 @@ public class NetUtils {
       socket.bind(localAddr);
     }
 
-    if (ch == null) {
-      // let the default implementation handle it.
-      socket.connect(endpoint, timeout);
-    } else {
-      SocketIOWithTimeout.connect(ch, endpoint, timeout);
+    try {
+      if (ch == null) {
+        // let the default implementation handle it.
+        socket.connect(endpoint, timeout);
+      } else {
+        SocketIOWithTimeout.connect(ch, endpoint, timeout);
+      }
+    } catch (SocketTimeoutException ste) {
+      throw new ConnectTimeoutException(ste.getMessage());
     }
 
     // There is a very rare case allowed by the TCP specification, such that
@@ -719,7 +724,7 @@ public class NetUtils {
               + see("BindException"));
     } else if (exception instanceof ConnectException) {
       // connection refused; include the host:port in the error
-      return (ConnectException) new ConnectException(
+      return wrapWithMessage(exception, 
           "Call From "
               + localHost
               + " to "
@@ -729,32 +734,28 @@ public class NetUtils {
               + " failed on connection exception: "
               + exception
               + ";"
-              + see("ConnectionRefused"))
-          .initCause(exception);
+              + see("ConnectionRefused"));
     } else if (exception instanceof UnknownHostException) {
-      return (UnknownHostException) new UnknownHostException(
+      return wrapWithMessage(exception,
           "Invalid host name: "
               + getHostDetailsAsString(destHost, destPort, localHost)
               + exception
               + ";"
-              + see("UnknownHost"))
-          .initCause(exception);
+              + see("UnknownHost"));
     } else if (exception instanceof SocketTimeoutException) {
-      return (SocketTimeoutException) new SocketTimeoutException(
+      return wrapWithMessage(exception,
           "Call From "
               + localHost + " to " + destHost + ":" + destPort
               + " failed on socket timeout exception: " + exception
               + ";"
-              + see("SocketTimeout"))
-          .initCause(exception);
+              + see("SocketTimeout"));
     } else if (exception instanceof NoRouteToHostException) {
-      return (NoRouteToHostException) new NoRouteToHostException(
+      return wrapWithMessage(exception,
           "No Route to Host from  "
               + localHost + " to " + destHost + ":" + destPort
               + " failed on socket timeout exception: " + exception
               + ";"
-              + see("NoRouteToHost"))
-          .initCause(exception);
+              + see("NoRouteToHost"));
     }
     else {
       return (IOException) new IOException("Failed on local exception: "
@@ -769,6 +770,21 @@ public class NetUtils {
   private static String see(final String entry) {
     return FOR_MORE_DETAILS_SEE + HADOOP_WIKI + entry;
   }
+  
+  @SuppressWarnings("unchecked")
+  private static <T extends IOException> T wrapWithMessage(
+      T exception, String msg) {
+    Class<? extends Throwable> clazz = exception.getClass();
+    try {
+      Constructor<? extends Throwable> ctor = clazz.getConstructor(String.class);
+      Throwable t = ctor.newInstance(msg);
+      return (T)(t.initCause(exception));
+    } catch (Throwable e) {
+      LOG.warn("Unable to wrap exception of type " +
+          clazz + ": it has no (String) constructor", e);
+      return exception;
+    }
+  }
 
   /**
    * Get the host details as a string
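
The refactoring above replaces per-type casts with a single reflective helper: wrapWithMessage rebuilds the same exception class with a richer message and chains the original as the cause. A hedged usage sketch via the public entry point (the wrapException parameter order shown here is assumed from the surrounding context; host names are placeholders):

import java.io.IOException;
import java.net.ConnectException;
import org.apache.hadoop.net.NetUtils;

public class WrapExceptionDemo {
  public static void main(String[] args) {
    ConnectException refused = new ConnectException("Connection refused");
    // Assumed parameter order: destHost, destPort, localHost, localPort, cause.
    IOException wrapped = NetUtils.wrapException(
        "namenode.example.com", 8020, "client.example.com", 0, refused);

    // Same concrete type as the cause, message enriched with host:port details,
    // original exception preserved as the cause.
    System.out.println(wrapped.getClass().getSimpleName()); // expected: ConnectException
    System.out.println(wrapped.getCause() == refused);      // expected: true
  }
}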

Modified: hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java?rev=1446832&r1=1446831&r2=1446832&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java Sat Feb 16 01:12:07 2013
@@ -301,17 +301,26 @@ public class UserGroupInformation {
   
   private static String OS_LOGIN_MODULE_NAME;
   private static Class<? extends Principal> OS_PRINCIPAL_CLASS;
+  
   private static final boolean windows =
       System.getProperty("os.name").startsWith("Windows");
   private static final boolean is64Bit =
       System.getProperty("os.arch").contains("64");
+  private static final boolean ibmJava = System.getProperty("java.vendor").contains("IBM");
+  private static final boolean aix = System.getProperty("os.name").equals("AIX");
+
   /* Return the OS login module class name */
   private static String getOSLoginModuleName() {
-    if (System.getProperty("java.vendor").contains("IBM")) {
-      return windows ? (is64Bit
-          ? "com.ibm.security.auth.module.Win64LoginModule"
-          : "com.ibm.security.auth.module.NTLoginModule")
-        : "com.ibm.security.auth.module.LinuxLoginModule";
+    if (ibmJava) {
+      if (windows) {
+        return is64Bit ? "com.ibm.security.auth.module.Win64LoginModule"
+            : "com.ibm.security.auth.module.NTLoginModule";
+      } else if (aix) {
+        return is64Bit ? "com.ibm.security.auth.module.AIX64LoginModule"
+            : "com.ibm.security.auth.module.AIXLoginModule";
+      } else {
+        return "com.ibm.security.auth.module.LinuxLoginModule";
+      }
     } else {
       return windows ? "com.sun.security.auth.module.NTLoginModule"
         : "com.sun.security.auth.module.UnixLoginModule";
@@ -323,21 +332,24 @@ public class UserGroupInformation {
   private static Class<? extends Principal> getOsPrincipalClass() {
     ClassLoader cl = ClassLoader.getSystemClassLoader();
     try {
-      if (System.getProperty("java.vendor").contains("IBM")) {
-        if (windows) {
-          return (Class<? extends Principal>) (is64Bit
-            ? cl.loadClass("com.ibm.security.auth.UsernamePrincipal")
-            : cl.loadClass("com.ibm.security.auth.NTUserPrincipal"));
+      String principalClass = null;
+      if (ibmJava) {
+        if (is64Bit) {
+          principalClass = "com.ibm.security.auth.UsernamePrincipal";
         } else {
-          return (Class<? extends Principal>) (is64Bit
-            ? cl.loadClass("com.ibm.security.auth.UsernamePrincipal")
-            : cl.loadClass("com.ibm.security.auth.LinuxPrincipal"));
+          if (windows) {
+            principalClass = "com.ibm.security.auth.NTUserPrincipal";
+          } else if (aix) {
+            principalClass = "com.ibm.security.auth.AIXPrincipal";
+          } else {
+            principalClass = "com.ibm.security.auth.LinuxPrincipal";
+          }
         }
       } else {
-        return (Class<? extends Principal>) (windows
-           ? cl.loadClass("com.sun.security.auth.NTUserPrincipal")
-           : cl.loadClass("com.sun.security.auth.UnixPrincipal"));
+        principalClass = windows ? "com.sun.security.auth.NTUserPrincipal"
+            : "com.sun.security.auth.UnixPrincipal";
       }
+      return (Class<? extends Principal>) cl.loadClass(principalClass);
     } catch (ClassNotFoundException e) {
       LOG.error("Unable to find JAAS classes:" + e.getMessage());
     }
@@ -418,12 +430,21 @@ public class UserGroupInformation {
     private static final Map<String,String> USER_KERBEROS_OPTIONS = 
       new HashMap<String,String>();
     static {
-      USER_KERBEROS_OPTIONS.put("doNotPrompt", "true");
-      USER_KERBEROS_OPTIONS.put("useTicketCache", "true");
-      USER_KERBEROS_OPTIONS.put("renewTGT", "true");
+      if (ibmJava) {
+        USER_KERBEROS_OPTIONS.put("useDefaultCcache", "true");
+      } else {
+        USER_KERBEROS_OPTIONS.put("doNotPrompt", "true");
+        USER_KERBEROS_OPTIONS.put("useTicketCache", "true");
+        USER_KERBEROS_OPTIONS.put("renewTGT", "true");
+      }
       String ticketCache = System.getenv("KRB5CCNAME");
       if (ticketCache != null) {
-        USER_KERBEROS_OPTIONS.put("ticketCache", ticketCache);
+        if (ibmJava) {
+          // The first value searched when "useDefaultCcache" is used.
+          System.setProperty("KRB5CCNAME", ticketCache);
+        } else {
+          USER_KERBEROS_OPTIONS.put("ticketCache", ticketCache);
+        }
       }
       USER_KERBEROS_OPTIONS.putAll(BASIC_JAAS_OPTIONS);
     }
@@ -434,10 +455,14 @@ public class UserGroupInformation {
     private static final Map<String,String> KEYTAB_KERBEROS_OPTIONS = 
       new HashMap<String,String>();
     static {
-      KEYTAB_KERBEROS_OPTIONS.put("doNotPrompt", "true");
-      KEYTAB_KERBEROS_OPTIONS.put("useKeyTab", "true");
-      KEYTAB_KERBEROS_OPTIONS.put("storeKey", "true");
-      KEYTAB_KERBEROS_OPTIONS.put("refreshKrb5Config", "true");
+      if (ibmJava) {
+        KEYTAB_KERBEROS_OPTIONS.put("credsType", "both");
+      } else {
+        KEYTAB_KERBEROS_OPTIONS.put("doNotPrompt", "true");
+        KEYTAB_KERBEROS_OPTIONS.put("useKeyTab", "true");
+        KEYTAB_KERBEROS_OPTIONS.put("storeKey", "true");
+        KEYTAB_KERBEROS_OPTIONS.put("refreshKrb5Config", "true");
+      }
       KEYTAB_KERBEROS_OPTIONS.putAll(BASIC_JAAS_OPTIONS);      
     }
     private static final AppConfigurationEntry KEYTAB_KERBEROS_LOGIN =
@@ -462,7 +487,12 @@ public class UserGroupInformation {
       } else if (USER_KERBEROS_CONFIG_NAME.equals(appName)) {
         return USER_KERBEROS_CONF;
       } else if (KEYTAB_KERBEROS_CONFIG_NAME.equals(appName)) {
-        KEYTAB_KERBEROS_OPTIONS.put("keyTab", keytabFile);
+        if (ibmJava) {
+          KEYTAB_KERBEROS_OPTIONS.put("useKeytab",
+              prependFileAuthority(keytabFile));
+        } else {
+          KEYTAB_KERBEROS_OPTIONS.put("keyTab", keytabFile);
+        }
         KEYTAB_KERBEROS_OPTIONS.put("principal", keytabPrincipal);
         return KEYTAB_KERBEROS_CONF;
       }
@@ -470,6 +500,11 @@ public class UserGroupInformation {
     }
   }
 
+  private static String prependFileAuthority(String keytabPath) {
+    return keytabPath.startsWith("file://") ? keytabPath
+        : "file://" + keytabPath;
+  }
+
   /**
    * Represents a javax.security configuration that is created at runtime.
    */
@@ -666,6 +701,7 @@ public class UserGroupInformation {
         }
         loginUser.spawnAutoRenewalThreadForUserCreds();
       } catch (LoginException le) {
+        LOG.debug("failure to login", le);
         throw new IOException("failure to login", le);
       }
       if (LOG.isDebugEnabled()) {

Modified: hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java?rev=1446832&r1=1446831&r2=1446832&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java Sat Feb 16 01:12:07 2013
@@ -268,7 +268,12 @@ public class GenericOptionsParser {
     }
 
     if (line.hasOption("jt")) {
-      conf.set("mapred.job.tracker", line.getOptionValue("jt"), 
+      String optionValue = line.getOptionValue("jt");
+      if (optionValue.equalsIgnoreCase("local")) {
+        conf.set("mapreduce.framework.name", optionValue);
+      }
+
+      conf.set("yarn.resourcemanager.address", optionValue, 
           "from -jt command line option");
     }
     if (line.hasOption("conf")) {
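
With this change, "-jt local" selects the local MapReduce framework, while the option value is also applied as the resource manager address. A small sketch of the resulting configuration (illustrative only):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.GenericOptionsParser;

public class JtOptionDemo {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Equivalent to passing "-jt local" on a tool's command line.
    new GenericOptionsParser(conf, new String[] {"-jt", "local"});

    System.out.println(conf.get("mapreduce.framework.name"));     // expected: local
    System.out.println(conf.get("yarn.resourcemanager.address")); // expected: local
  }
}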

Modified: hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LineReader.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LineReader.java?rev=1446832&r1=1446831&r2=1446832&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LineReader.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LineReader.java Sat Feb 16 01:12:07 2013
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.util;
 
+import java.io.Closeable;
 import java.io.IOException;
 import java.io.InputStream;
 
@@ -39,7 +40,7 @@ import org.apache.hadoop.io.Text;
  */
 @InterfaceAudience.LimitedPrivate({"MapReduce"})
 @InterfaceStability.Unstable
-public class LineReader {
+public class LineReader implements Closeable {
   private static final int DEFAULT_BUFFER_SIZE = 64 * 1024;
   private int bufferSize = DEFAULT_BUFFER_SIZE;
   private InputStream in;
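
Implementing Closeable lets LineReader participate in try-with-resources. A minimal sketch (the input path is a placeholder):

import java.io.FileInputStream;
import java.io.IOException;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.util.LineReader;

public class LineReaderDemo {
  public static void main(String[] args) throws IOException {
    // try-with-resources now compiles because LineReader implements Closeable.
    try (LineReader reader = new LineReader(new FileInputStream("/tmp/input.txt"))) {
      Text line = new Text();
      while (reader.readLine(line) > 0) {
        System.out.println(line);
      }
    }
  }
}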

Modified: hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java?rev=1446832&r1=1446831&r2=1446832&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java Sat Feb 16 01:12:07 2013
@@ -23,8 +23,6 @@ import java.io.StringWriter;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.text.DateFormat;
-import java.text.DecimalFormat;
-import java.text.NumberFormat;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
@@ -34,12 +32,13 @@ import java.util.List;
 import java.util.Locale;
 import java.util.StringTokenizer;
 
-import com.google.common.net.InetAddresses;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.net.NetUtils;
 
+import com.google.common.net.InetAddresses;
+
 /**
  * General string utils
  */
@@ -52,13 +51,6 @@ public class StringUtils {
    */
   public static final int SHUTDOWN_HOOK_PRIORITY = 0;
 
-  private static final DecimalFormat decimalFormat;
-  static {
-          NumberFormat numberFormat = NumberFormat.getNumberInstance(Locale.ENGLISH);
-          decimalFormat = (DecimalFormat) numberFormat;
-          decimalFormat.applyPattern("#.##");
-  }
-
   /**
    * Make a string representation of the exception.
    * @param e The exception to stringify
@@ -87,50 +79,33 @@ public class StringUtils {
     }
     return fullHostname;
   }
-
-  private static DecimalFormat oneDecimal = new DecimalFormat("0.0");
   
   /**
    * Given an integer, return a string that is in an approximate, but human 
    * readable format. 
-   * It uses the bases 'k', 'm', and 'g' for 1024, 1024**2, and 1024**3.
    * @param number the number to format
    * @return a human readable form of the integer
+   *
+   * @deprecated use {@link TraditionalBinaryPrefix#long2String(long, String, int)}.
    */
+  @Deprecated
   public static String humanReadableInt(long number) {
-    long absNumber = Math.abs(number);
-    double result = number;
-    String suffix = "";
-    if (absNumber < 1024) {
-      // since no division has occurred, don't format with a decimal point
-      return String.valueOf(number);
-    } else if (absNumber < 1024 * 1024) {
-      result = number / 1024.0;
-      suffix = "k";
-    } else if (absNumber < 1024 * 1024 * 1024) {
-      result = number / (1024.0 * 1024);
-      suffix = "m";
-    } else {
-      result = number / (1024.0 * 1024 * 1024);
-      suffix = "g";
-    }
-    return oneDecimal.format(result) + suffix;
+    return TraditionalBinaryPrefix.long2String(number, "", 1);
   }
-  
+
+  /** The same as String.format(Locale.ENGLISH, format, objects). */
+  public static String format(final String format, final Object... objects) {
+    return String.format(Locale.ENGLISH, format, objects);
+  }
+
   /**
    * Format a percentage for presentation to the user.
-   * @param done the percentage to format (0.0 to 1.0)
-   * @param digits the number of digits past the decimal point
+   * @param fraction the percentage as a fraction, e.g. 0.1 = 10%
+   * @param decimalPlaces the number of decimal places
    * @return a string representation of the percentage
    */
-  public static String formatPercent(double done, int digits) {
-    DecimalFormat percentFormat = new DecimalFormat("0.00%");
-    double scale = Math.pow(10.0, digits+2);
-    double rounded = Math.floor(done * scale);
-    percentFormat.setDecimalSeparatorAlwaysShown(false);
-    percentFormat.setMinimumFractionDigits(digits);
-    percentFormat.setMaximumFractionDigits(digits);
-    return percentFormat.format(rounded / scale);
+  public static String formatPercent(double fraction, int decimalPlaces) {
+    return format("%." + decimalPlaces + "f%%", fraction*100);
   }
   
   /**
@@ -165,7 +140,7 @@ public class StringUtils {
     }
     StringBuilder s = new StringBuilder(); 
     for(int i = start; i < end; i++) {
-      s.append(String.format("%02x", bytes[i]));
+      s.append(format("%02x", bytes[i]));
     }
     return s.toString();
   }
@@ -630,18 +605,22 @@ public class StringUtils {
    * TraditionalBinaryPrefix symbol are case insensitive. 
    */
   public static enum TraditionalBinaryPrefix {
-    KILO(1024),
-    MEGA(KILO.value << 10),
-    GIGA(MEGA.value << 10),
-    TERA(GIGA.value << 10),
-    PETA(TERA.value << 10),
-    EXA(PETA.value << 10);
+    KILO(10),
+    MEGA(KILO.bitShift + 10),
+    GIGA(MEGA.bitShift + 10),
+    TERA(GIGA.bitShift + 10),
+    PETA(TERA.bitShift + 10),
+    EXA (PETA.bitShift + 10);
 
     public final long value;
     public final char symbol;
+    public final int bitShift;
+    public final long bitMask;
 
-    TraditionalBinaryPrefix(long value) {
-      this.value = value;
+    private TraditionalBinaryPrefix(int bitShift) {
+      this.bitShift = bitShift;
+      this.value = 1L << bitShift;
+      this.bitMask = this.value - 1L;
       this.symbol = toString().charAt(0);
     }
 
@@ -692,8 +671,58 @@ public class StringUtils {
         return num * prefix;
       }
     }
+
+    /**
+     * Convert a long integer to a string with traditional binary prefix.
+     * 
+     * @param n the value to be converted
+     * @param unit The unit, e.g. "B" for bytes.
+     * @param decimalPlaces The number of decimal places.
+     * @return a string with traditional binary prefix.
+     */
+    public static String long2String(long n, String unit, int decimalPlaces) {
+      if (unit == null) {
+        unit = "";
+      }
+      // take care of a special case
+      if (n == Long.MIN_VALUE) {
+        return "-8 " + EXA.symbol + unit;
+      }
+
+      final StringBuilder b = new StringBuilder();
+      // take care of negative numbers
+      if (n < 0) {
+        b.append('-');
+        n = -n;
+      }
+      if (n < KILO.value) {
+        //no prefix
+        b.append(n);
+        return (unit.isEmpty()? b: b.append(" ").append(unit)).toString();
+      } else {
+        //find traditional binary prefix
+        int i = 0;
+        for(; i < values().length && n >= values()[i].value; i++);
+        TraditionalBinaryPrefix prefix = values()[i - 1];
+
+        if ((n & prefix.bitMask) == 0) {
+          //exact division
+          b.append(n >> prefix.bitShift);
+        } else {
+          final String  format = "%." + decimalPlaces + "f";
+          String s = format(format, n/(double)prefix.value);
+          //check a special rounding up case
+          if (s.startsWith("1024")) {
+            prefix = values()[i];
+            s = format(format, n/(double)prefix.value);
+          }
+          b.append(s);
+        }
+        return b.append(' ').append(prefix.symbol).append(unit).toString();
+      }
+    }
   }
-  
+
     /**
      * Escapes HTML Special characters present in the string.
      * @param string
@@ -731,32 +760,16 @@ public class StringUtils {
     }
 
   /**
-   * Return an abbreviated English-language desc of the byte length
+   * @return a byte description of the given long integer value.
    */
   public static String byteDesc(long len) {
-    double val = 0.0;
-    String ending = "";
-    if (len < 1024 * 1024) {
-      val = (1.0 * len) / 1024;
-      ending = " KB";
-    } else if (len < 1024 * 1024 * 1024) {
-      val = (1.0 * len) / (1024 * 1024);
-      ending = " MB";
-    } else if (len < 1024L * 1024 * 1024 * 1024) {
-      val = (1.0 * len) / (1024 * 1024 * 1024);
-      ending = " GB";
-    } else if (len < 1024L * 1024 * 1024 * 1024 * 1024) {
-      val = (1.0 * len) / (1024L * 1024 * 1024 * 1024);
-      ending = " TB";
-    } else {
-      val = (1.0 * len) / (1024L * 1024 * 1024 * 1024 * 1024);
-      ending = " PB";
-    }
-    return limitDecimalTo2(val) + ending;
+    return TraditionalBinaryPrefix.long2String(len, "B", 2);
   }
 
-  public static synchronized String limitDecimalTo2(double d) {
-    return decimalFormat.format(d);
+  /** @deprecated use StringUtils.format("%.2f", d). */
+  @Deprecated
+  public static String limitDecimalTo2(double d) {
+    return format("%.2f", d);
   }
   
   /**
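
The formatting helpers above now all funnel through TraditionalBinaryPrefix.long2String. A few illustrative calls with their expected outputs under the new code (worth re-checking locally, since rounding behaviour differs slightly from the old DecimalFormat code):

import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix;

public class BinaryPrefixDemo {
  public static void main(String[] args) {
    System.out.println(TraditionalBinaryPrefix.long2String(1024, "B", 2)); // expected: 1 KB
    System.out.println(StringUtils.byteDesc(10L * 1024 * 1024));           // expected: 10 MB
    System.out.println(StringUtils.humanReadableInt(1536));                // expected: 1.5 K
    System.out.println(StringUtils.formatPercent(0.1, 1));                 // expected: 10.0%
  }
}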

Modified: hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c?rev=1446832&r1=1446831&r2=1446832&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c (original)
+++ hadoop/common/branches/HDFS-347/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c Sat Feb 16 01:12:07 2013
@@ -24,11 +24,12 @@
 #include <grp.h>
 #include <jni.h>
 #include <pwd.h>
+#include <stdio.h>
 #include <stdlib.h>
 #include <string.h>
 #include <sys/stat.h>
-#include <sys/types.h>
 #include <sys/syscall.h>
+#include <sys/types.h>
 #include <unistd.h>
 
 #include "config.h"
@@ -502,6 +503,26 @@ ssize_t get_pw_buflen() {
   #endif
   return (ret > 512) ? ret : 512;
 }
+
+JNIEXPORT void JNICALL 
+Java_org_apache_hadoop_io_nativeio_NativeIO_renameTo0(JNIEnv *env, 
+jclass clazz, jstring jsrc, jstring jdst)
+{
+  const char *src = NULL, *dst = NULL;
+  
+  src = (*env)->GetStringUTFChars(env, jsrc, NULL);
+  if (!src) goto done; // exception was thrown
+  dst = (*env)->GetStringUTFChars(env, jdst, NULL);
+  if (!dst) goto done; // exception was thrown
+  if (rename(src, dst)) {
+    throw_ioe(env, errno);
+  }
+
+done:
+  if (src) (*env)->ReleaseStringUTFChars(env, jsrc, src);
+  if (dst) (*env)->ReleaseStringUTFChars(env, jdst, dst);
+}
+
 /**
  * vim: sw=2: ts=2: et:
  */


