hadoop-common-commits mailing list archives

From: sur...@apache.org
Subject: svn commit: r1390763 [1/3] - in /hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common: ./ src/ src/contrib/bash-tab-completion/ src/main/bin/ src/main/docs/ src/main/docs/src/documentation/content/xdocs/ src/main/java/ src/main/j...
Date: Wed, 26 Sep 2012 22:55:16 GMT
Author: suresh
Date: Wed Sep 26 22:55:00 2012
New Revision: 1390763

URL: http://svn.apache.org/viewvc?rev=1390763&view=rev
Log:
Merging all the trunk changes into the branch-trunk-win branch

Added:
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/test/
      - copied from r1390762, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/test/
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/test/org/
      - copied from r1390762, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/test/org/
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/test/org/apache/
      - copied from r1390762, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/test/org/apache/
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/test/org/apache/hadoop/
      - copied from r1390762, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/test/org/apache/hadoop/
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/test/org/apache/hadoop/util/
      - copied from r1390762, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/test/org/apache/hadoop/util/
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/test/org/apache/hadoop/util/test_bulk_crc32.c
      - copied unchanged from r1390762, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/test/org/apache/hadoop/util/test_bulk_crc32.c
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestTextCommand.java
      - copied unchanged from r1390762, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestTextCommand.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/TestTimedOutTestsListener.java
      - copied unchanged from r1390762, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/TestTimedOutTestsListener.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/TimedOutTestsListener.java
      - copied unchanged from r1390762, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/TimedOutTestsListener.java
Modified:
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/CHANGES.txt   (contents, props changed)
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/pom.xml
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/CMakeLists.txt
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/JNIFlags.cmake
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/contrib/bash-tab-completion/hadoop.sh
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/bin/slaves.sh
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/bin/start-all.sh
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/docs/   (props changed)
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/file_system_shell.xml
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/   (props changed)
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationServlet.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32FileChecksum.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Trash.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPInputStream.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/local/RawLocalFs.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/S3InputStream.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/SshFenceByTcpPort.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFCRpcServer.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DefaultStringifier.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFileDumper.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/SerializationFactory.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsDynamicMBeanBase.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableQuantiles.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/CsvRecordInput.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/CsvRecordOutput.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/SecretManager.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenIdentifier.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ExitUtil.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Compressor.c
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.c
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.c
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.h
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/site/apt/DeprecatedProperties.apt.vm
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/core/   (props changed)
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextMainOperationsBaseTest.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/RandomDatum.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestIOUtils.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/TestSerializationFactory.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/MiniRPCBenchmark.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/RPCCallBenchmark.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestMultipleProtocolServer.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpc.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestDoAsEffectiveUser.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestStringUtils.java

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/CHANGES.txt Wed Sep 26 22:55:00 2012
@@ -1,6 +1,6 @@
 Hadoop Change Log
 
-Trunk (unreleased changes)
+Trunk (Unreleased)
 
   INCOMPATIBLE CHANGES
 
@@ -101,6 +101,22 @@ Trunk (unreleased changes)
     HADOOP-8619. WritableComparator must implement no-arg constructor.
     (Chris Douglas via Suresh)
 
+    HADOOP-8814. Replace string equals "" by String#isEmpty().
+    (Brandon Li via suresh)
+
+    HADOOP-8588. SerializationFactory shouldn't throw a
+    NullPointerException if the serializations list is empty.
+    (Sho Shimauchi via harsh)
+
+    HADOOP-7930. Kerberos relogin interval in UserGroupInformation
+    should be configurable (Robert Kanter via harsh)
+
+    HADOOP-8838. Colorize the test-patch output sent to JIRA (Harsh J via
+    bobby)
+
+    HADOOP-8840. Fix the test-patch colorizer to cover all sorts of +1 lines.
+    (Harsh J via bobby)
+
   BUG FIXES
 
     HADOOP-8177. MBeans shouldn't try to register when it fails to create MBeanName.
@@ -197,11 +213,85 @@ Trunk (unreleased changes)
     HADOOP-8623. hadoop jar command should respect HADOOP_OPTS.
     (Steven Willis via suresh)
 
+    HADOOP-8684. Deadlock between WritableComparator and WritableComparable.
+    (Jing Zhao via suresh)
+
+    HADOOP-8786. HttpServer continues to start even if AuthenticationFilter
+    fails to init (todd)
+
+    HADOOP-8767. Secondary namenode is started on slave nodes instead of
+    master nodes. (Giovanni Delussu via suresh)
+
+    HADOOP-8818. Use equals instead == in MD5MD5CRC32FileChecksum
+    and TFileDumper. (Brandon Li via suresh)
+
+    HADOOP-8821. Fix findbugs warning related to concatenating string in a 
+    for loop in Configuration#dumpDeprecatedKeys(). (suresh)
+
+    HADOOP-7256. Resource leak during failure scenario of closing
+    of resources. (Ramkrishna S. Vasudevan via harsh)
+
+    HADOOP-8151. Error handling in snappy decompressor throws invalid
+    exceptions. (Matt Foley via harsh)
+
+    HADOOP-8813. Add InterfaceAudience and InterfaceStability annotations
+    to RPC Server and Client classes. (Brandon Li via suresh)
+
+    HADOOP-8815. RandomDatum needs to override hashCode().
+    (Brandon Li via suresh)
+
+    HADOOP-8436. NPE In getLocalPathForWrite ( path, conf ) when the
+    required context item is not configured
+    (Brahma Reddy Battula via harsh)
+
+    HADOOP-3957. Change MutableQuantiles to use a shared thread for rolling
+    over metrics. (Andrew Wang via todd)
+
   OPTIMIZATIONS
 
     HADOOP-7761. Improve the performance of raw comparisons. (todd)
 
-Branch-2 ( Unreleased changes )
+Release 2.0.3-alpha - Unreleased 
+
+  INCOMPATIBLE CHANGES
+
+  NEW FEATURES
+
+    HADOOP-8597. Permit FsShell's text command to read Avro files.
+    (Ivan Vladimirov Ivanov via cutting)
+
+  IMPROVEMENTS
+
+    HADOOP-8789. Tests setLevel(Level.OFF) should be Level.ERROR.
+    (Andy Isaacson via eli)
+
+    HADOOP-8755. Print thread dump when tests fail due to timeout. (Andrey
+    Klochkov via atm)
+
+    HADOOP-8806. libhadoop.so: dlopen should be better at locating
+    libsnappy.so, etc. (Colin Patrick McCabe via eli)
+
+    HADOOP-8812. ExitUtil#terminate should print Exception#toString. (eli)
+
+    HADOOP-8736. Add Builder for building RPC server. (Brandon Li via Suresh)
+
+  OPTIMIZATIONS
+
+  BUG FIXES
+
+    HADOOP-8795. BASH tab completion doesn't look in PATH, assumes path to
+    executable is specified. (Sean Mackrory via atm)
+
+    HADOOP-8780. Update DeprecatedProperties apt file. (Ahmed Radwan via
+    tomwhite)
+
+    HADOOP-8833. fs -text should make sure to call inputstream.seek(0)
+    before using input stream. (tomwhite and harsh)
+
+    HADOOP-8791. Fix rm command documentation to indicate it deletes
+    files and not directories. (Jing Zhao via suresh)
+
+Release 2.0.2-alpha - 2012-09-07 
 
   INCOMPATIBLE CHANGES
 
@@ -211,6 +301,8 @@ Branch-2 ( Unreleased changes )
     HADOOP-8689. Make trash a server side configuration option. (eli)
 
     HADOOP-8710. Remove ability for users to easily run the trash emptier. (eli)
+    
+    HADOOP-8794. Rename YARN_HOME to HADOOP_YARN_HOME. (vinodkv via acmurthy)
 
   NEW FEATURES
  
@@ -323,6 +415,14 @@ Branch-2 ( Unreleased changes )
     HADOOP-8748. Refactor DFSClient retry utility methods to a new class
     in org.apache.hadoop.io.retry.  (Arun C Murthy via szetszwo)
 
+    HADOOP-8754. Deprecate all the RPC.getServer() variants.  (Brandon Li
+    via szetszwo)
+
+    HADOOP-8801. ExitUtil#terminate should capture the exception stack trace. (eli)
+
+    HADOOP-8819. Incorrectly & is used instead of && in some file system 
+    implementations. (Brandon Li via suresh)
+
   BUG FIXES
 
     HADOOP-8372. NetUtils.normalizeHostName() incorrectly handles hostname
@@ -455,6 +555,30 @@ Branch-2 ( Unreleased changes )
 
     HADOOP-8747. Syntax error on cmake version 2.6 patch 2 in JNIFlags.cmake. (cmccabe via tucu)
 
+    HADOOP-8722. Update BUILDING.txt with latest snappy info.
+    (Colin Patrick McCabe via eli)
+
+    HADOOP-8764. CMake: HADOOP-8737 broke ARM build. (Trevor Robinson via eli)
+
+    HADOOP-8770. NN should not RPC to self to find trash defaults. (eli)
+
+    HADOOP-8648. libhadoop: native CRC32 validation crashes when
+    io.bytes.per.checksum=1. (Colin Patrick McCabe via eli)
+
+    HADOOP-8766. FileContextMainOperationsBaseTest should randomize the root
+    dir. (Colin Patrick McCabe via atm)
+
+    HADOOP-8749. HADOOP-8031 changed the way in which relative xincludes are handled in 
+    Configuration. (ahmed via tucu)
+
+    HADOOP-8431. Running distcp wo args throws IllegalArgumentException.
+    (Sandy Ryza via eli)
+
+    HADOOP-8775. MR2 distcp permits non-positive value to -bandwidth option
+    which causes job never to complete. (Sandy Ryza via atm)
+
+    HADOOP-8781. hadoop-config.sh should add JAVA_LIBRARY_PATH to LD_LIBRARY_PATH. (tucu)
+
   BREAKDOWN OF HDFS-3042 SUBTASKS
 
     HADOOP-8220. ZKFailoverController doesn't handle failure to become active
@@ -857,6 +981,23 @@ Release 2.0.0-alpha - 05-23-2012
     HADOOP-8655. Fix TextInputFormat for large delimiters. (Gelesh via
     bobby) 
 
+Release 0.23.4 - UNRELEASED
+
+  INCOMPATIBLE CHANGES
+
+  NEW FEATURES
+
+  IMPROVEMENTS
+
+    HADOOP-8822. relnotes.py was deleted post mavenization (bobby)
+
+  OPTIMIZATIONS
+
+  BUG FIXES
+
+    HADOOP-8843. Old trash directories are never deleted on upgrade
+    from 1.x (jlowe)
+
 Release 0.23.3 - UNRELEASED
 
   INCOMPATIBLE CHANGES
@@ -996,6 +1137,9 @@ Release 0.23.3 - UNRELEASED
     HADOOP-8726. The Secrets in Credentials are not available to MR tasks
     (daryn and Benoy Antony via bobby)
 
+    HADOOP-8727. Gracefully deprecate dfs.umaskmode in 2.x onwards (Harsh J
+    via bobby)
+
 Release 0.23.2 - UNRELEASED 
 
   INCOMPATIBLE CHANGES

Propchange: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/CHANGES.txt
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:r1379224-1390762

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/pom.xml?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/pom.xml (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/pom.xml Wed Sep 26 22:55:00 2012
@@ -535,6 +535,20 @@
                   </target>
                 </configuration>
               </execution>
+              <execution>
+                <id>native_tests</id>
+                <phase>test</phase>
+                <goals><goal>run</goal></goals>
+                <configuration>
+                  <target>
+                    <exec executable="sh" failonerror="true" dir="${project.build.directory}/native">
+                      <arg value="-c"/>
+                      <arg value="[ x$SKIPTESTS = xtrue ] || ${project.build.directory}/native/test_bulk_crc32"/>
+                      <env key="SKIPTESTS" value="${skipTests}"/>
+                    </exec>
+                  </target>
+                </configuration>
+              </execution>
             </executions>
           </plugin>
         </plugins>

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/CMakeLists.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/CMakeLists.txt?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/CMakeLists.txt (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/CMakeLists.txt Wed Sep 26 22:55:00 2012
@@ -60,6 +60,7 @@ find_package(ZLIB REQUIRED)
 set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -g -Wall -O2")
 set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -D_REENTRANT -D_FILE_OFFSET_BITS=64")
 set(D main/native/src/org/apache/hadoop)
+set(T main/native/src/test/org/apache/hadoop)
 
 GET_FILENAME_COMPONENT(HADOOP_ZLIB_LIBRARY ${ZLIB_LIBRARIES} NAME)
 
@@ -98,9 +99,17 @@ include_directories(
     ${JNI_INCLUDE_DIRS}
     ${ZLIB_INCLUDE_DIRS}
     ${SNAPPY_INCLUDE_DIR}
+    ${D}/util
 )
 CONFIGURE_FILE(${CMAKE_SOURCE_DIR}/config.h.cmake ${CMAKE_BINARY_DIR}/config.h)
 
+add_executable(test_bulk_crc32
+    ${D}/util/bulk_crc32.c
+    ${T}/util/test_bulk_crc32.c
+)
+set_property(SOURCE main.cpp PROPERTY INCLUDE_DIRECTORIES "\"-Werror\" \"-Wall\"")
+
+SET(CMAKE_BUILD_WITH_INSTALL_RPATH TRUE)
 add_dual_library(hadoop
     ${D}/io/compress/lz4/Lz4Compressor.c
     ${D}/io/compress/lz4/Lz4Decompressor.c
@@ -117,6 +126,17 @@ add_dual_library(hadoop
     ${D}/util/NativeCrc32.c
     ${D}/util/bulk_crc32.c
 )
+
+IF (${CMAKE_SYSTEM_NAME} MATCHES "Linux")
+    #
+    # By embedding '$ORIGIN' into the RPATH of libhadoop.so,
+    # dlopen will look in the directory containing libhadoop.so.
+    # However, $ORIGIN is not supported by all operating systems.
+    #
+    SET_TARGET_PROPERTIES(hadoop 
+        PROPERTIES INSTALL_RPATH "\$ORIGIN/")
+ENDIF()
+
 target_link_dual_libraries(hadoop
     dl
     ${JAVA_JVM_LIBRARY}

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/JNIFlags.cmake
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/JNIFlags.cmake?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/JNIFlags.cmake (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/JNIFlags.cmake Wed Sep 26 22:55:00 2012
@@ -76,6 +76,8 @@ IF("${CMAKE_SYSTEM}" MATCHES "Linux")
         SET(_java_libarch "i386")
     ELSEIF (CMAKE_SYSTEM_PROCESSOR STREQUAL "x86_64" OR CMAKE_SYSTEM_PROCESSOR STREQUAL "amd64")
         SET(_java_libarch "amd64")
+    ELSEIF (CMAKE_SYSTEM_PROCESSOR MATCHES "^arm")
+        SET(_java_libarch "arm")
     ELSE()
         SET(_java_libarch ${CMAKE_SYSTEM_PROCESSOR})
     ENDIF()

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/contrib/bash-tab-completion/hadoop.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/contrib/bash-tab-completion/hadoop.sh?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/contrib/bash-tab-completion/hadoop.sh (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/contrib/bash-tab-completion/hadoop.sh Wed Sep 26 22:55:00 2012
@@ -26,7 +26,7 @@ _hadoop() {
   COMPREPLY=()
   cur=${COMP_WORDS[COMP_CWORD]}
   prev=${COMP_WORDS[COMP_CWORD-1]}  
-  script=${COMP_WORDS[0]}  
+  script=`which ${COMP_WORDS[0]}`
   
   # Bash lets you tab complete things even if the script doesn't
   # exist (or isn't executable). Check to make sure it is, as we

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh Wed Sep 26 22:55:00 2012
@@ -74,6 +74,10 @@ fi
 
 export HADOOP_CONF_DIR="${HADOOP_CONF_DIR:-$HADOOP_PREFIX/$DEFAULT_CONF_DIR}"
 
+if [ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]; then
+  . "${HADOOP_CONF_DIR}/hadoop-env.sh"
+fi
+
 # User can specify hostnames or a file where the hostnames are (not both)
 if [[ ( "$HADOOP_SLAVES" != '' ) && ( "$HADOOP_SLAVE_NAMES" != '' ) ]] ; then
   echo \
@@ -113,9 +117,6 @@ case "`uname`" in
 CYGWIN*) cygwin=true;;
 esac
 
-if [ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]; then
-  . "${HADOOP_CONF_DIR}/hadoop-env.sh"
-fi
 
 # check if net.ipv6.bindv6only is set to 1
 bindv6only=$(/sbin/sysctl -n net.ipv6.bindv6only 2> /dev/null)
@@ -243,6 +244,7 @@ HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.id.st
 HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.root.logger=${HADOOP_ROOT_LOGGER:-INFO,console}"
 if [ "x$JAVA_LIBRARY_PATH" != "x" ]; then
   HADOOP_OPTS="$HADOOP_OPTS -Djava.library.path=$JAVA_LIBRARY_PATH"
+  export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$JAVA_LIBRARY_PATH
 fi  
 HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.policy.file=$HADOOP_POLICYFILE"
 
@@ -267,21 +269,21 @@ fi
 CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME/$HDFS_DIR'/*'
 
 # put yarn in classpath if present
-if [ "$YARN_HOME" = "" ]; then
+if [ "$HADOOP_YARN_HOME" = "" ]; then
   if [ -d "${HADOOP_PREFIX}/$YARN_DIR" ]; then
-    export YARN_HOME=$HADOOP_PREFIX
+    export HADOOP_YARN_HOME=$HADOOP_PREFIX
   fi
 fi
 
-if [ -d "$YARN_HOME/$YARN_DIR/webapps" ]; then
-  CLASSPATH=${CLASSPATH}:$YARN_HOME/$YARN_DIR
+if [ -d "$HADOOP_YARN_HOME/$YARN_DIR/webapps" ]; then
+  CLASSPATH=${CLASSPATH}:$HADOOP_YARN_HOME/$YARN_DIR
 fi
 
-if [ -d "$YARN_HOME/$YARN_LIB_JARS_DIR" ]; then
-  CLASSPATH=${CLASSPATH}:$YARN_HOME/$YARN_LIB_JARS_DIR'/*'
+if [ -d "$HADOOP_YARN_HOME/$YARN_LIB_JARS_DIR" ]; then
+  CLASSPATH=${CLASSPATH}:$HADOOP_YARN_HOME/$YARN_LIB_JARS_DIR'/*'
 fi
 
-CLASSPATH=${CLASSPATH}:$YARN_HOME/$YARN_DIR'/*'
+CLASSPATH=${CLASSPATH}:$HADOOP_YARN_HOME/$YARN_DIR'/*'
 
 # put mapred in classpath if present AND different from YARN
 if [ "$HADOOP_MAPRED_HOME" = "" ]; then
@@ -290,7 +292,7 @@ if [ "$HADOOP_MAPRED_HOME" = "" ]; then
   fi
 fi
 
-if [ "$HADOOP_MAPRED_HOME/$MAPRED_DIR" != "$YARN_HOME/$YARN_DIR" ] ; then
+if [ "$HADOOP_MAPRED_HOME/$MAPRED_DIR" != "$HADOOP_YARN_HOME/$YARN_DIR" ] ; then
   if [ -d "$HADOOP_MAPRED_HOME/$MAPRED_DIR/webapps" ]; then
     CLASSPATH=${CLASSPATH}:$HADOOP_MAPRED_HOME/$MAPRED_DIR
   fi

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/bin/slaves.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/bin/slaves.sh?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/bin/slaves.sh (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/bin/slaves.sh Wed Sep 26 22:55:00 2012
@@ -42,9 +42,6 @@ DEFAULT_LIBEXEC_DIR="$bin"/../libexec
 HADOOP_LIBEXEC_DIR=${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR}
 . $HADOOP_LIBEXEC_DIR/hadoop-config.sh
 
-if [ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]; then
-  . "${HADOOP_CONF_DIR}/hadoop-env.sh"
-fi
 
 # Where to start the script, see hadoop-config.sh
 # (it set up the variables based on command line options)

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/bin/start-all.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/bin/start-all.sh?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/bin/start-all.sh (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/bin/start-all.sh Wed Sep 26 22:55:00 2012
@@ -33,6 +33,6 @@ if [ -f "${HADOOP_HDFS_HOME}"/sbin/start
 fi
 
 # start yarn daemons if yarn is present
-if [ -f "${YARN_HOME}"/sbin/start-yarn.sh ]; then
-  "${YARN_HOME}"/sbin/start-yarn.sh --config $HADOOP_CONF_DIR
+if [ -f "${HADOOP_YARN_HOME}"/sbin/start-yarn.sh ]; then
+  "${HADOOP_YARN_HOME}"/sbin/start-yarn.sh --config $HADOOP_CONF_DIR
 fi

Propchange: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/docs/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/docs:r1379224-1390762

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/file_system_shell.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/file_system_shell.xml?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/file_system_shell.xml (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/file_system_shell.xml Wed Sep 26 22:55:00 2012
@@ -418,15 +418,15 @@
 				<code>Usage: hdfs dfs -rm [-skipTrash] URI [URI &#x2026;] </code>
 			</p>
 			<p>
-	   Delete files specified as args. Only deletes non empty directory and files. If the <code>-skipTrash</code> option
+	   Delete files specified as args. Only deletes files. If the <code>-skipTrash</code> option
 	   is specified, the trash, if enabled, will be bypassed and the specified file(s) deleted immediately.  	This can be
 		   useful when it is necessary to delete files from an over-quota directory.
-	   Refer to rmr for recursive deletes.<br/>
+	   Use -rm -r or rmr for recursive deletes.<br/>
 	   Example:
 	   </p>
 			<ul>
 				<li>
-					<code> hdfs dfs -rm hdfs://nn.example.com/file /user/hadoop/emptydir </code>
+					<code> hdfs dfs -rm hdfs://nn.example.com/file </code>
 				</li>
 			</ul>
 			<p>Exit Code:</p>
@@ -442,7 +442,7 @@
 			<p>
 				<code>Usage: hdfs dfs -rmr [-skipTrash] URI [URI &#x2026;]</code>
 			</p>
-			<p>Recursive version of delete. If the <code>-skipTrash</code> option
+			<p>Recursive version of delete. The rmr command recursively deletes the directory and any content under it. If the <code>-skipTrash</code> option
 		   is specified, the trash, if enabled, will be bypassed and the specified file(s) deleted immediately. This can be
 		   useful when it is necessary to delete files from an over-quota directory.<br/>
 	   Example:

Propchange: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:r1379224-1390762

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java Wed Sep 26 22:55:00 2012
@@ -1073,7 +1073,7 @@ public class Configuration implements It
    */
   public boolean getBoolean(String name, boolean defaultValue) {
     String valueString = getTrimmed(name);
-    if (null == valueString || "".equals(valueString)) {
+    if (null == valueString || valueString.isEmpty()) {
       return defaultValue;
     }
 
@@ -1140,7 +1140,7 @@ public class Configuration implements It
    */
   public Pattern getPattern(String name, Pattern defaultValue) {
     String valString = get(name);
-    if (null == valString || "".equals(valString)) {
+    if (null == valString || valString.isEmpty()) {
       return defaultValue;
     }
     try {
@@ -1871,11 +1871,11 @@ public class Configuration implements It
     if (url == null) {
       return null;
     }
-    return parse(builder, url.openStream());
+    return parse(builder, url.openStream(), url.toString());
   }
 
-  private Document parse(DocumentBuilder builder, InputStream is)
-      throws IOException, SAXException {
+  private Document parse(DocumentBuilder builder, InputStream is,
+      String systemId) throws IOException, SAXException {
     if (!quietmode) {
       LOG.info("parsing input stream " + is);
     }
@@ -1883,7 +1883,8 @@ public class Configuration implements It
       return null;
     }
     try {
-      return builder.parse(is);
+      return (systemId == null) ? builder.parse(is) : builder.parse(is,
+          systemId);
     } finally {
       is.close();
     }
@@ -1951,10 +1952,11 @@ public class Configuration implements It
           if (!quiet) {
             LOG.info("parsing File " + file);
           }
-          doc = parse(builder, new BufferedInputStream(new FileInputStream(file)));
+          doc = parse(builder, new BufferedInputStream(
+              new FileInputStream(file)), ((Path)resource).toString());
         }
       } else if (resource instanceof InputStream) {
-        doc = parse(builder, (InputStream) resource);
+        doc = parse(builder, (InputStream) resource, null);
         returnCachedProperties = true;
       } else if (resource instanceof Properties) {
         overlay(properties, (Properties)resource);
@@ -2324,11 +2326,23 @@ public class Configuration implements It
                new String[]{CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY});
     Configuration.addDeprecation("fs.default.name", 
                new String[]{CommonConfigurationKeys.FS_DEFAULT_NAME_KEY});
+    Configuration.addDeprecation("dfs.umaskmode",
+        new String[]{CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY});
   }
   
   /**
    * A unique class which is used as a sentinel value in the caching
-   * for getClassByName. {@see Configuration#getClassByNameOrNull(String)}
+   * for getClassByName. {@link Configuration#getClassByNameOrNull(String)}
    */
   private static abstract class NegativeCacheSentinel {}
+
+  public static void dumpDeprecatedKeys() {
+    for (Map.Entry<String, DeprecatedKeyInfo> entry : deprecatedKeyMap.entrySet()) {
+      StringBuilder newKeys = new StringBuilder();
+      for (String newKey : entry.getValue().newKeys) {
+        newKeys.append(newKey).append("\t");
+      }
+      System.out.println(entry.getKey() + "\t" + newKeys.toString());
+    }
+  }
 }

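For context on the parse() change above (cf. HADOOP-8749 in CHANGES.txt): passing the resource's systemId gives the XML parser a base URI, so relative XIncludes resolve against the including file. A minimal standalone sketch, with a hypothetical conf/core-site.xml path:

    import java.io.FileInputStream;
    import javax.xml.parsers.DocumentBuilder;
    import javax.xml.parsers.DocumentBuilderFactory;
    import org.w3c.dom.Document;

    public class XIncludeSystemIdSketch {
      public static void main(String[] args) throws Exception {
        DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        factory.setNamespaceAware(true);
        factory.setXIncludeAware(true);    // enable XInclude processing
        DocumentBuilder builder = factory.newDocumentBuilder();
        String path = "conf/core-site.xml";  // hypothetical resource
        // With a systemId the parser has a base URI, so a relative
        // <xi:include href="extra.xml"/> resolves next to core-site.xml;
        // without one, relative hrefs have nothing to resolve against.
        Document doc = builder.parse(new FileInputStream(path), path);
        System.out.println(doc.getDocumentElement().getNodeName());
      }
    }
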
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationServlet.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationServlet.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationServlet.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationServlet.java Wed Sep 26 22:55:00 2012
@@ -153,9 +153,9 @@ public class ReconfigurationServlet exte
           StringEscapeUtils.unescapeHtml(req.getParameter(rawParam));
         if (value != null) {
           if (value.equals(newConf.getRaw(param)) || value.equals("default") ||
-              value.equals("null") || value.equals("")) {
+              value.equals("null") || value.isEmpty()) {
             if ((value.equals("default") || value.equals("null") || 
-                 value.equals("")) && 
+                 value.isEmpty()) && 
                 oldConf.getRaw(param) != null) {
               out.println("<p>Changed \"" + 
                           StringEscapeUtils.escapeHtml(param) + "\" from \"" +
@@ -163,7 +163,7 @@ public class ReconfigurationServlet exte
                           "\" to default</p>");
               reconf.reconfigureProperty(param, null);
             } else if (!value.equals("default") && !value.equals("null") &&
-                       !value.equals("") && 
+                       !value.isEmpty() && 
                        (oldConf.getRaw(param) == null || 
                         !oldConf.getRaw(param).equals(value))) {
               // change from default or value to different value

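The isEmpty() substitutions here and in Configuration.java above (HADOOP-8814) are only equivalent to "".equals(x) when x is known to be non-null, which is why each call site keeps an explicit null check. A minimal sketch of the difference:

    public class IsEmptySketch {
      public static void main(String[] args) {
        String s = null;
        System.out.println("".equals(s));             // false: null-safe
        System.out.println(s != null && s.isEmpty()); // false: bare s.isEmpty() would NPE
        s = "";
        System.out.println(s.isEmpty());              // true
      }
    }
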
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java Wed Sep 26 22:55:00 2012
@@ -242,5 +242,11 @@ public class CommonConfigurationKeysPubl
   public static final String HADOOP_SSL_ENABLED_KEY = "hadoop.ssl.enabled";
   public static final boolean HADOOP_SSL_ENABLED_DEFAULT = false;
 
+  /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
+  public static final String HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN =
+          "hadoop.kerberos.min.seconds.before.relogin";
+  /** Default value for HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN */
+  public static final int HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN_DEFAULT =
+          60;
 }
 

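A minimal sketch, not from this patch, of how client code might read the relogin key added for HADOOP-7930 (UserGroupInformation's actual accessor is not shown in this hunk):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.CommonConfigurationKeysPublic;

    public class ReloginConfSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Falls back to the 60-second default when the key is unset.
        int minSecondsBeforeRelogin = conf.getInt(
            CommonConfigurationKeysPublic.HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN,
            CommonConfigurationKeysPublic.HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN_DEFAULT);
        System.out.println(minSecondsBeforeRelogin);
      }
    }
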
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java Wed Sep 26 22:55:00 2012
@@ -2003,7 +2003,7 @@ public final class FileContext {
       String filename = inPathPattern.toUri().getPath();
       
       // path has only zero component
-      if ("".equals(filename) || Path.SEPARATOR.equals(filename)) {
+      if (filename.isEmpty() || Path.SEPARATOR.equals(filename)) {
         Path p = inPathPattern.makeQualified(uri, null);
         return getFileStatus(new Path[]{p});
       }

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java Wed Sep 26 22:55:00 2012
@@ -1597,7 +1597,7 @@ public abstract class FileSystem extends
     String filename = pathPattern.toUri().getPath();
     
     // path has only zero component
-    if ("".equals(filename) || Path.SEPARATOR.equals(filename)) {
+    if (filename.isEmpty() || Path.SEPARATOR.equals(filename)) {
       return getFileStatus(new Path[]{pathPattern});
     }
 

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java Wed Sep 26 22:55:00 2012
@@ -265,6 +265,9 @@ public class LocalDirAllocator {
     private synchronized void confChanged(Configuration conf) 
         throws IOException {
       String newLocalDirs = conf.get(contextCfgItemName);
+      if (null == newLocalDirs) {
+        throw new IOException(contextCfgItemName + " not configured");
+      }
       if (!newLocalDirs.equals(savedLocalDirs)) {
         localDirs = StringUtils.getTrimmedStrings(newLocalDirs);
         localFS = FileSystem.getLocal(conf);

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32FileChecksum.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32FileChecksum.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32FileChecksum.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32FileChecksum.java Wed Sep 26 22:55:00 2012
@@ -133,7 +133,7 @@ public class MD5MD5CRC32FileChecksum ext
 
     try {
       // old versions don't support crcType.
-      if (crcType == null || crcType == "") {
+      if (crcType == null || crcType.equals("")) {
         finalCrcType = DataChecksum.Type.CRC32;
       } else {
         finalCrcType = DataChecksum.Type.valueOf(crcType);

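The fix above (HADOOP-8818) matters because == on Strings compares references rather than contents; it only appears to work when both operands happen to be interned. A minimal sketch:

    public class StringEqualitySketch {
      public static void main(String[] args) {
        String a = "CRC32";
        String b = new String("CRC32");   // same contents, different object
        System.out.println(a == b);       // false: reference comparison
        System.out.println(a.equals(b));  // true: content comparison
      }
    }
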
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java Wed Sep 26 22:55:00 2012
@@ -68,7 +68,7 @@ public class Path implements Comparable 
     // Add a slash to parent's path so resolution is compatible with URI's
     URI parentUri = parent.uri;
     String parentPath = parentUri.getPath();
-    if (!(parentPath.equals("/") || parentPath.equals(""))) {
+    if (!(parentPath.equals("/") || parentPath.isEmpty())) {
       try {
         parentUri = new URI(parentUri.getScheme(), parentUri.getAuthority(),
                       parentUri.getPath()+"/", null, parentUri.getFragment());

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java Wed Sep 26 22:55:00 2012
@@ -492,7 +492,7 @@ public class RawLocalFileSystem extends 
      * owner.equals("").
      */
     private boolean isPermissionLoaded() {
-      return !super.getOwner().equals(""); 
+      return !super.getOwner().isEmpty(); 
     }
     
     RawLocalFileStatus(File f, long defaultBlockSize, FileSystem fs) {

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Trash.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Trash.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Trash.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Trash.java Wed Sep 26 22:55:00 2012
@@ -68,8 +68,26 @@ public class Trash extends Configured {
   public static boolean moveToAppropriateTrash(FileSystem fs, Path p,
       Configuration conf) throws IOException {
     Path fullyResolvedPath = fs.resolvePath(p);
-    Trash trash = new Trash(FileSystem.get(fullyResolvedPath.toUri(), conf), conf);
-    boolean success =  trash.moveToTrash(fullyResolvedPath);
+    FileSystem fullyResolvedFs =
+        FileSystem.get(fullyResolvedPath.toUri(), conf);
+    // If the trash interval is configured server side then clobber this
+    // configuration so that we always respect the server configuration.
+    try {
+      long trashInterval = fullyResolvedFs.getServerDefaults(
+          fullyResolvedPath).getTrashInterval();
+      if (0 != trashInterval) {
+        Configuration confCopy = new Configuration(conf);
+        confCopy.setLong(CommonConfigurationKeysPublic.FS_TRASH_INTERVAL_KEY,
+            trashInterval);
+        conf = confCopy;
+      }
+    } catch (Exception e) {
+      // If we can not determine that trash is enabled server side then
+      // bail rather than potentially deleting a file when trash is enabled.
+      throw new IOException("Failed to get server trash configuration", e);
+    }
+    Trash trash = new Trash(fullyResolvedFs, conf);
+    boolean success = trash.moveToTrash(fullyResolvedPath);
     if (success) {
       System.out.println("Moved: '" + p + "' to trash at: " +
           trash.getCurrentTrashDir() );

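Note that the change above deliberately copies the Configuration before overriding the trash interval, so the caller's conf is never mutated. A minimal sketch of that copy-then-override pattern:

    import org.apache.hadoop.conf.Configuration;

    public class ConfCopySketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // The copy constructor clones the properties; mutating the copy
        // leaves the caller's Configuration untouched.
        Configuration confCopy = new Configuration(conf);
        confCopy.setLong("fs.trash.interval", 60L);
        System.out.println(conf.getLong("fs.trash.interval", 0L));     // 0
        System.out.println(confCopy.getLong("fs.trash.interval", 0L)); // 60
      }
    }
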
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java Wed Sep 26 22:55:00 2012
@@ -61,6 +61,9 @@ public class TrashPolicyDefault extends 
     new FsPermission(FsAction.ALL, FsAction.NONE, FsAction.NONE);
 
   private static final DateFormat CHECKPOINT = new SimpleDateFormat("yyMMddHHmmss");
+  /** Format of checkpoint directories used prior to Hadoop 0.23. */
+  private static final DateFormat OLD_CHECKPOINT =
+      new SimpleDateFormat("yyMMddHHmm");
   private static final int MSECS_PER_MINUTE = 60*1000;
 
   private Path current;
@@ -69,8 +72,9 @@ public class TrashPolicyDefault extends 
 
   public TrashPolicyDefault() { }
 
-  private TrashPolicyDefault(Path home, Configuration conf) throws IOException {
-    initialize(conf, home.getFileSystem(conf), home);
+  private TrashPolicyDefault(FileSystem fs, Path home, Configuration conf)
+      throws IOException {
+    initialize(conf, fs, home);
   }
 
   @Override
@@ -79,24 +83,9 @@ public class TrashPolicyDefault extends 
     this.trash = new Path(home, TRASH);
     this.homesParent = home.getParent();
     this.current = new Path(trash, CURRENT);
-    long trashInterval = 0;
-    try {
-      trashInterval = fs.getServerDefaults(home).getTrashInterval();
-    } catch (IOException ioe) {
-      LOG.warn("Unable to get server defaults", ioe);
-    }
-    // If the trash interval is not configured or is disabled on the
-    // server side then check the config which may be client side.
-    if (0 == trashInterval) {
-      this.deletionInterval = (long)(conf.getFloat(
-          FS_TRASH_INTERVAL_KEY, FS_TRASH_INTERVAL_DEFAULT)
-          * MSECS_PER_MINUTE);
-    } else {
-      this.deletionInterval = trashInterval * MSECS_PER_MINUTE;
-    }
-    // For the checkpoint interval use the given config instead of
-    // checking the server as it's OK if a client starts an emptier
-    // with a different interval than the server.
+    this.deletionInterval = (long)(conf.getFloat(
+        FS_TRASH_INTERVAL_KEY, FS_TRASH_INTERVAL_DEFAULT)
+        * MSECS_PER_MINUTE);
     this.emptierInterval = (long)(conf.getFloat(
         FS_TRASH_CHECKPOINT_INTERVAL_KEY, FS_TRASH_CHECKPOINT_INTERVAL_DEFAULT)
         * MSECS_PER_MINUTE);
@@ -217,9 +206,7 @@ public class TrashPolicyDefault extends 
 
       long time;
       try {
-        synchronized (CHECKPOINT) {
-          time = CHECKPOINT.parse(name).getTime();
-        }
+        time = getTimeFromCheckpoint(name);
       } catch (ParseException e) {
         LOG.warn("Unexpected item in trash: "+dir+". Ignoring.");
         continue;
@@ -293,7 +280,8 @@ public class TrashPolicyDefault extends 
               if (!home.isDirectory())
                 continue;
               try {
-                TrashPolicyDefault trash = new TrashPolicyDefault(home.getPath(), conf);
+                TrashPolicyDefault trash = new TrashPolicyDefault(
+                    fs, home.getPath(), conf);
                 trash.deleteCheckpoint();
                 trash.createCheckpoint();
               } catch (IOException e) {
@@ -319,4 +307,22 @@ public class TrashPolicyDefault extends 
       return (time / interval) * interval;
     }
   }
+
+  private long getTimeFromCheckpoint(String name) throws ParseException {
+    long time;
+
+    try {
+      synchronized (CHECKPOINT) {
+        time = CHECKPOINT.parse(name).getTime();
+      }
+    } catch (ParseException pe) {
+      // Check for old-style checkpoint directories left over
+      // after an upgrade from Hadoop 1.x
+      synchronized (OLD_CHECKPOINT) {
+        time = OLD_CHECKPOINT.parse(name).getTime();
+      }
+    }
+
+    return time;
+  }
 }

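A standalone sketch (not Hadoop code) of the fallback idiom added above (cf. HADOOP-8843): try the current 12-digit checkpoint format first, then the pre-0.23 10-digit one left over from a 1.x install. SimpleDateFormat is not thread-safe, hence the synchronized blocks:

    import java.text.ParseException;
    import java.text.SimpleDateFormat;

    public class CheckpointParseSketch {
      // Current (12-digit) and pre-0.23 (10-digit) checkpoint name formats.
      private static final SimpleDateFormat CHECKPOINT =
          new SimpleDateFormat("yyMMddHHmmss");
      private static final SimpleDateFormat OLD_CHECKPOINT =
          new SimpleDateFormat("yyMMddHHmm");

      static long getTimeFromCheckpoint(String name) throws ParseException {
        try {
          synchronized (CHECKPOINT) {
            return CHECKPOINT.parse(name).getTime();
          }
        } catch (ParseException pe) {
          // Fall back to an old-style checkpoint name.
          synchronized (OLD_CHECKPOINT) {
            return OLD_CHECKPOINT.parse(name).getTime();
          }
        }
      }

      public static void main(String[] args) throws ParseException {
        System.out.println(getTimeFromCheckpoint("120926225500")); // current format
        System.out.println(getTimeFromCheckpoint("1209262255"));   // pre-0.23 format
      }
    }
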
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java Wed Sep 26 22:55:00 2012
@@ -488,7 +488,7 @@ public class FTPFileSystem extends FileS
       if (created) {
         String parentDir = parent.toUri().getPath();
         client.changeWorkingDirectory(parentDir);
-        created = created & client.makeDirectory(pathName);
+        created = created && client.makeDirectory(pathName);
       }
     } else if (isFile(client, absolute)) {
       throw new IOException(String.format(

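Note on the hunk above: the operator change is more than style. On booleans, & is the non-short-circuiting AND, so both operands are always evaluated and client.makeDirectory(pathName) was issued even when created was already false; && skips the right-hand side once the result is decided. The same fix recurs below in FTPInputStream, S3InputStream, ViewFileSystem and ViewFs. A small demonstration:

    class ShortCircuitDemo {
      static boolean sideEffect() {
        System.out.println("right-hand side evaluated");
        return true;
      }

      public static void main(String[] args) {
        boolean created = false;
        boolean a = created & sideEffect();   // prints: & evaluates both operands
        boolean b = created && sideEffect();  // silent: && short-circuits on false
        System.out.println(a + " " + b);      // false false
      }
    }
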
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPInputStream.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPInputStream.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPInputStream.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPInputStream.java Wed Sep 26 22:55:00 2012
@@ -77,7 +77,7 @@ public class FTPInputStream extends FSIn
     if (byteRead >= 0) {
       pos++;
     }
-    if (stats != null & byteRead >= 0) {
+    if (stats != null && byteRead >= 0) {
       stats.incrementBytesRead(1);
     }
     return byteRead;
@@ -93,7 +93,7 @@ public class FTPInputStream extends FSIn
     if (result > 0) {
       pos += result;
     }
-    if (stats != null & result > 0) {
+    if (stats != null && result > 0) {
       stats.incrementBytesRead(result);
     }
 

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/local/RawLocalFs.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/local/RawLocalFs.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/local/RawLocalFs.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/local/RawLocalFs.java Wed Sep 26 22:55:00 2012
@@ -128,7 +128,7 @@ public class RawLocalFs extends Delegate
     try {
       FileStatus fs = getFileStatus(f);
       // If f refers to a regular file or directory      
-      if ("".equals(target)) {
+      if (target.isEmpty()) {
         return fs;
       }
       // Otherwise f refers to a symlink
@@ -150,7 +150,7 @@ public class RawLocalFs extends Delegate
        * the readBasicFileAttributes method in java.nio.file.attributes 
        * when available.
        */
-      if (!"".equals(target)) {
+      if (!target.isEmpty()) {
         return new FileStatus(0, false, 0, 0, 0, 0, FsPermission.getDefault(), 
             "", "", new Path(target), f);        
       }

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/S3InputStream.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/S3InputStream.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/S3InputStream.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/S3InputStream.java Wed Sep 26 22:55:00 2012
@@ -113,7 +113,7 @@ class S3InputStream extends FSInputStrea
         pos++;
       }
     }
-    if (stats != null & result >= 0) {
+    if (stats != null && result >= 0) {
       stats.incrementBytesRead(1);
     }
     return result;

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java Wed Sep 26 22:55:00 2012
@@ -300,7 +300,7 @@ public class NativeS3FileSystem extends 
   }
   
   private static String pathToKey(Path path) {
-    if (path.toUri().getScheme() != null && "".equals(path.toUri().getPath())) {
+    if (path.toUri().getScheme() != null && path.toUri().getPath().isEmpty()) {
       // allow uris without trailing slash after bucket to refer to root,
       // like s3n://mybucket
       return "";

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java Wed Sep 26 22:55:00 2012
@@ -17,11 +17,21 @@
  */
 package org.apache.hadoop.fs.shell;
 
-import java.io.IOException;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
 import java.io.InputStream;
+import java.io.IOException;
 import java.util.LinkedList;
 import java.util.zip.GZIPInputStream;
 
+import org.apache.avro.file.DataFileReader;
+import org.apache.avro.file.FileReader;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.io.DatumWriter;
+import org.apache.avro.io.EncoderFactory;
+import org.apache.avro.io.JsonEncoder;
+import org.apache.avro.Schema;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -37,6 +47,10 @@ import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.io.compress.CompressionCodecFactory;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.codehaus.jackson.JsonEncoding;
+import org.codehaus.jackson.JsonFactory;
+import org.codehaus.jackson.JsonGenerator;
+import org.codehaus.jackson.util.MinimalPrettyPrinter;
 
 /**
  * Display contents of files 
@@ -95,14 +109,14 @@ class Display extends FsCommand {
   
   /**
    * Same behavior as "-cat", but handles zip and TextRecordInputStream
-   * encodings. 
+   * and Avro encodings. 
    */ 
   public static class Text extends Cat {
     public static final String NAME = "text";
     public static final String USAGE = Cat.USAGE;
     public static final String DESCRIPTION =
       "Takes a source file and outputs the file in text format.\n" +
-      "The allowed formats are zip and TextRecordInputStream.";
+      "The allowed formats are zip, TextRecordInputStream and Avro.";
     
     @Override
     protected InputStream getInputStream(PathData item) throws IOException {
@@ -128,10 +142,18 @@ class Display extends FsCommand {
           CompressionCodecFactory cf = new CompressionCodecFactory(getConf());
           CompressionCodec codec = cf.getCodec(item.path);
           if (codec != null) {
+            i.seek(0);
             return codec.createInputStream(i);
           }
           break;
         }
+        case 0x4f62: { // 'O' 'b'
+          if (i.readByte() == 'j') {
+            i.close();
+            return new AvroFileInputStream(item.stat);
+          }
+          break;
+        }
       }
 
       // File is non-compressed, or not a file container we know.
@@ -187,4 +209,68 @@ class Display extends FsCommand {
       super.close();
     }
   }
+
+  /**
+   * This class transforms a binary Avro data file into an InputStream
+   * with data that is in a human readable JSON format.
+   */
+  protected static class AvroFileInputStream extends InputStream {
+    private int pos;
+    private byte[] buffer;
+    private ByteArrayOutputStream output;
+    private FileReader fileReader;
+    private DatumWriter<Object> writer;
+    private JsonEncoder encoder;
+
+    public AvroFileInputStream(FileStatus status) throws IOException {
+      pos = 0;
+      buffer = new byte[0];
+      GenericDatumReader<Object> reader = new GenericDatumReader<Object>();
+      fileReader =
+        DataFileReader.openReader(new File(status.getPath().toUri()), reader);
+      Schema schema = fileReader.getSchema();
+      writer = new GenericDatumWriter<Object>(schema);
+      output = new ByteArrayOutputStream();
+      JsonGenerator generator =
+        new JsonFactory().createJsonGenerator(output, JsonEncoding.UTF8);
+      MinimalPrettyPrinter prettyPrinter = new MinimalPrettyPrinter();
+      prettyPrinter.setRootValueSeparator(System.getProperty("line.separator"));
+      generator.setPrettyPrinter(prettyPrinter);
+      encoder = EncoderFactory.get().jsonEncoder(schema, generator);
+    }
+
+    /**
+     * Read a single byte from the stream.
+     */
+    @Override
+    public int read() throws IOException {
+      if (pos < buffer.length) {
+        // Mask to 0-255: the bare byte would sign-extend, so bytes >= 0x80
+        // would come back negative and look like EOF/errors to callers.
+        return buffer[pos++] & 0xff;
+      }
+      if (!fileReader.hasNext()) {
+        return -1;
+      }
+      writer.write(fileReader.next(), encoder);
+      encoder.flush();
+      if (!fileReader.hasNext()) {
+        // Write a new line after the last Avro record.
+        output.write(System.getProperty("line.separator").getBytes());
+        output.flush();
+      }
+      pos = 0;
+      buffer = output.toByteArray();
+      output.reset();
+      return read();
+    }
+
+    /**
+     * Close the stream.
+     */
+    @Override
+    public void close() throws IOException {
+      fileReader.close();
+      output.close();
+      super.close();
+    }
+  }
 }
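
Note on the hunks above: the new case label is the first two bytes of the Avro object-container magic. Avro data files start with the ASCII bytes 'O' 'b' 'j' plus a version byte, so the switch on a two-byte prefix matches 0x4f62 and then confirms the 'j'. One caveat worth knowing: DataFileReader.openReader(File, reader) goes through java.io.File, so this code path presumably only handles files reachable on the local filesystem. A quick check of the magic arithmetic:

    class AvroMagicDemo {
      public static void main(String[] args) {
        // 'O' is 0x4f and 'b' is 0x62, so the two-byte prefix is 0x4f62.
        int firstTwoBytes = ('O' << 8) | 'b';
        System.out.printf("0x%04x%n", firstTwoBytes);  // prints 0x4f62
      }
    }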

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java Wed Sep 26 22:55:00 2012
@@ -707,7 +707,7 @@ public class ViewFileSystem extends File
     @Override
     public boolean mkdirs(Path dir, FsPermission permission)
         throws AccessControlException, FileAlreadyExistsException {
-      if (theInternalDir.isRoot & dir == null) {
+      if (theInternalDir.isRoot && dir == null) {
         throw new FileAlreadyExistsException("/ already exists");
       }
       // Note dir starts with /

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java Wed Sep 26 22:55:00 2012
@@ -750,7 +750,7 @@ public class ViewFs extends AbstractFile
     public void mkdir(final Path dir, final FsPermission permission,
         final boolean createParent) throws AccessControlException,
         FileAlreadyExistsException {
-      if (theInternalDir.isRoot & dir == null) {
+      if (theInternalDir.isRoot && dir == null) {
         throw new FileAlreadyExistsException("/ already exists");
       }
       throw readOnlyMountTable("mkdir", dir);

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java Wed Sep 26 22:55:00 2012
@@ -276,7 +276,7 @@ public class ActiveStandbyElector implem
 
     String pathParts[] = znodeWorkingDir.split("/");
     Preconditions.checkArgument(pathParts.length >= 1 &&
-        "".equals(pathParts[0]),
+        pathParts[0].isEmpty(),
         "Invalid path: %s", znodeWorkingDir);
     
     StringBuilder sb = new StringBuilder();
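
Note on the hunk above: pathParts[0].isEmpty() works because String.split on an absolute path yields a leading empty element, so the precondition is really "znodeWorkingDir must be an absolute path". For example:

    import java.util.Arrays;

    class SplitDemo {
      public static void main(String[] args) {
        // A leading empty element marks an absolute path...
        System.out.println(Arrays.toString("/hadoop-ha/cluster1".split("/")));
        // -> [, hadoop-ha, cluster1]

        // ...a relative path has none, so the precondition fails.
        System.out.println(Arrays.toString("relative/path".split("/")));
        // -> [relative, path]
      }
    }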

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/SshFenceByTcpPort.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/SshFenceByTcpPort.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/SshFenceByTcpPort.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/SshFenceByTcpPort.java Wed Sep 26 22:55:00 2012
@@ -241,7 +241,7 @@ public class SshFenceByTcpPort extends C
       sshPort = DEFAULT_SSH_PORT;
 
       // Parse optional user and ssh port
-      if (arg != null && !"".equals(arg)) {
+      if (arg != null && !arg.isEmpty()) {
         Matcher m = USER_PORT_RE.matcher(arg);
         if (!m.matches()) {
           throw new BadFencingConfigurationException(

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFCRpcServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFCRpcServer.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFCRpcServer.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFCRpcServer.java Wed Sep 26 22:55:00 2012
@@ -55,11 +55,10 @@ public class ZKFCRpcServer implements ZK
         new ZKFCProtocolServerSideTranslatorPB(this);
     BlockingService service = ZKFCProtocolService
         .newReflectiveBlockingService(translator);
-    this.server = RPC.getServer(
-        ZKFCProtocolPB.class,
-        service, bindAddr.getHostName(),
-            bindAddr.getPort(), HANDLER_COUNT, false, conf,
-            null /*secretManager*/);
+    this.server = new RPC.Builder(conf).setProtocol(ZKFCProtocolPB.class)
+        .setInstance(service).setBindAddress(bindAddr.getHostName())
+        .setPort(bindAddr.getPort()).setNumHandlers(HANDLER_COUNT)
+        .setVerbose(false).build();
     
     // set service-level authorization security policy
     if (conf.getBoolean(

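Note on the hunk above: replacing the positional RPC.getServer overload with RPC.Builder makes each argument self-describing at the call site and lets unset options (such as the secret manager, previously an explicit null) take sensible defaults. A generic sketch of the idiom, using hypothetical names rather than Hadoop's actual builder surface:

    // Sketch only: illustrates the builder idiom, not the real RPC.Builder API.
    class RpcServer {
      final String bindAddress; final int port; final int numHandlers; final boolean verbose;

      private RpcServer(Builder b) {
        bindAddress = b.bindAddress; port = b.port;
        numHandlers = b.numHandlers; verbose = b.verbose;
      }

      static class Builder {
        private String bindAddress = "0.0.0.0";  // defaults replace trailing nulls
        private int port = 0;
        private int numHandlers = 1;
        private boolean verbose = false;

        Builder setBindAddress(String a) { bindAddress = a; return this; }
        Builder setPort(int p) { port = p; return this; }
        Builder setNumHandlers(int n) { numHandlers = n; return this; }
        Builder setVerbose(boolean v) { verbose = v; return this; }
        RpcServer build() { return new RpcServer(this); }
      }
    }
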
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java Wed Sep 26 22:55:00 2012
@@ -677,6 +677,15 @@ public class HttpServer implements Filte
               "Problem in starting http server. Server handlers failed");
         }
       }
+      // Make sure there are no errors initializing the context.
+      Throwable unavailableException = webAppContext.getUnavailableException();
+      if (unavailableException != null) {
+        // Have to stop the webserver, or else its non-daemon threads
+        // will hang forever.
+        webServer.stop();
+        throw new IOException("Unable to initialize WebAppContext",
+            unavailableException);
+      }
     } catch (IOException e) {
       throw e;
     } catch (InterruptedException e) {
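
Note on the hunk above: Jetty records servlet and filter initialization failures on the context rather than failing start(), so without this check the HTTP server could report a successful start while serving a dead webapp, and Jetty's non-daemon threads would pin the JVM. The added block turns that into a fail-fast IOException. The shape of the pattern, sketched with a stand-in for WebAppContext.getUnavailableException():

    import java.io.IOException;

    class FailFastStart {
      interface WebApp { Throwable getInitFailure(); }  // stand-in for Jetty's API

      static void startOrThrow(Runnable start, Runnable stop, WebApp app)
          throws IOException {
        start.run();
        Throwable failure = app.getInitFailure();
        if (failure != null) {
          stop.run();  // stop non-daemon threads so the JVM can exit
          throw new IOException("web context failed to initialize", failure);
        }
      }
    }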

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DefaultStringifier.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DefaultStringifier.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DefaultStringifier.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DefaultStringifier.java Wed Sep 26 22:55:00 2012
@@ -192,7 +192,7 @@ public class DefaultStringifier<T> imple
       String[] parts = itemStr.split(SEPARATOR);
 
       for (String part : parts) {
-        if (!part.equals(""))
+        if (!part.isEmpty())
           list.add(stringifier.fromString(part));
       }
 

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java Wed Sep 26 22:55:00 2012
@@ -25,6 +25,7 @@ import java.nio.channels.FileChannel;
 import java.nio.channels.WritableByteChannel;
 
 import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -36,6 +37,7 @@ import org.apache.hadoop.conf.Configurat
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
 public class IOUtils {
+  public static final Log LOG = LogFactory.getLog(IOUtils.class);
 
   /**
    * Copies from one stream to another.
@@ -235,7 +237,7 @@ public class IOUtils {
       if (c != null) {
         try {
           c.close();
-        } catch(IOException e) {
+        } catch(Throwable e) {
           if (log != null && log.isDebugEnabled()) {
             log.debug("Exception in closing " + c, e);
           }
@@ -264,6 +266,7 @@ public class IOUtils {
       try {
         sock.close();
       } catch (IOException ignored) {
+        LOG.debug("Ignoring exception while closing socket", ignored);
       }
     }
   }
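
Note on the hunks above: widening the catch from IOException to Throwable means one misbehaving close() (say, a RuntimeException) no longer aborts cleanup of the remaining closeables or masks an exception already propagating. Typical use of cleanup() in a finally block (the stream names here are illustrative):

    import java.io.IOException;
    import java.io.InputStream;
    import java.io.OutputStream;

    import org.apache.commons.logging.Log;
    import org.apache.commons.logging.LogFactory;
    import org.apache.hadoop.io.IOUtils;

    class CopyWithCleanup {
      private static final Log LOG = LogFactory.getLog(CopyWithCleanup.class);

      static void copy(InputStream in, OutputStream out) throws IOException {
        try {
          IOUtils.copyBytes(in, out, 4096, false);
        } finally {
          // Never throws: close() failures are logged at debug level rather
          // than masking any exception thrown from copyBytes above.
          IOUtils.cleanup(LOG, in, out);
        }
      }
    }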

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java Wed Sep 26 22:55:00 2012
@@ -18,8 +18,9 @@
 
 package org.apache.hadoop.io;
 
-import java.io.*;
-import java.util.*;
+import java.io.DataInput;
+import java.io.IOException;
+import java.util.concurrent.ConcurrentHashMap;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -38,12 +39,11 @@ import org.apache.hadoop.util.Reflection
 @InterfaceStability.Stable
 public class WritableComparator implements RawComparator {
 
-  private static HashMap<Class, WritableComparator> comparators =
-    new HashMap<Class, WritableComparator>(); // registry
+  private static final ConcurrentHashMap<Class, WritableComparator> comparators =
+      new ConcurrentHashMap<Class, WritableComparator>(); // registry
 
   /** Get a comparator for a {@link WritableComparable} implementation. */
-  public static synchronized 
-  WritableComparator get(Class<? extends WritableComparable> c) {
+  public static WritableComparator get(Class<? extends WritableComparable> c) {
     WritableComparator comparator = comparators.get(c);
     if (comparator == null) {
       // force the static initializers to run
@@ -76,12 +76,10 @@ public class WritableComparator implemen
   /** Register an optimized comparator for a {@link WritableComparable}
    * implementation. Comparators registered with this method must be
    * thread-safe. */
-  public static synchronized void define(Class c,
-                                         WritableComparator comparator) {
+  public static void define(Class c, WritableComparator comparator) {
     comparators.put(c, comparator);
   }
 
-
   private final Class<? extends WritableComparable> keyClass;
   private final WritableComparable key1;
   private final WritableComparable key2;
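
Note on the hunks above: dropping synchronized in favor of ConcurrentHashMap removes lock contention on the hot get() path. The remaining check-then-act (get, maybe initialize, get again) is benign: at worst two threads build equivalent comparators and the last put wins, which is safe because registered comparators must be thread-safe and interchangeable. The idiom in isolation, with a hypothetical factory interface:

    import java.util.concurrent.ConcurrentHashMap;

    class Registry<K, V> {
      interface Factory<K, V> { V create(K key); }

      private final ConcurrentHashMap<K, V> map = new ConcurrentHashMap<K, V>();

      // Lock-free lookup; a registration race is resolved last-write-wins,
      // acceptable when all values created for a key are interchangeable.
      V getOrCreate(K key, Factory<K, V> factory) {
        V value = map.get(key);
        if (value == null) {
          value = factory.create(key);  // may run concurrently in two threads
          map.put(key, value);
        }
        return value;
      }
    }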

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java Wed Sep 26 22:55:00 2012
@@ -2105,7 +2105,7 @@ public class TFile {
     }
 
     public boolean isSorted() {
-      return !strComparator.equals("");
+      return !strComparator.isEmpty();
     }
 
     public String getComparatorString() {

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFileDumper.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFileDumper.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFileDumper.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFileDumper.java Wed Sep 26 22:55:00 2012
@@ -125,7 +125,7 @@ class TFileDumper {
           dataSizeUncompressed += region.getRawSize();
         }
         properties.put("Data Block Bytes", Long.toString(dataSize));
-        if (reader.readerBCF.getDefaultCompressionName() != "none") {
+        if (!reader.readerBCF.getDefaultCompressionName().equals("none")) {
           properties.put("Data Block Uncompressed Bytes", Long
               .toString(dataSizeUncompressed));
           properties.put("Data Block Compression Ratio", String.format(

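Note on the hunk above: comparing strings with != tests object identity, not contents. It happens to work when both sides are interned literals, but silently misbehaves once a value arrives from I/O, parsing or concatenation, which is why the equals() form is correct. For example:

    class StringIdentityDemo {
      public static void main(String[] args) {
        String literal = "none";
        String computed = new StringBuilder("no").append("ne").toString();
        System.out.println(literal != computed);        // true: distinct objects
        System.out.println(!literal.equals(computed));  // false: equal contents
      }
    }
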
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/SerializationFactory.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/SerializationFactory.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/SerializationFactory.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/SerializationFactory.java Wed Sep 26 22:55:00 2012
@@ -40,12 +40,12 @@ import org.apache.hadoop.util.Reflection
 @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
 @InterfaceStability.Evolving
 public class SerializationFactory extends Configured {
-  
-  private static final Log LOG =
+
+  static final Log LOG =
     LogFactory.getLog(SerializationFactory.class.getName());
 
   private List<Serialization<?>> serializations = new ArrayList<Serialization<?>>();
-  
+
   /**
    * <p>
    * Serializations are found by reading the <code>io.serializations</code>
@@ -55,15 +55,21 @@ public class SerializationFactory extend
    */
   public SerializationFactory(Configuration conf) {
     super(conf);
-    for (String serializerName : conf.getStrings(
-      CommonConfigurationKeys.IO_SERIALIZATIONS_KEY,
-      new String[]{WritableSerialization.class.getName(),
-        AvroSpecificSerialization.class.getName(),
-        AvroReflectSerialization.class.getName()})) {
-      add(conf, serializerName);
+    // "".equals(...) is null-safe: an unset key falls through to the defaults below.
+    if ("".equals(conf.get(CommonConfigurationKeys.IO_SERIALIZATIONS_KEY))) {
+      LOG.warn("Serialization for various data types may not be available. Please configure "
+          + CommonConfigurationKeys.IO_SERIALIZATIONS_KEY
+          + " properly to have serialization support (it is currently set to empty).");
+    } else {
+      for (String serializerName : conf.getStrings(
+          CommonConfigurationKeys.IO_SERIALIZATIONS_KEY, new String[] {
+              WritableSerialization.class.getName(),
+              AvroSpecificSerialization.class.getName(),
+              AvroReflectSerialization.class.getName() })) {
+        add(conf, serializerName);
+      }
     }
   }
-  
+
   @SuppressWarnings("unchecked")
   private void add(Configuration conf, String serializationName) {
     try {
@@ -101,5 +107,5 @@ public class SerializationFactory extend
     }
     return null;
   }
-  
+
 }
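
Note on the hunks above: an io.serializations key explicitly set to the empty string now yields a clear warning and a factory with no serializations, instead of an obscure failure later; an unset key still falls through to the configured or built-in defaults (note the null-safe "".equals(...) form above). A sketch of the two cases, assuming no deployment-specific core-site.xml overrides:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.serializer.SerializationFactory;

    class SerializationConfigDemo {
      public static void main(String[] args) {
        Configuration empty = new Configuration();
        empty.set("io.serializations", "");  // explicitly empty: logs the warning,
        new SerializationFactory(empty);     // and registers nothing

        Configuration defaults = new Configuration();
        new SerializationFactory(defaults);  // unset/default: Writable and Avro
                                             // serializations get registered
      }
    }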

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java Wed Sep 26 22:55:00 2012
@@ -83,6 +83,8 @@ import org.apache.hadoop.util.Time;
  * 
  * @see Server
  */
+@InterfaceAudience.LimitedPrivate(value = { "Common", "HDFS", "MapReduce", "Yarn" })
+@InterfaceStability.Evolving
 public class Client {
   
   public static final Log LOG = LogFactory.getLog(Client.class);


