hadoop-common-commits mailing list archives

From: sur...@apache.org
Subject: svn commit: r1362639 [1/3] - in /hadoop/common/branches/branch-1-win: ./ bin/ ivy/ lib/jdiff/ src/c++/libhdfs/ src/c++/libhdfs/m4/ src/c++/pipes/ src/c++/pipes/impl/ src/c++/task-controller/impl/ src/c++/utils/ src/c++/utils/impl/ src/contrib/streaming...
Date: Tue, 17 Jul 2012 20:36:12 GMT
Author: suresh
Date: Tue Jul 17 20:36:07 2012
New Revision: 1362639

URL: http://svn.apache.org/viewvc?rev=1362639&view=rev
Log:
Merging changes up to 1.0.3 into branch-1-win

Added:
    hadoop/common/branches/branch-1-win/lib/jdiff/hadoop_1.0.1.xml
      - copied unchanged from r1339303, hadoop/common/branches/branch-1.0/lib/jdiff/hadoop_1.0.1.xml
    hadoop/common/branches/branch-1-win/lib/jdiff/hadoop_1.0.2.xml
      - copied unchanged from r1339303, hadoop/common/branches/branch-1.0/lib/jdiff/hadoop_1.0.2.xml
    hadoop/common/branches/branch-1-win/src/contrib/streaming/src/java/org/apache/hadoop/streaming/AutoInputFormat.java
      - copied unchanged from r1339303, hadoop/common/branches/branch-1.0/src/contrib/streaming/src/java/org/apache/hadoop/streaming/AutoInputFormat.java
    hadoop/common/branches/branch-1-win/src/contrib/streaming/src/java/org/apache/hadoop/streaming/DumpTypedBytes.java
      - copied unchanged from r1339303, hadoop/common/branches/branch-1.0/src/contrib/streaming/src/java/org/apache/hadoop/streaming/DumpTypedBytes.java
    hadoop/common/branches/branch-1-win/src/contrib/streaming/src/java/org/apache/hadoop/streaming/LoadTypedBytes.java
      - copied unchanged from r1339303, hadoop/common/branches/branch-1.0/src/contrib/streaming/src/java/org/apache/hadoop/streaming/LoadTypedBytes.java
    hadoop/common/branches/branch-1-win/src/contrib/streaming/src/java/org/apache/hadoop/streaming/io/
      - copied from r1339303, hadoop/common/branches/branch-1.0/src/contrib/streaming/src/java/org/apache/hadoop/streaming/io/
    hadoop/common/branches/branch-1-win/src/contrib/streaming/src/java/org/apache/hadoop/streaming/io/IdentifierResolver.java
      - copied unchanged from r1339303, hadoop/common/branches/branch-1.0/src/contrib/streaming/src/java/org/apache/hadoop/streaming/io/IdentifierResolver.java
    hadoop/common/branches/branch-1-win/src/contrib/streaming/src/java/org/apache/hadoop/streaming/io/InputWriter.java
      - copied unchanged from r1339303, hadoop/common/branches/branch-1.0/src/contrib/streaming/src/java/org/apache/hadoop/streaming/io/InputWriter.java
    hadoop/common/branches/branch-1-win/src/contrib/streaming/src/java/org/apache/hadoop/streaming/io/OutputReader.java
      - copied unchanged from r1339303, hadoop/common/branches/branch-1.0/src/contrib/streaming/src/java/org/apache/hadoop/streaming/io/OutputReader.java
    hadoop/common/branches/branch-1-win/src/contrib/streaming/src/java/org/apache/hadoop/streaming/io/RawBytesInputWriter.java
      - copied unchanged from r1339303, hadoop/common/branches/branch-1.0/src/contrib/streaming/src/java/org/apache/hadoop/streaming/io/RawBytesInputWriter.java
    hadoop/common/branches/branch-1-win/src/contrib/streaming/src/java/org/apache/hadoop/streaming/io/RawBytesOutputReader.java
      - copied unchanged from r1339303, hadoop/common/branches/branch-1.0/src/contrib/streaming/src/java/org/apache/hadoop/streaming/io/RawBytesOutputReader.java
    hadoop/common/branches/branch-1-win/src/contrib/streaming/src/java/org/apache/hadoop/streaming/io/TextInputWriter.java
      - copied unchanged from r1339303, hadoop/common/branches/branch-1.0/src/contrib/streaming/src/java/org/apache/hadoop/streaming/io/TextInputWriter.java
    hadoop/common/branches/branch-1-win/src/contrib/streaming/src/java/org/apache/hadoop/streaming/io/TextOutputReader.java
      - copied unchanged from r1339303, hadoop/common/branches/branch-1.0/src/contrib/streaming/src/java/org/apache/hadoop/streaming/io/TextOutputReader.java
    hadoop/common/branches/branch-1-win/src/contrib/streaming/src/java/org/apache/hadoop/streaming/io/TypedBytesInputWriter.java
      - copied unchanged from r1339303, hadoop/common/branches/branch-1.0/src/contrib/streaming/src/java/org/apache/hadoop/streaming/io/TypedBytesInputWriter.java
    hadoop/common/branches/branch-1-win/src/contrib/streaming/src/java/org/apache/hadoop/streaming/io/TypedBytesOutputReader.java
      - copied unchanged from r1339303, hadoop/common/branches/branch-1.0/src/contrib/streaming/src/java/org/apache/hadoop/streaming/io/TypedBytesOutputReader.java
    hadoop/common/branches/branch-1-win/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/
      - copied from r1339303, hadoop/common/branches/branch-1.0/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/
    hadoop/common/branches/branch-1-win/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/Type.java
      - copied unchanged from r1339303, hadoop/common/branches/branch-1.0/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/Type.java
    hadoop/common/branches/branch-1-win/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/TypedBytesInput.java
      - copied unchanged from r1339303, hadoop/common/branches/branch-1.0/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/TypedBytesInput.java
    hadoop/common/branches/branch-1-win/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/TypedBytesOutput.java
      - copied unchanged from r1339303, hadoop/common/branches/branch-1.0/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/TypedBytesOutput.java
    hadoop/common/branches/branch-1-win/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/TypedBytesRecordInput.java
      - copied unchanged from r1339303, hadoop/common/branches/branch-1.0/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/TypedBytesRecordInput.java
    hadoop/common/branches/branch-1-win/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/TypedBytesRecordOutput.java
      - copied unchanged from r1339303, hadoop/common/branches/branch-1.0/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/TypedBytesRecordOutput.java
    hadoop/common/branches/branch-1-win/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/TypedBytesWritable.java
      - copied unchanged from r1339303, hadoop/common/branches/branch-1.0/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/TypedBytesWritable.java
    hadoop/common/branches/branch-1-win/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/TypedBytesWritableInput.java
      - copied unchanged from r1339303, hadoop/common/branches/branch-1.0/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/TypedBytesWritableInput.java
    hadoop/common/branches/branch-1-win/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/TypedBytesWritableOutput.java
      - copied unchanged from r1339303, hadoop/common/branches/branch-1.0/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/TypedBytesWritableOutput.java
    hadoop/common/branches/branch-1-win/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/package.html
      - copied unchanged from r1339303, hadoop/common/branches/branch-1.0/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/package.html
    hadoop/common/branches/branch-1-win/src/contrib/streaming/src/test/org/apache/hadoop/streaming/RawBytesMapApp.java
      - copied unchanged from r1339303, hadoop/common/branches/branch-1.0/src/contrib/streaming/src/test/org/apache/hadoop/streaming/RawBytesMapApp.java
    hadoop/common/branches/branch-1-win/src/contrib/streaming/src/test/org/apache/hadoop/streaming/RawBytesReduceApp.java
      - copied unchanged from r1339303, hadoop/common/branches/branch-1.0/src/contrib/streaming/src/test/org/apache/hadoop/streaming/RawBytesReduceApp.java
    hadoop/common/branches/branch-1-win/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestAutoInputFormat.java
      - copied unchanged from r1339303, hadoop/common/branches/branch-1.0/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestAutoInputFormat.java
    hadoop/common/branches/branch-1-win/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestDumpTypedBytes.java
      - copied unchanged from r1339303, hadoop/common/branches/branch-1.0/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestDumpTypedBytes.java
    hadoop/common/branches/branch-1-win/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestLoadTypedBytes.java
      - copied unchanged from r1339303, hadoop/common/branches/branch-1.0/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestLoadTypedBytes.java
    hadoop/common/branches/branch-1-win/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestRawBytesStreaming.java
      - copied unchanged from r1339303, hadoop/common/branches/branch-1.0/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestRawBytesStreaming.java
    hadoop/common/branches/branch-1-win/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestTypedBytesStreaming.java
      - copied unchanged from r1339303, hadoop/common/branches/branch-1.0/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestTypedBytesStreaming.java
    hadoop/common/branches/branch-1-win/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TypedBytesMapApp.java
      - copied unchanged from r1339303, hadoop/common/branches/branch-1.0/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TypedBytesMapApp.java
    hadoop/common/branches/branch-1-win/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TypedBytesReduceApp.java
      - copied unchanged from r1339303, hadoop/common/branches/branch-1.0/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TypedBytesReduceApp.java
    hadoop/common/branches/branch-1-win/src/contrib/streaming/src/test/org/apache/hadoop/typedbytes/
      - copied from r1339303, hadoop/common/branches/branch-1.0/src/contrib/streaming/src/test/org/apache/hadoop/typedbytes/
    hadoop/common/branches/branch-1-win/src/contrib/streaming/src/test/org/apache/hadoop/typedbytes/TestIO.java
      - copied unchanged from r1339303, hadoop/common/branches/branch-1.0/src/contrib/streaming/src/test/org/apache/hadoop/typedbytes/TestIO.java
    hadoop/common/branches/branch-1-win/src/contrib/streaming/src/test/org/apache/hadoop/typedbytes/TestTypedBytesWritable.java
      - copied unchanged from r1339303, hadoop/common/branches/branch-1.0/src/contrib/streaming/src/test/org/apache/hadoop/typedbytes/TestTypedBytesWritable.java
    hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/io/compress/SnappyCodec.java
      - copied unchanged from r1339303, hadoop/common/branches/branch-1.0/src/core/org/apache/hadoop/io/compress/SnappyCodec.java
    hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/io/compress/snappy/
      - copied from r1339303, hadoop/common/branches/branch-1.0/src/core/org/apache/hadoop/io/compress/snappy/
    hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/io/compress/snappy/LoadSnappy.java
      - copied unchanged from r1339303, hadoop/common/branches/branch-1.0/src/core/org/apache/hadoop/io/compress/snappy/LoadSnappy.java
    hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/io/compress/snappy/SnappyCompressor.java
      - copied unchanged from r1339303, hadoop/common/branches/branch-1.0/src/core/org/apache/hadoop/io/compress/snappy/SnappyCompressor.java
    hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.java
      - copied unchanged from r1339303, hadoop/common/branches/branch-1.0/src/core/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.java
    hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/security/authentication/util/KerberosUtil.java
      - copied unchanged from r1339303, hadoop/common/branches/branch-1.0/src/core/org/apache/hadoop/security/authentication/util/KerberosUtil.java
    hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapred/ShuffleExceptionTracker.java
      - copied unchanged from r1339303, hadoop/common/branches/branch-1.0/src/mapred/org/apache/hadoop/mapred/ShuffleExceptionTracker.java
    hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapred/TimeBucketMetrics.java
      - copied unchanged from r1339303, hadoop/common/branches/branch-1.0/src/mapred/org/apache/hadoop/mapred/TimeBucketMetrics.java
    hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapred/lib/BinaryPartitioner.java
      - copied unchanged from r1339303, hadoop/common/branches/branch-1.0/src/mapred/org/apache/hadoop/mapred/lib/BinaryPartitioner.java
    hadoop/common/branches/branch-1-win/src/native/src/org/apache/hadoop/io/compress/snappy/
      - copied from r1339303, hadoop/common/branches/branch-1.0/src/native/src/org/apache/hadoop/io/compress/snappy/
    hadoop/common/branches/branch-1-win/src/native/src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c
      - copied unchanged from r1339303, hadoop/common/branches/branch-1.0/src/native/src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c
    hadoop/common/branches/branch-1-win/src/native/src/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.c
      - copied unchanged from r1339303, hadoop/common/branches/branch-1.0/src/native/src/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.c
    hadoop/common/branches/branch-1-win/src/native/src/org/apache/hadoop/io/compress/snappy/org_apache_hadoop_io_compress_snappy.h
      - copied unchanged from r1339303, hadoop/common/branches/branch-1.0/src/native/src/org/apache/hadoop/io/compress/snappy/org_apache_hadoop_io_compress_snappy.h
    hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/server/namenode/TestStorageRestore.java
      - copied unchanged from r1339303, hadoop/common/branches/branch-1.0/src/test/org/apache/hadoop/hdfs/server/namenode/TestStorageRestore.java
    hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/mapreduce/TestMROutputFormat.java
      - copied unchanged from r1339303, hadoop/common/branches/branch-1.0/src/test/org/apache/hadoop/mapreduce/TestMROutputFormat.java
    hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/security/TestGroupsCaching.java
      - copied unchanged from r1339303, hadoop/common/branches/branch-1.0/src/test/org/apache/hadoop/security/TestGroupsCaching.java
Removed:
    hadoop/common/branches/branch-1-win/src/c++/libhdfs/Makefile.in
    hadoop/common/branches/branch-1-win/src/c++/libhdfs/aclocal.m4
    hadoop/common/branches/branch-1-win/src/c++/libhdfs/config.guess
    hadoop/common/branches/branch-1-win/src/c++/libhdfs/config.sub
    hadoop/common/branches/branch-1-win/src/c++/libhdfs/configure
    hadoop/common/branches/branch-1-win/src/c++/libhdfs/depcomp
    hadoop/common/branches/branch-1-win/src/c++/libhdfs/install-sh
    hadoop/common/branches/branch-1-win/src/c++/libhdfs/ltmain.sh
    hadoop/common/branches/branch-1-win/src/c++/libhdfs/m4/libtool.m4
    hadoop/common/branches/branch-1-win/src/c++/libhdfs/m4/ltoptions.m4
    hadoop/common/branches/branch-1-win/src/c++/libhdfs/m4/ltsugar.m4
    hadoop/common/branches/branch-1-win/src/c++/libhdfs/m4/ltversion.m4
    hadoop/common/branches/branch-1-win/src/c++/libhdfs/m4/lt~obsolete.m4
    hadoop/common/branches/branch-1-win/src/c++/libhdfs/missing
    hadoop/common/branches/branch-1-win/src/c++/pipes/Makefile.in
    hadoop/common/branches/branch-1-win/src/c++/pipes/aclocal.m4
    hadoop/common/branches/branch-1-win/src/c++/pipes/config.guess
    hadoop/common/branches/branch-1-win/src/c++/pipes/config.sub
    hadoop/common/branches/branch-1-win/src/c++/pipes/configure
    hadoop/common/branches/branch-1-win/src/c++/pipes/depcomp
    hadoop/common/branches/branch-1-win/src/c++/pipes/impl/config.h.in
    hadoop/common/branches/branch-1-win/src/c++/pipes/install-sh
    hadoop/common/branches/branch-1-win/src/c++/pipes/ltmain.sh
    hadoop/common/branches/branch-1-win/src/c++/pipes/missing
    hadoop/common/branches/branch-1-win/src/c++/utils/Makefile.in
    hadoop/common/branches/branch-1-win/src/c++/utils/aclocal.m4
    hadoop/common/branches/branch-1-win/src/c++/utils/config.guess
    hadoop/common/branches/branch-1-win/src/c++/utils/config.sub
    hadoop/common/branches/branch-1-win/src/c++/utils/configure
    hadoop/common/branches/branch-1-win/src/c++/utils/depcomp
    hadoop/common/branches/branch-1-win/src/c++/utils/impl/config.h.in
    hadoop/common/branches/branch-1-win/src/c++/utils/install-sh
    hadoop/common/branches/branch-1-win/src/c++/utils/ltmain.sh
    hadoop/common/branches/branch-1-win/src/c++/utils/missing
    hadoop/common/branches/branch-1-win/src/examples/pipes/Makefile.in
    hadoop/common/branches/branch-1-win/src/examples/pipes/aclocal.m4
    hadoop/common/branches/branch-1-win/src/examples/pipes/config.guess
    hadoop/common/branches/branch-1-win/src/examples/pipes/config.sub
    hadoop/common/branches/branch-1-win/src/examples/pipes/configure
    hadoop/common/branches/branch-1-win/src/examples/pipes/depcomp
    hadoop/common/branches/branch-1-win/src/examples/pipes/impl/config.h.in
    hadoop/common/branches/branch-1-win/src/examples/pipes/install-sh
    hadoop/common/branches/branch-1-win/src/examples/pipes/ltmain.sh
    hadoop/common/branches/branch-1-win/src/examples/pipes/missing
    hadoop/common/branches/branch-1-win/src/native/INSTALL
    hadoop/common/branches/branch-1-win/src/native/Makefile.in
    hadoop/common/branches/branch-1-win/src/native/aclocal.m4
    hadoop/common/branches/branch-1-win/src/native/config.h.in
    hadoop/common/branches/branch-1-win/src/native/config/config.guess
    hadoop/common/branches/branch-1-win/src/native/config/config.sub
    hadoop/common/branches/branch-1-win/src/native/config/depcomp
    hadoop/common/branches/branch-1-win/src/native/config/install-sh
    hadoop/common/branches/branch-1-win/src/native/config/ltmain.sh
    hadoop/common/branches/branch-1-win/src/native/config/missing
    hadoop/common/branches/branch-1-win/src/native/configure
Modified:
    hadoop/common/branches/branch-1-win/   (props changed)
    hadoop/common/branches/branch-1-win/.gitignore
    hadoop/common/branches/branch-1-win/CHANGES.txt   (contents, props changed)
    hadoop/common/branches/branch-1-win/bin/hadoop
    hadoop/common/branches/branch-1-win/build.xml
    hadoop/common/branches/branch-1-win/ivy.xml
    hadoop/common/branches/branch-1-win/ivy/libraries.properties
    hadoop/common/branches/branch-1-win/src/c++/libhdfs/m4/apsupport.m4
    hadoop/common/branches/branch-1-win/src/c++/task-controller/impl/task-controller.c
    hadoop/common/branches/branch-1-win/src/contrib/streaming/src/java/org/apache/hadoop/streaming/HadoopStreaming.java
    hadoop/common/branches/branch-1-win/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapRed.java
    hadoop/common/branches/branch-1-win/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapper.java
    hadoop/common/branches/branch-1-win/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeReducer.java
    hadoop/common/branches/branch-1-win/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java
    hadoop/common/branches/branch-1-win/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingFailure.java
    hadoop/common/branches/branch-1-win/src/core/core-default.xml
    hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/CommonConfigurationKeys.java
    hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/FileUtil.java
    hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/ipc/Client.java
    hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/jmx/JMXJsonServlet.java
    hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/security/Groups.java
    hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/security/KerberosName.java
    hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/security/SecurityUtil.java
    hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/security/UserGroupInformation.java
    hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
    hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java
    hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java
    hadoop/common/branches/branch-1-win/src/docs/releasenotes.html
    hadoop/common/branches/branch-1-win/src/examples/org/apache/hadoop/examples/Grep.java
    hadoop/common/branches/branch-1-win/src/hdfs/org/apache/hadoop/hdfs/DFSClient.java
    hadoop/common/branches/branch-1-win/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java
    hadoop/common/branches/branch-1-win/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java
    hadoop/common/branches/branch-1-win/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSImage.java
    hadoop/common/branches/branch-1-win/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
    hadoop/common/branches/branch-1-win/src/hdfs/org/apache/hadoop/hdfs/server/namenode/NameNodeMXBean.java
    hadoop/common/branches/branch-1-win/src/hdfs/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
    hadoop/common/branches/branch-1-win/src/hdfs/org/apache/hadoop/hdfs/tools/DelegationTokenFetcher.java
    hadoop/common/branches/branch-1-win/src/mapred/   (props changed)
    hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapred/JobClient.java
    hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapred/JobInProgress.java
    hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapred/LinuxTaskController.java
    hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapred/QueueMetrics.java
    hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapred/TaskLog.java
    hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapred/TaskTracker.java
    hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapreduce/lib/input/FileInputFormat.java
    hadoop/common/branches/branch-1-win/src/native/Makefile.am
    hadoop/common/branches/branch-1-win/src/native/configure.ac
    hadoop/common/branches/branch-1-win/src/native/packageNativeHadoop.sh
    hadoop/common/branches/branch-1-win/src/test/commit-tests
    hadoop/common/branches/branch-1-win/src/test/findbugsExcludeFile.xml
    hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/fs/TestFileUtil.java
    hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/TestAbandonBlock.java
    hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/TestDFSClientRetries.java
    hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/security/TestDelegationToken.java
    hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/server/namenode/TestNameNodeMXBean.java
    hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/server/namenode/TestStorageDirectoryFailure.java
    hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java
    hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/io/compress/TestCodec.java
    hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/ipc/TestIPC.java
    hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/mapred/TestShuffleExceptionCount.java
    hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/security/authentication/KerberosTestUtils.java
    hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/security/authentication/server/TestKerberosAuthenticationHandler.java

Propchange: hadoop/common/branches/branch-1-win/
------------------------------------------------------------------------------
  Merged /hadoop/common/branches/branch-1:r1293716,1301291,1301804,1302058,1302720,1303017,1303027,1304158,1310039,1325636,1331064,1333564
  Merged /hadoop/common/branches/branch-0.20:r932042

Modified: hadoop/common/branches/branch-1-win/.gitignore
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/.gitignore?rev=1362639&r1=1362638&r2=1362639&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/.gitignore (original)
+++ hadoop/common/branches/branch-1-win/.gitignore Tue Jul 17 20:36:07 2012
@@ -20,6 +20,7 @@
 .project
 .settings
 .svn
+autom4te.cache
 build/
 build.properties
 build-fi/
@@ -33,6 +34,7 @@ conf/mapred-site.xml
 conf/hdfs-site.xml
 conf/hadoop-policy.xml
 conf/capacity-scheduler.xml
+conf/fair-scheduler.xml
 docs/api/
 ivy/ivy-*.jar
 ivy/maven-ant-tasks-*.jar
@@ -41,17 +43,45 @@ junit[0-9]*.properties
 logs/
 src/contrib/ec2/bin/hadoop-ec2-env.sh
 src/contrib/index/conf/index-config.xml
-src/contrib/chukwa/conf/alert.conf
-src/contrib/chukwa/conf/chukwa-slaves
-src/contrib/chukwa/conf/initial_adaptors
-src/contrib/chukwa/conf/mdl.xml
-src/contrib/chukwa/conf/chukwa-agents
-src/contrib/chukwa/conf/chukwa-env.sh
-src/contrib/chukwa/conf/chukwa-agent-conf.xml
-src/contrib/chukwa/conf/chukwa-collector-conf.xml
-src/contrib/chukwa/conf/collectors
 src/docs/build
 src/docs/cn/build
 src/docs/cn/src/documentation/sitemap.xmap
 src/docs/cn/uming.conf
 src/contrib/hdfsproxy/src/test/resources
+src/c++/(libhdfs|pipes|task-controller|utils)/Makefile
+src/c++/*/Makefile.in
+src/c++/*/aclocal.m4
+src/c++/*/config.guess
+src/c++/*/config.sub
+src/c++/*/configure
+src/c++/*/depcomp
+src/c++/*/install-sh
+src/c++/*/ltmain.sh
+src/c++/*/missing
+src/c++/*/impl/config.h.in
+src/c++/*/m4/libtool.m4
+src/c++/*/m4/ltoptions.m4
+src/c++/*/m4/ltsugar.m4
+src/c++/*/m4/ltversion.m4
+src/c++/*/m4/lt~obsolete.m4
+src/examples/pipes/configure
+src/examples/pipes/Makefile.in
+src/examples/pipes/aclocal.m4
+src/examples/pipes/config.guess
+src/examples/pipes/config.sub
+src/examples/pipes/depcomp
+src/examples/pipes/impl/config.h.in
+src/examples/pipes/install-sh
+src/examples/pipes/ltmain.sh
+src/examples/pipes/missing
+src/native/configure
+src/native/INSTALL
+src/native/Makefile.in
+src/native/aclocal.m4
+src/native/config.h.in
+src/native/config/config.guess
+src/native/config/config.sub
+src/native/config/depcomp
+src/native/config/install-sh
+src/native/config/ltmain.sh
+src/native/config/missing

Modified: hadoop/common/branches/branch-1-win/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/CHANGES.txt?rev=1362639&r1=1362638&r2=1362639&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1-win/CHANGES.txt Tue Jul 17 20:36:07 2012
@@ -154,12 +154,126 @@ Release 1.1.0 - unreleased
     HDFS-2741. Document the max transfer threads property for branch-1. Backport of HDFS-1866. (Markus Jelsma via harsh)
 
 
-Release 1.0.2 - unreleased
+Release 1.0.3 - 2012.05.07
 
   NEW FEATURES
 
   IMPROVEMENTS
 
+    MAPREDUCE-4017. Add jobname to jobsummary log (tgraves and Koji Noguchi
+    via bobby)
+
+  BUG FIXES
+
+    HADOOP-6924. Adds a directory to the list of directories to search for 
+    the libjvm.so file. The new directory is found by running a 'find' command
+    and the first output is taken. This was done to handle the build of Hadoop
+    with IBM's JDK. (Stephen Watt, Guillermo Cabrera and ddas) 
+
+    HADOOP-6941. Adds support for building Hadoop with IBM's JDK
+    (Stephen Watt, Eli and ddas)
+
+    HADOOP-8188. Fixes the build process to do with jsvc, with IBM's JDK 
+    as the underlying jdk. (ddas)
+
+    HDFS-3127. Do not throw exceptions when FSImage.restoreStorageDirs() 
+    fails. (Brandon Li via szetszwo)
+
+    MAPREDUCE-3377. Ensure OutputCommitter.checkOutputSpecs is called prior to
+    copying job.xml. (Jane Chen via acmurthy)
+
+    HADOOP-5528. Ensure BinaryPartitioner is present in mapred libs. (Klaas
+    Bosteels via acmurthy)
+
+    HADOOP-6963. In FileUtil.getDU(..), neither include the size of directories
+    nor follow symbolic links.  (Ravi Prakash via szetszwo)
+
+    HADOOP-8251. Fix SecurityUtil.fetchServiceTicket after HADOOP-6941. (todd)
+
+    HADOOP-8293. Fix the Makefile.am for the native library to include the
+    JNI path. (omalley)
+
+    MAPREDUCE-4154. streaming MR job succeeds even if the streaming command 
+    fails. (Devaraj Das via tgraves)
+
+    HDFS-119. Fix a bug in logSync(), which causes NameNode block forever. 
+    (shv)
+
+    HADOOP-8294. IPC Connection becomes unusable even if server address 
+    was temporarilly unresolvable. Backport of HADOOP-7428. (Kihwal Lee via 
+    mattf)
+
+    HDFS-3310. Make sure that we abort when no edit log directories are left.
+    (Colin Patrick McCabe via eli)
+
+    MAPREDUCE-4207. Remove System.out.println() in FileInputFormat
+    (Kihwal Lee via harsh)
+
+    HDFS-3265. PowerPc Build error. (Kumar Ravi via mattf)
+
+    HDFS-1041. DFSClient.getFileChecksum(..) should retry if connection to
+    the first datanode fails.  (szetszwo)
+
+    HADOOP-8338. Fix renew and cancel of RPC HDFS delegation tokens. (omalley)
+
+    HADOOP-8346. Makes oid changes to make SPNEGO work. Was broken due
+    to fixes introduced by the IBM JDK compatibility patch. (ddas)
+
+    HADOOP-8352. Regenerate configure scripts for the c++ compilation. 
+    (omalley)
+
+    HDFS-3061. Cached directory size in INodeDirectory can get permanently
+    out of sync with computed size, causing quota issues; port of HDFS-1487.
+    (Kihwal Lee via mattf)
+
+    HADOOP-7381. FindBugs OutOfMemoryError. (Joep Rottinghuis via mattf)
+
+    HADOOP-8151. Error handling in snappy decompressor throws invalid
+    exceptions. (Matt Foley)
+
+    HDFS-3374. hdfs' TestDelegationToken fails intermittently with a race
+    condition. (Owen O'Malley via mattf)
+
+    MAPREDUCE-3857. Grep example ignores mapred.job.queue.name.
+    (Jonathan Eagles via mattf)
+
+    MAPREDUCE-1238. mapred metrics shows negative count of waiting maps and
+    reduces (tgraves via bobby)
+
+    MAPREDUCE-4003. log.index (No such file or directory) AND Task process 
+    exit with nonzero status of 126. (Koji Noguchi via tgraves)
+
+    MAPREDUCE-4012. Hadoop Job setup error leaves no useful info to users
+    (when LinuxTaskController is used) (tgraves)
+
+    HADOOP-8027. Visiting /jmx on the daemon web interfaces may print unnecessary
+    error in logs (Aaron Myers and Hitesh Shah)
+
+Release 1.0.2 - 2012.03.24
+
+  NEW FEATURES
+
+    HADOOP-7206. Support Snappy compression. (Issei Yoshida and
+    Alejandro Abdelnur via vinodkv).
+
+    HDFS-2701. Cleanup FS* processIOError methods. (eli)
+
+    HDFS-2978. The NameNode should expose name dir statuses via JMX. (atm)
+
+  IMPROVEMENTS
+
+    MAPREDUCE-3773. Add queue metrics with buckets for job run times. (omalley
+    via acmurthy)
+
+    HADOOP-1722. Allow hadoop streaming to handle non-utf8 byte array. (Klaas
+    Bosteels and Matthias Lehmann via acmurthy)
+
+    HADOOP-5450. Add support for application-specific typecodes to typed
+    bytes. (Klaas Bosteels via acmurthy) 
+
+    HADOOP-8090. rename hadoop 64 bit rpm/deb package name. (Giridharan Kesavan
+    via mattf)
+
   BUG FIXES
 
     HADOOP-8050. Deadlock in metrics. (Kihwal Lee via mattf)
@@ -173,18 +287,44 @@ Release 1.0.2 - unreleased
     HDFS-3006. In WebHDFS, when the return body is empty, set the Content-Type
     to application/octet-stream instead of application/json.  (szetszwo)
 
+    MAPREDUCE-764. Fix TypedBytesInput.readRaw to preserve custom type codes.  
+    (Klaas Bosteels via acmurthy) 
+
+    HDFS-2703. removedStorageDirs is not updated everywhere we remove
+    a storage dir. (eli)
+
+    HDFS-2702. A single failed name dir can cause the NN to exit. (eli)
+
+    HDFS-3075. Backport HADOOP-4885: Try to restore failed name-node storage
+    directories at checkpoint time.  (Brandon Li via szetszwo)
+
+    HDFS-3101. Cannot read empty file using WebHDFS.  (szetszwo)
+
+    MAPREDUCE-3851.  Allow more aggressive action on detection of the jetty
+    issue (tgraves via bobby)
+
+    HADOOP-8088. User-group mapping cache incorrectly does negative caching on
+    transient failures (Kihwal Lee via bobby)
+
+    HADOOP-8132. 64bit secure datanodes do not start as the jsvc path is wrong
+    (Arpit Gupta via mattf)
+
+    HADOOP-8201. create the configure script for native compilation as part of
+    the build (Giri Kesavan via mattf)
+
 Release 1.0.1 - 2012.02.14
 
   NEW FEATURES
 
   IMPROVEMENTS
 
-    MAPREDUCE-3607. Port missing new API mapreduce lib classes to 1.x. (tomwhite)
+    MAPREDUCE-3607. Port missing new API mapreduce lib classes to
+    1.x. (tomwhite)
 
     HADOOP-7987. Support setting the run-as user in unsecure mode. (jitendra)
 
-    HADOOP-7988. Upper case in hostname part of the principals doesn't work with 
-    kerberos. (jitendra)
+    HADOOP-7988. Upper case in hostname part of the principals doesn't
+    work with kerberos. (jitendra)
 
     HADOOP-7470. Move up to Jackson 1.8.8.  (Enis Soztutar via szetszwo)
 
@@ -216,8 +356,9 @@ Release 1.0.1 - 2012.02.14
     MAPREDUCE-3343. TaskTracker Out of Memory because of distributed cache.
     (Zhao Yunjiong).
 
-    HADOOP-8037. Binary tarball does not preserve platform info for native builds,
-    and RPMs fail to provide needed symlinks for libhadoop.so.  (Matt Foley)
+    HADOOP-8037. Binary tarball does not preserve platform info for
+    native builds, and RPMs fail to provide needed symlinks for
+    libhadoop.so.  (Matt Foley)
 
 Release 1.0.0 - 2011.12.15
 
@@ -297,8 +438,9 @@ Release 1.0.0 - 2011.12.15
     HADOOP-7815. Fixed configuring map memory mb in hadoop-setup-conf.sh.
     (Ramya Sunil)
 
-    HDFS-2346. TestHost2NodesMap & TestReplicasMap will fail depending upon
-    execution order of test methods. (Laxman and Uma Maheswara Rao via Matt Foley)
+    HDFS-2346. TestHost2NodesMap & TestReplicasMap will fail depending
+    upon execution order of test methods. (Laxman and Uma Maheswara
+    Rao via Matt Foley)
 
     MAPREDUCE-3374. src/c++/task-controller/configure is not set executable in
     the tarball and that prevents task-controller from rebuilding.
@@ -372,7 +514,8 @@ Release 1.0.0 - 2011.12.15
 
     HDFS-2590. Fix the missing links in the WebHDFS forrest doc.  (szetszwo)
 
-    HADOOP-7854. UGI getCurrentUser is not synchronized. (Daryn Sharp via jitendra)
+    HADOOP-7854. UGI getCurrentUser is not synchronized. (Daryn Sharp
+    via jitendra)
 
     HADOOP-7865. Test Failures in 1.0 hdfs/common. (jitendra)
 
@@ -434,8 +577,8 @@ Release 0.20.205.0 - 2011.10.06
     HDFS-2385. Support renew and cancel delegation tokens in WebHDFS.
     (szetszwo)
 
-    MAPREDUCE-2777. Backport of MAPREDUCE-220 and MAPREDUCE-2469. Includes adding
-    cumulative CPU usage and total heap usage to task conters. (amarrk)
+    MAPREDUCE-2777. Backport of MAPREDUCE-220 and MAPREDUCE-2469. Includes 
+    adding cumulative CPU usage and total heap usage to task conters. (amarrk)
 
   BUG FIXES
 
@@ -453,8 +596,8 @@ Release 0.20.205.0 - 2011.10.06
     HADOOP-7685. Resolve issues with hadoop-common file hadoop-setup-conf.sh.
     (Eric Yang and Devaraj K, via mattf)
 
-    HADOOP-7684. jobhistory server and secondarynamenode should have init.d script
-    for rpm and deb. (Eric Yang via mattf)
+    HADOOP-7684. jobhistory server and secondarynamenode should have
+    init.d script for rpm and deb. (Eric Yang via mattf)
 
     HADOOP-7683. remove hdfs-site.xml template has properties that are not used
     in 0.20-security. (Arpit Gupta via mattf)
@@ -465,8 +608,8 @@ Release 0.20.205.0 - 2011.10.06
     HADOOP-7681. log4j.properties is missing properties for security audit and
     hdfs audit should be changed to info. (Arpit Gupta via mattf)
 
-    HADOOP-7679. log4j.properties templates must define mapred.jobsummary.logger
-    (Ramya Sunil via mattf)
+    HADOOP-7679. log4j.properties templates must define
+    mapred.jobsummary.logger (Ramya Sunil via mattf)
 
     HDFS-2325. Fuse-DFS fails to build on Hadoop 20.203.0
     (Kihwal Lee via mattf)
@@ -673,8 +816,8 @@ Release 0.20.205.0 - 2011.10.06
 
     MAPREDUCE-2928. MR-2413 improvements (Eli Collins via mattf)
 
-    HADOOP-7655. provide a small validation script that smoke tests the installed
-    cluster. (Arpit Gupta via mattf)
+    HADOOP-7655. provide a small validation script that smoke tests
+    the installed cluster. (Arpit Gupta via mattf)
 
     MAPREDUCE-2187. Reporter sends progress during sort/merge. (Anupam Seth via
     acmurthy)

Propchange: hadoop/common/branches/branch-1-win/CHANGES.txt
------------------------------------------------------------------------------
  Merged /hadoop/common/branches/branch-0.20/CHANGES.txt:r932042
  Merged /hadoop/common/branches/branch-1/CHANGES.txt:r1293716,1301291,1301804,1302058,1302720,1303017,1303027,1304158,1310039,1311966,1311994,1325636,1331064,1333564
  Merged /hadoop/common/branches/branch-1.0.2/CHANGES.txt:r1304948

Modified: hadoop/common/branches/branch-1-win/bin/hadoop
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/bin/hadoop?rev=1362639&r1=1362638&r2=1362639&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/bin/hadoop (original)
+++ hadoop/common/branches/branch-1-win/bin/hadoop Tue Jul 17 20:36:07 2012
@@ -315,16 +315,19 @@ if $cygwin; then
   HADOOP_LOG_DIR=`cygpath -w "$HADOOP_LOG_DIR"`
   TOOL_PATH=`cygpath -p -w "$TOOL_PATH"`
 fi
+
+#Determine the JAVA_PLATFORM
+JAVA_PLATFORM=`CLASSPATH=${CLASSPATH} ${JAVA} -Xmx32m ${HADOOP_JAVA_PLATFORM_OPTS} org.apache.hadoop.util.PlatformName | sed -e "s/ /_/g"`
+  
+if [ "$JAVA_PLATFORM" = "Linux-amd64-64" ]; then
+  JSVC_ARCH="amd64"
+else
+  JSVC_ARCH="i386"
+fi
+
 # setup 'java.library.path' for native-hadoop code if necessary
 JAVA_LIBRARY_PATH=''
 if [ -d "${HADOOP_HOME}/build/native" -o -d "${HADOOP_HOME}/lib/native" -o -e "${HADOOP_PREFIX}/lib/libhadoop.a" ]; then
-  JAVA_PLATFORM=`CLASSPATH=${CLASSPATH} ${JAVA} -Xmx32m ${HADOOP_JAVA_PLATFORM_OPTS} org.apache.hadoop.util.PlatformName | sed -e "s/ /_/g"`
-  
-  if [ "$JAVA_PLATFORM" = "Linux-amd64-64" ]; then
-    JSVC_ARCH="amd64"
-  else
-    JSVC_ARCH="i386"
-  fi
 
   if [ -d "$HADOOP_HOME/build/native" ]; then
     JAVA_LIBRARY_PATH=${HADOOP_HOME}/build/native/${JAVA_PLATFORM}/lib

Modified: hadoop/common/branches/branch-1-win/build.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/build.xml?rev=1362639&r1=1362638&r2=1362639&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/build.xml (original)
+++ hadoop/common/branches/branch-1-win/build.xml Tue Jul 17 20:36:07 2012
@@ -162,7 +162,7 @@
 
   <property name="jdiff.build.dir" value="${build.docs}/jdiff"/>
   <property name="jdiff.xml.dir" value="${lib.dir}/jdiff"/>
-  <property name="jdiff.stable" value="1.0.0"/>
+  <property name="jdiff.stable" value="1.0.2"/>
   <property name="jdiff.stable.javadoc" 
             value="http://hadoop.apache.org/core/docs/r${jdiff.stable}/api/"/>
 
@@ -186,6 +186,7 @@
       <os arch="i486" />
       <os arch="i586" />
       <os arch="i686" />
+      <os arch="x86" />
     </or>
   </condition>
   <property name="jsvc.location" value="http://archive.apache.org/dist/commons/daemon/binaries/1.0.2/linux/commons-daemon-1.0.2-bin-linux-${os-arch}.tar.gz" />
@@ -206,10 +207,21 @@
   <property name="hadoop.conf.dir" value="/etc/hadoop"/>
 
   <!-- end of task-controller properties -->
+       
+  <!-- These args are passed along to FindBugs -->
+  <property name="findbugs.jvmargs" value="-Xmx512M" />
 
   <property name="package.buildroot" value="/tmp/hadoop_package_build_${user.name}"/>
   <property name="package.build.dir" value="/tmp/hadoop_package_build_${user.name}/BUILD"/>
 
+  <!-- Indicate is Snappy native library should be bundled with Hadoop or not -->
+  <property name="bundle.snappy" value="false"/>
+
+  <!-- Snappy native library location -->
+  <property name="snappy.prefix" value="/usr/local"/>
+  <property name="snappy.lib" value="${snappy.prefix}/lib"/>
+  <property name="snappy.include" value="${snappy.prefix}/include"/>
+
   <!-- IVY properteis set here -->
   <property name="ivy.dir" location="ivy" />
   <loadproperties srcfile="${ivy.dir}/libraries.properties"/>
@@ -301,6 +313,9 @@
   <property name="build.dir.eclipse-test-resources" value="${build.dir.eclipse}/test-resources/"/>
   <property name="build.dir.eclipse-test-resources-webapps" value="${build.dir.eclipse}/test-resources/webapps"/>
 
+  <!-- Use environment -->
+  <property environment="env" />
+
   <!-- check if clover reports should be generated -->
   <condition property="clover.enabled">
     <and>
@@ -617,11 +632,19 @@
     </antcall> 
   </target>
 
-  <target name="compile-core-native" depends="compile-core-classes"
+  <target name="create-native-configure">
+    <exec executable="autoreconf" dir="${native.src.dir}" searchpath="yes" 
+          failonerror="yes">
+       <arg value="-if"/>
+    </exec>
+  </target>
+
+  <target name="compile-core-native" depends="create-native-configure, compile-core-classes"
           if="compile.native">
   	
     <mkdir dir="${build.native}/lib"/>
     <mkdir dir="${build.native}/src/org/apache/hadoop/io/compress/zlib"/>
+    <mkdir dir="${build.native}/src/org/apache/hadoop/io/compress/snappy"/>
     <mkdir dir="${build.native}/src/org/apache/hadoop/io/nativeio"/>
     <mkdir dir="${build.native}/src/org/apache/hadoop/security"/>
 
@@ -635,6 +658,16 @@
       <class name="org.apache.hadoop.io.compress.zlib.ZlibDecompressor" />
   	</javah>
 
+    <javah
+      classpath="${build.classes}"
+      destdir="${build.native}/src/org/apache/hadoop/io/compress/snappy"
+      force="yes"
+      verbose="yes"
+      >
+      <class name="org.apache.hadoop.io.compress.snappy.SnappyCompressor"/>
+      <class name="org.apache.hadoop.io.compress.snappy.SnappyDecompressor"/>
+    </javah>
+
         <javah
           classpath="${build.classes}"
           destdir="${build.native}/src/org/apache/hadoop/io/nativeio"
@@ -666,7 +699,7 @@
 	  <env key="OS_ARCH" value="${os.arch}"/>
 	  <env key="JVM_DATA_MODEL" value="${sun.arch.data.model}"/>
 	  <env key="HADOOP_NATIVE_SRCDIR" value="${native.src.dir}"/>
-	  <arg line="${native.src.dir}/configure"/>
+      <arg line="${native.src.dir}/configure"/>
     </exec>
 
     <exec dir="${build.native}" executable="${make.cmd}" failonerror="true">
@@ -1063,7 +1096,7 @@
                      value="@{test.krb5.conf.filename}"/>
         <sysproperty key="hadoop.policy.file" value="hadoop-policy.xml" />
         <sysproperty key="java.library.path"
-                     value="${build.native}/lib:${lib.dir}/native/${build.platform}:${lib.file.path}" />
+                     value="${build.native}/lib:${lib.dir}/native/${build.platform}:${lib.file.path}:${snappy.lib}" />
         <sysproperty key="install.c++.examples"
                      value="${install.c++.examples}" />
         <sysproperty key="testjar"
@@ -1246,7 +1279,7 @@
 
     <findbugs home="${findbugs.home}" output="xml:withMessages"
         outputFile="${findbugs.report.xmlfile}" effort="max"
-        excludeFilter="${findbugs.exclude.file}" jvmargs="-Xmx512M">
+        excludeFilter="${findbugs.exclude.file}" jvmargs="${findbugs.jvmargs}">
       <auxClasspath>
         <fileset dir="${lib.dir}">
           <include name="**/*.jar"/>
@@ -1518,6 +1551,8 @@
 	  <env key="BASE_NATIVE_LIB_DIR" value="${lib.dir}/native"/>
 	  <env key="BUILD_NATIVE_DIR" value="${build.dir}/native"/>
 	  <env key="DIST_LIB_DIR" value="${dist.dir}/lib/native"/>
+      <env key="BUNDLE_SNAPPY_LIB" value="${bundle.snappy}"/>
+      <env key="SNAPPY_LIB_DIR" value="${snappy.prefix}/lib"/>
 	  <arg line="${native.src.dir}/packageNativeHadoop.sh"/>
     </exec>
 
@@ -1904,7 +1939,7 @@
     <copy todir="${system-test-build-dir}/${final.name}"
       file="${system-test-build-dir}/${test.final.name}.jar" overwrite="true"/>
     <macro_tar 
-      param.destfile="${system-test-build-dir}/${final.name}-${os.arch}-bin.tar.gz">
+      param.destfile="${system-test-build-dir}/${final.name}-${os-arch}-bin.tar.gz">
         <param.listofitems>
           <tarfileset dir="${system-test-build-dir}" mode="664">
             <exclude name="${final.name}/bin/*" />
@@ -1920,7 +1955,7 @@
   </target>
   
   <target name="binary" depends="bin-package" description="Make platform-specific binary tarball without source and documentation">
-    <macro_tar param.destfile="${build.dir}/${final.name}-${os.arch}-bin.tar.gz">
+    <macro_tar param.destfile="${build.dir}/${final.name}-${os-arch}-bin.tar.gz">
       <param.listofitems>
         <tarfileset dir="${build.dir}" mode="664">
           <exclude name="${final.name}/bin/*" />
@@ -1950,14 +1985,14 @@
     </macro_tar>
     <copy todir="${package.buildroot}/SOURCES">
       <fileset dir="${build.dir}">
-        <include name="${final.name}-${os.arch}-bin.tar.gz" />
+        <include name="${final.name}-${os-arch}-bin.tar.gz" />
       </fileset>
     </copy>
     <copy file="${src.dir}/packages/rpm/spec/hadoop.spec" todir="${package.buildroot}/SPECS">
       <filterchain>
         <replacetokens>
           <token key="final.name" value="${final.name}" />
-          <token key="build.arch" value="${os.arch}" />
+          <token key="build.arch" value="${os-arch}" />
           <token key="version" value="${hadoop.version}" />
           <token key="package.release" value="${package.release}" />
           <token key="package.build.dir" value="${package.build.dir}" />
@@ -1968,7 +2003,7 @@
         </replacetokens>
       </filterchain>
     </copy>
-    <rpm specFile="hadoop.spec" command="-bb --target ${os.arch}" topDir="${package.buildroot}" cleanBuildDir="true" failOnError="true"/>
+    <rpm specFile="hadoop.spec" command="-bb --target ${os-arch}" topDir="${package.buildroot}" cleanBuildDir="true" failOnError="true"/>
     <copy todir="${build.dir}/" flatten="true">
       <fileset dir="${package.buildroot}/RPMS">
         <include name="**/${name}*.rpm" />
@@ -2012,7 +2047,7 @@
         </replacetokens>
       </filterchain>
     </copy>
-    <deb destfile="${package.buildroot}/${name}_${hadoop.version}-${package.release}_${os.arch}.deb" control="${package.build.dir}/hadoop.control">
+    <deb destfile="${package.buildroot}/${name}_${hadoop.version}-${package.release}_${os-arch}.deb" control="${package.build.dir}/hadoop.control">
       <tarfileset dir="${build.dir}/${final.name}" filemode="644" prefix="${package.prefix}">
         <exclude name="bin/*" />
         <exclude name="sbin/*" />
@@ -2278,7 +2313,7 @@
   </target>
 
   <target name="compile-c++" 
-          depends="compile-c++-pipes"/>
+          depends="create-c++-configure,compile-c++-pipes"/>
 
   <target name="create-c++-examples-pipes-makefile" 
           depends="check-c++-makefiles" 

Modified: hadoop/common/branches/branch-1-win/ivy.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/ivy.xml?rev=1362639&r1=1362638&r2=1362639&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/ivy.xml (original)
+++ hadoop/common/branches/branch-1-win/ivy.xml Tue Jul 17 20:36:07 2012
@@ -85,6 +85,11 @@
       rev="${commons-cli.version}"
       conf="client->default"/>
 
+    <dependency org="commons-io"
+      name="commons-io"
+      rev="${commons-io.version}"
+      conf="client->default"/>
+
     <dependency org="checkstyle"
       name="checkstyle"
       rev="${checkstyle.version}"

Modified: hadoop/common/branches/branch-1-win/ivy/libraries.properties
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/ivy/libraries.properties?rev=1362639&r1=1362638&r2=1362639&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/ivy/libraries.properties (original)
+++ hadoop/common/branches/branch-1-win/ivy/libraries.properties Tue Jul 17 20:36:07 2012
@@ -39,7 +39,7 @@ commons-logging-api.version=1.0.4
 commons-math.version=2.1
 commons-el.version=1.0
 commons-fileupload.version=1.2
-commons-io.version=1.4
+commons-io.version=2.1
 commons-net.version=1.4.1
 core.version=3.1.1
 coreplugin.version=1.3.2

Modified: hadoop/common/branches/branch-1-win/src/c++/libhdfs/m4/apsupport.m4
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/c%2B%2B/libhdfs/m4/apsupport.m4?rev=1362639&r1=1362638&r2=1362639&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/c++/libhdfs/m4/apsupport.m4 (original)
+++ hadoop/common/branches/branch-1-win/src/c++/libhdfs/m4/apsupport.m4 Tue Jul 17 20:36:07 2012
@@ -71,7 +71,7 @@ AC_DEFUN([AP_SUPPORTED_HOST],[
   esac
 
   case $host_cpu in
-  powerpc)
+  powerpc*)
     CFLAGS="$CFLAGS -DCPU=\\\"$host_cpu\\\""
     HOST_CPU=$host_cpu;;
   sparc*)

Modified: hadoop/common/branches/branch-1-win/src/c++/task-controller/impl/task-controller.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/c%2B%2B/task-controller/impl/task-controller.c?rev=1362639&r1=1362638&r2=1362639&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/c++/task-controller/impl/task-controller.c (original)
+++ hadoop/common/branches/branch-1-win/src/c++/task-controller/impl/task-controller.c Tue Jul 17 20:36:07 2012
@@ -590,6 +590,17 @@ int set_user(const char *user) {
   if (user_detail == NULL) {
     return -1;
   }
+
+  if (geteuid() == user_detail->pw_uid) {
+    return 0;
+  }
+
+  if (initgroups(user, user_detail->pw_gid) != 0) {
+    fprintf(LOGFILE, "Error setting supplementary groups for user %s: %s\n",
+        user, strerror(errno));
+    return -1;
+  }
+
   return change_effective_user(user_detail->pw_uid, user_detail->pw_gid);
 }
 

Modified: hadoop/common/branches/branch-1-win/src/contrib/streaming/src/java/org/apache/hadoop/streaming/HadoopStreaming.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/contrib/streaming/src/java/org/apache/hadoop/streaming/HadoopStreaming.java?rev=1362639&r1=1362638&r2=1362639&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/contrib/streaming/src/java/org/apache/hadoop/streaming/HadoopStreaming.java (original)
+++ hadoop/common/branches/branch-1-win/src/contrib/streaming/src/java/org/apache/hadoop/streaming/HadoopStreaming.java Tue Jul 17 20:36:07 2012
@@ -18,6 +18,8 @@
 
 package org.apache.hadoop.streaming;
 
+import java.util.Arrays;
+
 import org.apache.hadoop.util.ToolRunner;
 
 /** The main entrypoint. Usually invoked with the script bin/hadoopStreaming
@@ -27,11 +29,28 @@ import org.apache.hadoop.util.ToolRunner
 public class HadoopStreaming {
 
   public static void main(String[] args) throws Exception {
+    if (args.length < 1) {
+      System.err.println("No Arguments Given!");
+      System.exit(1);
+    }
     int returnStatus = 0;
-    StreamJob job = new StreamJob();
-    returnStatus = ToolRunner.run(job, args);
+    String cmd = args[0];
+    String[] remainingArgs = Arrays.copyOfRange(args, 1, args.length);
+    if (cmd.equalsIgnoreCase("dumptb")) {
+      DumpTypedBytes dumptb = new DumpTypedBytes();
+      returnStatus = ToolRunner.run(dumptb, remainingArgs);
+    } else if (cmd.equalsIgnoreCase("loadtb")) {
+      LoadTypedBytes loadtb = new LoadTypedBytes();
+      returnStatus = ToolRunner.run(loadtb, remainingArgs);
+    } else if (cmd.equalsIgnoreCase("streamjob")) {
+      StreamJob job = new StreamJob();
+      returnStatus = ToolRunner.run(job, remainingArgs);
+    } else { // for backward compatibility
+      StreamJob job = new StreamJob();
+      returnStatus = ToolRunner.run(job, args);
+    }
     if (returnStatus != 0) {
-      System.err.println("Streaming Job Failed!");
+      System.err.println("Streaming Command Failed!");
       System.exit(returnStatus);
     }
   }

Modified: hadoop/common/branches/branch-1-win/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapRed.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapRed.java?rev=1362639&r1=1362638&r2=1362639&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapRed.java (original)
+++ hadoop/common/branches/branch-1-win/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapRed.java Tue Jul 17 20:36:07 2012
@@ -19,8 +19,6 @@
 package org.apache.hadoop.streaming;
 
 import java.io.*;
-import java.nio.charset.CharacterCodingException;
-import java.io.IOException;
 import java.util.Date;
 import java.util.Map;
 import java.util.Iterator;
@@ -30,16 +28,20 @@ import java.util.Properties;
 
 import org.apache.commons.logging.*;
 
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapred.OutputCollector;
+import org.apache.hadoop.streaming.io.InputWriter;
+import org.apache.hadoop.streaming.io.OutputReader;
+import org.apache.hadoop.streaming.io.TextInputWriter;
+import org.apache.hadoop.streaming.io.TextOutputReader;
 import org.apache.hadoop.util.LineReader;
+import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.StringUtils;
-import org.apache.hadoop.util.UTF8ByteArrayUtils;
 
 import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.BytesWritable;
 
 import org.apache.hadoop.fs.FileSystem;
 
@@ -49,15 +51,49 @@ public abstract class PipeMapRed {
 
   protected static final Log LOG = LogFactory.getLog(PipeMapRed.class.getName());
 
-  /** The command to be spawned as a subprocess.
-   * Mapper/Reducer operations will delegate to it
+  /**
+   * Returns the Configuration.
    */
-  abstract String getPipeCommand(JobConf job);
-
-  abstract byte[] getFieldSeparator();
+  public Configuration getConfiguration() {
+    return job_;
+  }
+  
+  /**
+   * Returns the DataOutput to which the client input is written.
+   */
+  public DataOutput getClientOutput() {
+    return clientOut_;
+  }
+  
+  /**
+   * Returns the DataInput from which the client output is read.
+   */
+  public DataInput getClientInput() {
+    return clientIn_;
+  }
+  
+  /**
+   * Returns the input separator to be used.
+   */
+  public abstract byte[] getInputSeparator();
+  
+  /**
+   * Returns the field separator to be used.
+   */
+  public abstract byte[] getFieldSeparator();
 
-  abstract int getNumOfKeyFields();
+  /**
+   * Returns the number of key fields.
+   */
+  public abstract int getNumOfKeyFields();
 
+  
+  /** 
+   * Returns the command to be spawned as a subprocess.
+   * Mapper/Reducer operations will delegate to it
+   */
+  abstract String getPipeCommand(JobConf job);
+  
   abstract boolean getDoPipe();
 
   final static int OUTSIDE = 1;
@@ -120,7 +156,19 @@ public abstract class PipeMapRed {
 
       job_ = job;
       fs_ = FileSystem.get(job_);
-
+      
+      mapInputWriterClass_ = 
+        job_.getClass("stream.map.input.writer.class", 
+          TextInputWriter.class, InputWriter.class);
+      mapOutputReaderClass_ = 
+        job_.getClass("stream.map.output.reader.class",
+          TextOutputReader.class, OutputReader.class);
+      reduceInputWriterClass_ = 
+        job_.getClass("stream.reduce.input.writer.class",
+          TextInputWriter.class, InputWriter.class);
+      reduceOutputReaderClass_ = 
+        job_.getClass("stream.reduce.output.reader.class",
+          TextOutputReader.class, OutputReader.class);
       nonZeroExitIsFailure_ = job_.getBoolean("stream.non.zero.exit.is.failure", true);
       
       doPipe_ = getDoPipe();
@@ -280,13 +328,16 @@ public abstract class PipeMapRed {
     }
   }
 
-  void startOutputThreads(OutputCollector output, Reporter reporter) {
-    outThread_ = new MROutputThread(output, reporter);
+  void startOutputThreads(OutputCollector output, Reporter reporter) 
+    throws IOException {
+    inWriter_ = createInputWriter();
+    outReader_ = createOutputReader();
+    outThread_ = new MROutputThread(outReader_, output, reporter);
     outThread_.start();
     errThread_.setReporter(reporter);
   }
-
-  void waitOutputThreads() {
+  
+  void waitOutputThreads() throws IOException {
     try {
       if (outThread_ == null) {
         // This happens only when reducer has empty input(So reduce() is not
@@ -328,58 +379,46 @@ public abstract class PipeMapRed {
       //ignore
     }
   }
-
-  /**
-   * Split a line into key and value.
-   * @param line: a byte array of line containing UTF-8 bytes
-   * @param key: key of a record
-   * @param val: value of a record
-   * @throws IOException
-   */
-  void splitKeyVal(byte[] line, int length, Text key, Text val)
-  throws IOException {
-    int numKeyFields = getNumOfKeyFields();
-    byte[] separator = getFieldSeparator();
-    
-    // Need to find numKeyFields separators
-    int pos = UTF8ByteArrayUtils.findBytes(line, 0, length, separator);
-    for(int k=1; k<numKeyFields && pos!=-1; k++) {
-      pos = UTF8ByteArrayUtils.findBytes(line, pos + separator.length, 
-          length, separator);
-    }
-    try {
-      if (pos == -1) {
-        key.set(line, 0, length);
-        val.set("");
-      } else {
-        StreamKeyValUtil.splitKeyVal(line, 0, length, key, val, pos, separator.length);
-      }
-    } catch (CharacterCodingException e) {
-      LOG.warn(StringUtils.stringifyException(e));
-    }
+  
+  
+  abstract InputWriter createInputWriter() throws IOException;
+  
+  InputWriter createInputWriter(Class<? extends InputWriter> inputWriterClass) 
+    throws IOException {
+    InputWriter inputWriter =
+      ReflectionUtils.newInstance(inputWriterClass, job_);
+    inputWriter.initialize(this);
+    return inputWriter;
+  }
+
+  abstract OutputReader createOutputReader() throws IOException;
+
+  OutputReader createOutputReader(Class<? extends OutputReader> outputReaderClass) 
+    throws IOException {
+    OutputReader outputReader =
+      ReflectionUtils.newInstance(outputReaderClass, job_);
+    outputReader.initialize(this);
+    return outputReader;
   }
-
+  
+  
   class MROutputThread extends Thread {
 
-    MROutputThread(OutputCollector output, Reporter reporter) {
+    MROutputThread(OutputReader outReader, OutputCollector outCollector,
+      Reporter reporter) {
       setDaemon(true);
-      this.output = output;
+      this.outReader = outReader;
+      this.outCollector = outCollector;
       this.reporter = reporter;
     }
 
     public void run() {
-      LineReader lineReader = null;
       try {
-        Text key = new Text();
-        Text val = new Text();
-        Text line = new Text();
-        lineReader = new LineReader((InputStream)clientIn_, job_);
         // 3/4 Tool to Hadoop
-        while (lineReader.readLine(line) > 0) {
-          answer = line.getBytes();
-          splitKeyVal(answer, line.getLength(), key, val);
-          output.collect(key, val);
-          line.clear();
+        while (outReader.readKeyValue()) {
+          Object key = outReader.getCurrentKey();
+          Object value = outReader.getCurrentValue();
+          outCollector.collect(key, value);
           numRecWritten_++;
           long now = System.currentTimeMillis();
           if (now-lastStdoutReport > reporterOutDelay_) {
@@ -394,21 +433,11 @@ public abstract class PipeMapRed {
             logflush();
           }
         }
-        if (lineReader != null) {
-          lineReader.close();
-        }
-        if (clientIn_ != null) {
-          clientIn_.close();
-          clientIn_ = null;
-          LOG.info("MROutputThread done");
-        }
       } catch (Throwable th) {
         outerrThreadsThrowable = th;
         LOG.warn(StringUtils.stringifyException(th));
+      } finally {
         try {
-          if (lineReader != null) {
-            lineReader.close();
-          }
           if (clientIn_ != null) {
             clientIn_.close();
             clientIn_ = null;
@@ -419,9 +448,9 @@ public abstract class PipeMapRed {
       }
     }
 
-    OutputCollector output;
-    Reporter reporter;
-    byte[] answer;
+    OutputReader outReader = null;
+    OutputCollector outCollector = null;
+    Reporter reporter = null;
     long lastStdoutReport = 0;
     
   }
@@ -541,8 +570,13 @@ public abstract class PipeMapRed {
           clientOut_.close();
         }
       } catch (IOException io) {
+        LOG.warn(StringUtils.stringifyException(io));
+      }
+      try {
+        waitOutputThreads();
+      } catch (IOException io) {
+        LOG.warn(StringUtils.stringifyException(io));
       }
-      waitOutputThreads();
       if (sim != null) sim.destroy();
       logprintln("mapRedFinished");
     } catch (RuntimeException e) {
@@ -579,7 +613,7 @@ public abstract class PipeMapRed {
     //s += envline("PWD"); // =/home/crawler/hadoop/trunk
     s += "last Hadoop input: |" + mapredKey_ + "|\n";
     if (outThread_ != null) {
-      s += "last tool output: |" + outThread_.answer + "|\n";
+      s += "last tool output: |" + outReader_.getLastOutput() + "|\n";
     }
     s += "Date: " + new Date() + "\n";
     // s += envline("HADOOP_HOME");
@@ -611,37 +645,12 @@ public abstract class PipeMapRed {
     return msg;
   }
 
-  /**
-   * Write a value to the output stream using UTF-8 encoding
-   * @param value output value
-   * @throws IOException
-   */
-  void write(Object value) throws IOException {
-    byte[] bval;
-    int valSize;
-    if (value instanceof BytesWritable) {
-      BytesWritable val = (BytesWritable) value;
-      bval = val.getBytes();
-      valSize = val.getLength();
-    } else if (value instanceof Text) {
-      Text val = (Text) value;
-      bval = val.getBytes();
-      valSize = val.getLength();
-    } else {
-      String sval = value.toString();
-      bval = sval.getBytes("UTF-8");
-      valSize = bval.length;
-    }
-    clientOut_.write(bval, 0, valSize);
-  }
-
   long startTime_;
   long numRecRead_ = 0;
   long numRecWritten_ = 0;
   long numRecSkipped_ = 0;
   long nextRecReadLog_ = 1;
 
-  
   long minRecWrittenToEnableSkip_ = Long.MAX_VALUE;
 
   long reporterOutDelay_ = 10*1000L; 
@@ -656,9 +665,15 @@ public abstract class PipeMapRed {
   boolean debugFailDuring_;
   boolean debugFailLate_;
 
+  Class<? extends InputWriter> mapInputWriterClass_;
+  Class<? extends OutputReader> mapOutputReaderClass_;
+  Class<? extends InputWriter> reduceInputWriterClass_;
+  Class<? extends OutputReader> reduceOutputReaderClass_;
   boolean nonZeroExitIsFailure_;
   
   Process sim;
+  InputWriter inWriter_;
+  OutputReader outReader_;
   MROutputThread outThread_;
   String jobLog_;
   MRErrorThread errThread_;

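Note on the PipeMapRed changes above: the hard-wired UTF-8 line handling (splitKeyVal, write) is replaced by pluggable InputWriter/OutputReader classes, resolved from the four stream.{map,reduce}.{input.writer,output.reader}.class keys and instantiated through ReflectionUtils; PipeMapper and PipeReducer (below) only choose which class to hand to the shared createInputWriter/createOutputReader factories. A minimal sketch of a custom reader follows, assuming the OutputReader base class added in this merge is generic in key/value type and exposes the initialize/readKeyValue/getCurrentKey/getCurrentValue/getLastOutput methods that MROutputThread calls above. The class name is illustrative, not part of the patch:

    package org.apache.hadoop.streaming.io;

    import java.io.IOException;
    import java.io.InputStream;

    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.streaming.PipeMapRed;
    import org.apache.hadoop.util.LineReader;

    // Illustrative reader: each line from the subprocess becomes the value,
    // the key stays empty. Not part of this commit.
    public class WholeLineOutputReader extends OutputReader<Text, Text> {

      private LineReader lineReader;
      private final Text key = new Text();
      private final Text value = new Text();

      public void initialize(PipeMapRed pipeMapRed) throws IOException {
        super.initialize(pipeMapRed);
        // TextOutputReader performs the same cast; the client input is a
        // DataInputStream under the covers.
        lineReader = new LineReader((InputStream) pipeMapRed.getClientInput(),
            pipeMapRed.getConfiguration());
      }

      public boolean readKeyValue() throws IOException {
        value.clear();
        return lineReader.readLine(value) > 0;
      }

      public Text getCurrentKey() { return key; }

      public Text getCurrentValue() { return value; }

      public String getLastOutput() { return value.toString(); }
    }

Such a class would be wired in with job.setClass("stream.map.output.reader.class", WholeLineOutputReader.class, OutputReader.class), matching the lookup in the configure() hunk above.
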
Modified: hadoop/common/branches/branch-1-win/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapper.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapper.java?rev=1362639&r1=1362638&r2=1362639&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapper.java (original)
+++ hadoop/common/branches/branch-1-win/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapper.java Tue Jul 17 20:36:07 2012
@@ -27,6 +27,9 @@ import org.apache.hadoop.mapred.Reporter
 import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.SkipBadRecords;
 import org.apache.hadoop.mapred.TextInputFormat;
+import org.apache.hadoop.streaming.io.InputWriter;
+import org.apache.hadoop.streaming.io.OutputReader;
+import org.apache.hadoop.streaming.io.TextInputWriter;
 import org.apache.hadoop.util.StringUtils;
 
 /** A generic Mapper bridge.
@@ -66,9 +69,11 @@ public class PipeMapper extends PipeMapR
     //records input.
     SkipBadRecords.setAutoIncrMapperProcCount(job, false);
     skipping = job.getBoolean("mapred.skip.on", false);
-    String inputFormatClassName = job.getClass("mapred.input.format.class", TextInputFormat.class).getCanonicalName();
-    ignoreKey = inputFormatClassName.equals(TextInputFormat.class.getCanonicalName());
-
+    if (mapInputWriterClass_.getCanonicalName().equals(TextInputWriter.class.getCanonicalName())) {
+      String inputFormatClassName = job.getClass("mapred.input.format.class", TextInputFormat.class).getCanonicalName();
+      ignoreKey = inputFormatClassName.equals(TextInputFormat.class.getCanonicalName());
+    }
+    
     try {
       mapOutputFieldSeparator = job.get("stream.map.output.field.separator", "\t").getBytes("UTF-8");
       mapInputFieldSeparator = job.get("stream.map.input.field.separator", "\t").getBytes("UTF-8");
@@ -99,11 +104,9 @@ public class PipeMapper extends PipeMapR
       // 2/4 Hadoop to Tool
       if (numExceptions_ == 0) {
         if (!this.ignoreKey) {
-          write(key);
-          clientOut_.write(getInputSeparator());
+          inWriter_.writeKey(key);
         }
-        write(value);
-        clientOut_.write('\n');
+        inWriter_.writeValue(value);
         if(skipping) {
           //flush the streams on every record input if running in skip mode
           //so that we don't buffer other records surrounding a bad record. 
@@ -132,18 +135,29 @@ public class PipeMapper extends PipeMapR
     mapRedFinished();
   }
 
-  byte[] getInputSeparator() {
+  @Override
+  public byte[] getInputSeparator() {
     return mapInputFieldSeparator;
   }
 
   @Override
-  byte[] getFieldSeparator() {
+  public byte[] getFieldSeparator() {
     return mapOutputFieldSeparator;
   }
 
   @Override
-  int getNumOfKeyFields() {
+  public int getNumOfKeyFields() {
     return numOfMapOutputKeyFields;
   }
 
+  @Override
+  InputWriter createInputWriter() throws IOException {
+    return super.createInputWriter(mapInputWriterClass_);
+  }
+
+  @Override
+  OutputReader createOutputReader() throws IOException {
+    return super.createOutputReader(mapOutputReaderClass_);
+  }
+
 }

Modified: hadoop/common/branches/branch-1-win/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeReducer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeReducer.java?rev=1362639&r1=1362638&r2=1362639&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeReducer.java (original)
+++ hadoop/common/branches/branch-1-win/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeReducer.java Tue Jul 17 20:36:07 2012
@@ -28,6 +28,8 @@ import org.apache.hadoop.mapred.Reducer;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.SkipBadRecords;
+import org.apache.hadoop.streaming.io.InputWriter;
+import org.apache.hadoop.streaming.io.OutputReader;
 import org.apache.hadoop.util.StringUtils;
 
 import org.apache.hadoop.io.Writable;
@@ -97,10 +99,8 @@ public class PipeReducer extends PipeMap
                                    + StringUtils.stringifyException(
                                                                     outerrThreadsThrowable));
           }
-          write(key);
-          clientOut_.write(getInputSeparator());
-          write(val);
-          clientOut_.write('\n');
+          inWriter_.writeKey(key);
+          inWriter_.writeValue(val);
         } else {
           // "identity reduce"
           output.collect(key, val);
@@ -137,18 +137,29 @@ public class PipeReducer extends PipeMap
     mapRedFinished();
   }
 
-  byte[] getInputSeparator() {
+  @Override
+  public byte[] getInputSeparator() {
     return reduceInputFieldSeparator;
   }
 
   @Override
-  byte[] getFieldSeparator() {
+  public byte[] getFieldSeparator() {
     return reduceOutFieldSeparator;
   }
   
   @Override
-  int getNumOfKeyFields() {
+  public int getNumOfKeyFields() {
     return numOfReduceOutputKeyFields;
   }
+  
+  @Override
+  InputWriter createInputWriter() throws IOException {
+    return super.createInputWriter(reduceInputWriterClass_);
+  }
+
+  @Override
+  OutputReader createOutputReader() throws IOException {
+    return super.createOutputReader(reduceOutputReaderClass_);
+  }
 
 }

Modified: hadoop/common/branches/branch-1-win/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java?rev=1362639&r1=1362638&r2=1362639&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java (original)
+++ hadoop/common/branches/branch-1-win/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java Tue Jul 17 20:36:07 2012
@@ -47,7 +47,6 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.filecache.DistributedCache;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.FileAlreadyExistsException;
 import org.apache.hadoop.mapred.FileInputFormat;
 import org.apache.hadoop.mapred.FileOutputFormat;
@@ -63,7 +62,11 @@ import org.apache.hadoop.mapred.TextInpu
 import org.apache.hadoop.mapred.TextOutputFormat;
 import org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorCombiner;
 import org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorReducer;
+import org.apache.hadoop.streaming.io.IdentifierResolver;
+import org.apache.hadoop.streaming.io.InputWriter;
+import org.apache.hadoop.streaming.io.OutputReader;
 import org.apache.hadoop.util.GenericOptionsParser;
+import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Tool;
 
@@ -284,6 +287,7 @@ public class StreamJob implements Tool {
       inReaderSpec_ = (String)cmdLine.getOptionValue("inputreader"); 
       mapDebugSpec_ = (String)cmdLine.getOptionValue("mapdebug");    
       reduceDebugSpec_ = (String)cmdLine.getOptionValue("reducedebug");
+      ioSpec_ = (String)cmdLine.getOptionValue("io");
       
       String[] car = cmdLine.getOptionValues("cacheArchive");
       if (null != car && car.length > 0){
@@ -408,6 +412,8 @@ public class StreamJob implements Tool {
                                     "File name URI", "fileNameURI", Integer.MAX_VALUE, false);
     Option cacheArchive = createOption("cacheArchive", 
                                        "File name URI", "fileNameURI", Integer.MAX_VALUE, false);
+    Option io = createOption("io",
+                             "Optional.", "spec", 1, false);
     
     // boolean properties
     
@@ -437,6 +443,7 @@ public class StreamJob implements Tool {
       addOption(cmdenv).
       addOption(cacheFile).
       addOption(cacheArchive).
+      addOption(io).
       addOption(verbose).
       addOption(info).
       addOption(debug).
@@ -467,6 +474,7 @@ public class StreamJob implements Tool {
     "To run this script when a map task fails ");
     System.out.println("  -reducedebug <path>  Optional." +
     " To run this script when a reduce task fails ");
+    System.out.println("  -io <identifier>  Optional.");
     System.out.println("  -verbose");
     System.out.println();
     GenericOptionsParser.printGenericCommandUsage(System.out);
@@ -689,9 +697,38 @@ public class StreamJob implements Tool {
 
     jobConf_.setInputFormat(fmt);
 
-    jobConf_.setOutputKeyClass(Text.class);
-    jobConf_.setOutputValueClass(Text.class);
-
+    if (ioSpec_ != null) {
+      jobConf_.set("stream.map.input", ioSpec_);
+      jobConf_.set("stream.map.output", ioSpec_);
+      jobConf_.set("stream.reduce.input", ioSpec_);
+      jobConf_.set("stream.reduce.output", ioSpec_);
+    }
+    
+    Class<? extends IdentifierResolver> idResolverClass = 
+      jobConf_.getClass("stream.io.identifier.resolver.class",
+        IdentifierResolver.class, IdentifierResolver.class);
+    IdentifierResolver idResolver = ReflectionUtils.newInstance(idResolverClass, jobConf_);
+    
+    idResolver.resolve(jobConf_.get("stream.map.input", IdentifierResolver.TEXT_ID));
+    jobConf_.setClass("stream.map.input.writer.class",
+      idResolver.getInputWriterClass(), InputWriter.class);
+    
+    idResolver.resolve(jobConf_.get("stream.reduce.input", IdentifierResolver.TEXT_ID));
+    jobConf_.setClass("stream.reduce.input.writer.class",
+      idResolver.getInputWriterClass(), InputWriter.class);
+    
+    idResolver.resolve(jobConf_.get("stream.map.output", IdentifierResolver.TEXT_ID));
+    jobConf_.setClass("stream.map.output.reader.class",
+      idResolver.getOutputReaderClass(), OutputReader.class);
+    jobConf_.setMapOutputKeyClass(idResolver.getOutputKeyClass());
+    jobConf_.setMapOutputValueClass(idResolver.getOutputValueClass());
+    
+    idResolver.resolve(jobConf_.get("stream.reduce.output", IdentifierResolver.TEXT_ID));
+    jobConf_.setClass("stream.reduce.output.reader.class",
+      idResolver.getOutputReaderClass(), OutputReader.class);
+    jobConf_.setOutputKeyClass(idResolver.getOutputKeyClass());
+    jobConf_.setOutputValueClass(idResolver.getOutputValueClass());
+    
     jobConf_.set("stream.addenvironment", addTaskEnvironment_);
 
     if (mapCmd_ != null) {
@@ -962,6 +999,7 @@ public class StreamJob implements Tool {
   protected String additionalConfSpec_;
   protected String mapDebugSpec_;
   protected String reduceDebugSpec_;
+  protected String ioSpec_;
 
   // Use to communicate config to the external processes (ex env.var.HADOOP_USER)
   // encoding "a=b c=d"

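Note on the StreamJob changes above: the new -io flag is shorthand that fans out into the four stream.{map,reduce}.{input,output} identifiers, which the configured IdentifierResolver then maps to concrete InputWriter/OutputReader classes and to the map-output and job-output key/value types. A hedged sketch of the equivalent direct configuration; the "rawbytes" identifier is an assumption inferred from the RawBytes writer/reader classes added in this merge, and the class name is illustrative:

    import org.apache.hadoop.mapred.JobConf;

    public class RawBytesIoExample {
      public static void main(String[] args) {
        // Equivalent to passing "-io rawbytes" on the streaming command line.
        JobConf conf = new JobConf();
        conf.set("stream.map.input", "rawbytes");
        conf.set("stream.map.output", "rawbytes");
        conf.set("stream.reduce.input", "rawbytes");
        conf.set("stream.reduce.output", "rawbytes");
      }
    }
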
Modified: hadoop/common/branches/branch-1-win/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingFailure.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingFailure.java?rev=1362639&r1=1362638&r2=1362639&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingFailure.java (original)
+++ hadoop/common/branches/branch-1-win/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingFailure.java Tue Jul 17 20:36:07 2012
@@ -25,6 +25,7 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.util.ToolRunner;
 
 /**
  * This class tests if hadoopStreaming returns Exception 
@@ -82,6 +83,25 @@ public class TestStreamingFailure extend
       }
     }
   }
+  
+  public void testStreamingFailureForFailedProcess() throws Exception {
+    int ret = 0;
+    try {
+      createInput();
+      String[] args = {
+          "-input", INPUT_FILE.getAbsolutePath(),
+          "-output", OUTPUT_DIR.getAbsolutePath(),
+          "-mapper", "/bin/ls dsdsdsds-does-not-exist",
+          "-jobconf", "stream.tmpdir="+System.getProperty("test.build.data",
+              "/tmp"),
+      };
+      ret = ToolRunner.run(new StreamJob(), args);
+    } finally {
+      INPUT_FILE.delete();
+      FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
+    }
+    assertEquals("Streaming job failure code expected", 1, ret);
+  }
 
   public static void main(String[]args) throws Exception
   {

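Note on the test above: it expects exit code 1 when the mapper command cannot run, which relies on non-zero subprocess exits counting as task failures by default (the stream.non.zero.exit.is.failure key read in the PipeMapRed hunk earlier in this commit). A job wanting the lenient behavior can flip it; a one-line sketch in a job driver:

    // Key and default are taken from the PipeMapRed hunk above.
    jobConf.setBoolean("stream.non.zero.exit.is.failure", false);
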
Modified: hadoop/common/branches/branch-1-win/src/core/core-default.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/core/core-default.xml?rev=1362639&r1=1362638&r2=1362639&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/core/core-default.xml (original)
+++ hadoop/common/branches/branch-1-win/src/core/core-default.xml Tue Jul 17 20:36:07 2012
@@ -114,7 +114,7 @@
 
 <property>
   <name>io.compression.codecs</name>
-  <value>org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec</value>
+  <value>org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec,org.apache.hadoop.io.compress.SnappyCodec</value>
   <description>A list of the compression codec classes that can be used 
                for compression/decompression.</description>
 </property>

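Note on the core-default.xml change above: SnappyCodec is now registered by default, but it still needs the Hadoop native library with Snappy support loaded at runtime on the task nodes. A hedged sketch of using it for job output; the two mapred.output.* keys are standard branch-1 configuration names, not part of this diff, and the class name is illustrative:

    import org.apache.hadoop.io.compress.CompressionCodec;
    import org.apache.hadoop.io.compress.SnappyCodec;
    import org.apache.hadoop.mapred.JobConf;

    public class SnappyOutputExample {
      public static void main(String[] args) {
        JobConf conf = new JobConf();
        // Compress job output with the newly registered codec.
        conf.setBoolean("mapred.output.compress", true);
        conf.setClass("mapred.output.compression.codec",
            SnappyCodec.class, CompressionCodec.class);
      }
    }
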
Modified: hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/CommonConfigurationKeys.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/CommonConfigurationKeys.java?rev=1362639&r1=1362638&r2=1362639&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/CommonConfigurationKeys.java (original)
+++ hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/CommonConfigurationKeys.java Tue Jul 17 20:36:07 2012
@@ -52,5 +52,17 @@ public class CommonConfigurationKeys {
                                         "ipc.server.read.threadpool.size";
   public static final int IPC_SERVER_RPC_READ_THREADS_DEFAULT = 1;
 
+  public static final String  IO_NATIVE_LIB_AVAILABLE_KEY =
+      "hadoop.native.lib";
+  /** Default value for IO_NATIVE_LIB_AVAILABLE_KEY */
+  public static final boolean IO_NATIVE_LIB_AVAILABLE_DEFAULT = true;
+
+  /** Internal buffer size for Snappy compressor/decompressors */
+  public static final String IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_KEY =
+      "io.compression.codec.snappy.buffersize";
+
+  /** Default value for IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_KEY */
+  public static final int IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_DEFAULT =
+      256 * 1024;
 }
 

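Note on the constants above: they back the internal buffers of the Snappy compressor/decompressor. A sketch of how a codec would read them (the read site itself is not in this hunk; the class name is illustrative):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.CommonConfigurationKeys;

    public class SnappyBufferSizeExample {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Falls back to the 256 KB default defined above.
        int bufferSize = conf.getInt(
            CommonConfigurationKeys.IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_KEY,
            CommonConfigurationKeys.IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_DEFAULT);
        System.out.println("snappy buffer size: " + bufferSize);
      }
    }
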
Modified: hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/FileUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/FileUtil.java?rev=1362639&r1=1362638&r2=1362639&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/FileUtil.java (original)
+++ hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/FileUtil.java Tue Jul 17 20:36:07 2012
@@ -444,11 +444,18 @@ public class FileUtil {
     if (!dir.isDirectory()) {
       return dir.length();
     } else {
-      size = dir.length();
       File[] allFiles = dir.listFiles();
       if(allFiles != null) {
         for (int i = 0; i < allFiles.length; i++) {
-           size = size + getDU(allFiles[i]);
+          boolean isSymLink;
+          try {
+            isSymLink = org.apache.commons.io.FileUtils.isSymlink(allFiles[i]);
+          } catch(IOException ioe) {
+            isSymLink = true;
+          }
+          if(!isSymLink) {
+            size += getDU(allFiles[i]);
+          }
         }
       }
       return size;

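Note on the FileUtil change above: getDU now skips symbolic links while recursing, and a child whose link status cannot be determined is conservatively treated as a symlink; the removed line also means the directory entry's own length is no longer added to the total, so only regular children are summed. Callers are unchanged; a sketch (path and class name illustrative):

    import java.io.File;
    import org.apache.hadoop.fs.FileUtil;

    public class GetDUExample {
      public static void main(String[] args) {
        // Symlinked children no longer inflate (or loop) the reported usage.
        long bytes = FileUtil.getDU(new File("/var/lib/hadoop"));
        System.out.println(bytes + " bytes");
      }
    }
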
Modified: hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/ipc/Client.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/ipc/Client.java?rev=1362639&r1=1362638&r2=1362639&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/ipc/Client.java (original)
+++ hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/ipc/Client.java Tue Jul 17 20:36:07 2012
@@ -612,8 +612,12 @@ public class Client {
           start();
           return;
         }
-      } catch (IOException e) {
-        markClosed(e);
+      } catch (Throwable t) {
+        if (t instanceof IOException) {
+          markClosed((IOException)t);
+        } else {
+          markClosed(new IOException("Couldn't set up IO streams", t));
+        }
         close();
       }
     }

Modified: hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/jmx/JMXJsonServlet.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/jmx/JMXJsonServlet.java?rev=1362639&r1=1362638&r2=1362639&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/jmx/JMXJsonServlet.java (original)
+++ hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/jmx/JMXJsonServlet.java Tue Jul 17 20:36:07 2012
@@ -34,6 +34,7 @@ import javax.management.MBeanServer;
 import javax.management.MalformedObjectNameException;
 import javax.management.ObjectName;
 import javax.management.ReflectionException;
+import javax.management.RuntimeMBeanException;
 import javax.management.openmbean.CompositeData;
 import javax.management.openmbean.CompositeType;
 import javax.management.openmbean.TabularData;
@@ -249,6 +250,15 @@ public class JMXJsonServlet extends Http
     Object value = null;
     try {
       value = mBeanServer.getAttribute(oname, attName);
+    } catch (RuntimeMBeanException e) {
+      // UnsupportedOperationExceptions happen in the normal course of business,
+      // so no need to log them as errors all the time.
+      if (e.getCause() instanceof UnsupportedOperationException) {
+        LOG.debug("getting attribute "+attName+" of "+oname+" threw an exception", e);
+      } else {
+        LOG.error("getting attribute "+attName+" of "+oname+" threw an exception", e);
+      }
+      return;
     } catch (AttributeNotFoundException e) {
       //Ignored the attribute was not found, which should never happen because the bean
       //just told us that it has this attribute, but if this happens just don't output

Modified: hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/security/Groups.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/security/Groups.java?rev=1362639&r1=1362638&r2=1362639&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/security/Groups.java (original)
+++ hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/security/Groups.java Tue Jul 17 20:36:07 2012
@@ -77,6 +77,9 @@ public class Groups {
     }
     // Create and cache user's groups
     groups = new CachedGroups(impl.getGroups(user));
+    if (groups.getGroups().isEmpty()) {
+      throw new IOException("No groups found for user " + user);
+    }
     userToGroupsMap.put(user, groups);
     LOG.debug("Returning fetched groups for '" + user + "'");
     return groups.getGroups();

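Note on the Groups change above: group resolution now fails fast, so a user that resolves to an empty group list raises IOException instead of being cached as group-less, and callers must be prepared for the exception. A lookup sketch; the user and class names are illustrative:

    import java.io.IOException;
    import java.util.List;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.security.Groups;

    public class GroupsLookupExample {
      public static void main(String[] args) {
        Groups groups = new Groups(new Configuration());
        try {
          List<String> userGroups = groups.getGroups("alice");
          System.out.println(userGroups);
        } catch (IOException e) {
          // Since this change, also thrown when no groups can be resolved.
          System.err.println(e.getMessage());
        }
      }
    }
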
Modified: hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/security/KerberosName.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/security/KerberosName.java?rev=1362639&r1=1362638&r2=1362639&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/security/KerberosName.java (original)
+++ hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/security/KerberosName.java Tue Jul 17 20:36:07 2012
@@ -25,9 +25,7 @@ import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
 import org.apache.hadoop.conf.Configuration;
-
-import sun.security.krb5.Config;
-import sun.security.krb5.KrbException;
+import org.apache.hadoop.security.authentication.util.KerberosUtil;
 
 /**
  * This class implements parsing and handling of Kerberos principal names. In 
@@ -73,13 +71,11 @@ public class KerberosName {
   private static List<Rule> rules;
 
   private static String defaultRealm;
-  private static Config kerbConf;
   
   static {
     try {
-      kerbConf = Config.getInstance();
-      defaultRealm = kerbConf.getDefaultRealm();
-    } catch (KrbException ke) {
+      defaultRealm = KerberosUtil.getDefaultRealm();
+    } catch (Exception ke) {
       if(UserGroupInformation.isSecurityEnabled())
         throw new IllegalArgumentException("Can't get Kerberos configuration",ke);
       else 

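Note on the KerberosName change above: the compile-time dependency on sun.security.krb5.Config is gone; KerberosUtil (from the hadoop-auth module) resolves the default realm reflectively, so the class also loads on JVMs that do not ship the Sun Kerberos internals. A hedged sketch of the shape of that helper, reconstructed here for context rather than quoted from this commit (see hadoop-auth's KerberosUtil for the authoritative version):

    import java.lang.reflect.InvocationTargetException;
    import java.lang.reflect.Method;

    public final class KerberosUtilSketch {
      public static String getDefaultRealm()
          throws ClassNotFoundException, NoSuchMethodException,
                 IllegalAccessException, InvocationTargetException {
        // Pick the vendor's Kerberos config class at runtime.
        boolean ibmJvm = System.getProperty("java.vendor").contains("IBM");
        Class<?> configClass = Class.forName(ibmJvm
            ? "com.ibm.security.krb5.internal.Config"
            : "sun.security.krb5.Config");
        Object kerbConf = configClass.getMethod("getInstance").invoke(null);
        return (String) configClass.getMethod("getDefaultRealm").invoke(kerbConf);
      }
    }
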
Modified: hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/security/SecurityUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/security/SecurityUtil.java?rev=1362639&r1=1362638&r2=1362639&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/security/SecurityUtil.java (original)
+++ hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/security/SecurityUtil.java Tue Jul 17 20:36:07 2012
@@ -17,6 +17,10 @@
 package org.apache.hadoop.security;
 
 import java.io.IOException;
+import java.lang.reflect.Constructor;
+import java.lang.reflect.Field;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
 import java.net.InetAddress;
 import java.net.InetSocketAddress;
 import java.net.URI;
@@ -42,9 +46,6 @@ import org.apache.hadoop.security.token.
 //this will need to be replaced someday when there is a suitable replacement
 import sun.net.dns.ResolverConfiguration;
 import sun.net.util.IPAddressUtil;
-import sun.security.jgss.krb5.Krb5Util;
-import sun.security.krb5.Credentials;
-import sun.security.krb5.PrincipalName;
 
 public class SecurityUtil {
   public static final Log LOG = LogFactory.getLog(SecurityUtil.class);
@@ -128,12 +129,41 @@ public class SecurityUtil {
     String serviceName = "host/" + remoteHost.getHost();
     if (LOG.isDebugEnabled())
       LOG.debug("Fetching service ticket for host at: " + serviceName);
-    Credentials serviceCred = null;
+    Object serviceCred = null;
+    Method credsToTicketMeth;
+    Class<?> krb5utilClass;
     try {
-      PrincipalName principal = new PrincipalName(serviceName,
-          PrincipalName.KRB_NT_SRV_HST);
-      serviceCred = Credentials.acquireServiceCreds(principal
-          .toString(), Krb5Util.ticketToCreds(getTgtFromSubject()));
+      Class<?> principalClass;
+      Class<?> credentialsClass;
+      
+      if (System.getProperty("java.vendor").contains("IBM")) {
+        principalClass = Class.forName("com.ibm.security.krb5.PrincipalName");
+        
+        credentialsClass = Class.forName("com.ibm.security.krb5.Credentials");
+        krb5utilClass = Class.forName("com.ibm.security.jgss.mech.krb5");
+      } else {
+        principalClass = Class.forName("sun.security.krb5.PrincipalName");
+        credentialsClass = Class.forName("sun.security.krb5.Credentials");
+        krb5utilClass = Class.forName("sun.security.jgss.krb5.Krb5Util");
+      }
+      @SuppressWarnings("rawtypes")
+      Constructor principalConstructor = principalClass.getConstructor(String.class, 
+          int.class);
+      Field KRB_NT_SRV_HST = principalClass.getDeclaredField("KRB_NT_SRV_HST");
+      Method acquireServiceCredsMeth = 
+          credentialsClass.getDeclaredMethod("acquireServiceCreds", 
+              String.class, credentialsClass);
+      Method ticketToCredsMeth = krb5utilClass.getDeclaredMethod("ticketToCreds", 
+          KerberosTicket.class);
+      credsToTicketMeth = krb5utilClass.getDeclaredMethod("credsToTicket", 
+          credentialsClass);
+      
+      Object principal = principalConstructor.newInstance(serviceName,
+          KRB_NT_SRV_HST.get(principalClass));
+      
+      serviceCred = acquireServiceCredsMeth.invoke(credentialsClass, 
+          principal.toString(), 
+          ticketToCredsMeth.invoke(krb5utilClass, getTgtFromSubject()));
     } catch (Exception e) {
       throw new IOException("Can't get service ticket for: "
           + serviceName, e);
@@ -141,8 +171,13 @@ public class SecurityUtil {
     if (serviceCred == null) {
       throw new IOException("Can't get service ticket for " + serviceName);
     }
-    Subject.getSubject(AccessController.getContext()).getPrivateCredentials()
-        .add(Krb5Util.credsToTicket(serviceCred));
+    try {
+      Subject.getSubject(AccessController.getContext()).getPrivateCredentials()
+          .add(credsToTicketMeth.invoke(krb5utilClass, serviceCred));
+    } catch (Exception e) {
+      throw new IOException("Can't get service ticket for: "
+          + serviceName, e);
+    }
   }
   
   /**

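Note on the SecurityUtil change above: it removes the last compile-time references to the Sun Kerberos internals on this path. The JVM vendor is detected from the java.vendor system property, and the matching PrincipalName/Credentials/Krb5Util classes are loaded and driven reflectively, so the same source builds and runs on both Sun/Oracle and IBM JDKs. The dispatch, factored into a hypothetical helper for readability (the patch inlines this logic rather than extracting it):

    // Hypothetical helper, not in the patch: pick the vendor's variant of a
    // Kerberos-internal class by fully qualified name.
    private static Class<?> krb5Class(String sunName, String ibmName)
        throws ClassNotFoundException {
      boolean ibmJvm = System.getProperty("java.vendor").contains("IBM");
      return Class.forName(ibmJvm ? ibmName : sunName);
    }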

